From 316950051c2b61b34f9a469aca79b87c8cb95bf4 Mon Sep 17 00:00:00 2001 From: saksham-nexla Date: Fri, 6 Feb 2026 19:19:23 +0530 Subject: [PATCH] refactor: remove tools, tool_sets, and mcp_sessions resources --- .changeset/config.json | 11 + .changeset/wise-lions-whisper.md | 5 + .github/workflows/ci-ts-integration.yml | 47 + .github/workflows/ci-ts.yml | 63 + .github/workflows/ci.yml | 23 +- .github/workflows/docs.yml | 12 + .github/workflows/release-ts.yml | 40 + .gitignore | 3 + README.md | 52 +- docs/ts-sdk/api-coverage.md | 70 + docs/ts-sdk/architecture.md | 102 + docs/ts-sdk/integration-tests.md | 66 + docs/ts-sdk/migration-guide.md | 131 + docs/ts-sdk/parity-matrix.md | 92 + nexla_sdk/__init__.py | 2 + nexla_sdk/client.py | 72 +- nexla_sdk/generated/__init__.py | 14 + nexla_sdk/generated/operation_map.py | 2211 ++ nexla_sdk/generated/schema.py | 22 + nexla_sdk/models/__init__.py | 213 + nexla_sdk/models/api_keys/__init__.py | 11 + nexla_sdk/models/api_keys/requests.py | 17 + nexla_sdk/models/api_keys/responses.py | 43 + nexla_sdk/models/auth_parameters/__init__.py | 11 + nexla_sdk/models/auth_parameters/requests.py | 31 + nexla_sdk/models/auth_parameters/responses.py | 27 + nexla_sdk/models/auth_templates/__init__.py | 22 + nexla_sdk/models/auth_templates/requests.py | 37 + nexla_sdk/models/auth_templates/responses.py | 49 + nexla_sdk/models/catalog_configs/__init__.py | 11 + nexla_sdk/models/catalog_configs/requests.py | 21 + nexla_sdk/models/catalog_configs/responses.py | 22 + nexla_sdk/models/catalog_refs/__init__.py | 8 + nexla_sdk/models/catalog_refs/requests.py | 16 + nexla_sdk/models/catalog_refs/responses.py | 17 + nexla_sdk/models/clusters/__init__.py | 25 + nexla_sdk/models/clusters/requests.py | 63 + nexla_sdk/models/clusters/responses.py | 64 + nexla_sdk/models/common.py | 8 +- nexla_sdk/models/connectors/__init__.py | 28 + nexla_sdk/models/connectors/enums.py | 106 + nexla_sdk/models/connectors/requests.py | 15 + 
nexla_sdk/models/connectors/responses.py | 28 + nexla_sdk/models/credentials/__init__.py | 43 +- nexla_sdk/models/credentials/responses.py | 3 +- .../models/custom_data_flows/__init__.py | 11 + .../models/custom_data_flows/requests.py | 25 + .../models/custom_data_flows/responses.py | 26 + .../models/dashboard_transforms/__init__.py | 11 + .../models/dashboard_transforms/requests.py | 11 + .../models/dashboard_transforms/responses.py | 14 + .../data_credentials_groups/__init__.py | 13 + .../data_credentials_groups/requests.py | 21 + .../data_credentials_groups/responses.py | 17 + nexla_sdk/models/destinations/__init__.py | 36 +- nexla_sdk/models/destinations/responses.py | 11 +- nexla_sdk/models/flexible_enums.py | 77 + nexla_sdk/models/flow_triggers/__init__.py | 9 + nexla_sdk/models/flow_triggers/requests.py | 39 + nexla_sdk/models/flow_triggers/responses.py | 32 + nexla_sdk/models/nexsets/responses.py | 4 +- .../notification_channel_settings/__init__.py | 13 + .../notification_channel_settings/requests.py | 14 + .../responses.py | 14 + .../models/notification_settings/__init__.py | 17 + .../models/notification_settings/requests.py | 35 + .../models/notification_settings/responses.py | 37 + .../models/notification_types/__init__.py | 3 + .../models/notification_types/responses.py | 14 + nexla_sdk/models/notifications/__init__.py | 2 + nexla_sdk/models/notifications/responses.py | 12 + nexla_sdk/models/org_tiers/__init__.py | 3 + nexla_sdk/models/org_tiers/responses.py | 13 + .../models/quarantine_settings/__init__.py | 11 + .../models/quarantine_settings/requests.py | 17 + .../models/quarantine_settings/responses.py | 20 + .../models/resource_parameters/__init__.py | 11 + .../models/resource_parameters/requests.py | 29 + .../models/resource_parameters/responses.py | 23 + nexla_sdk/models/service_keys/__init__.py | 13 + nexla_sdk/models/service_keys/requests.py | 20 + nexla_sdk/models/service_keys/responses.py | 29 + nexla_sdk/models/sources/__init__.py | 28 +- 
nexla_sdk/models/sources/enums.py | 42 +- nexla_sdk/models/sources/responses.py | 5 +- nexla_sdk/models/user_settings/__init__.py | 11 + nexla_sdk/models/user_settings/requests.py | 16 + nexla_sdk/models/user_settings/responses.py | 18 + nexla_sdk/models/user_tiers/__init__.py | 3 + nexla_sdk/models/user_tiers/responses.py | 13 + nexla_sdk/models/users/__init__.py | 3 + nexla_sdk/models/users/credits.py | 28 + nexla_sdk/models/validators/__init__.py | 15 + nexla_sdk/models/validators/requests.py | 58 + nexla_sdk/models/validators/responses.py | 51 + nexla_sdk/models/vendor_endpoints/__init__.py | 11 + nexla_sdk/models/vendor_endpoints/requests.py | 16 + .../models/vendor_endpoints/responses.py | 16 + nexla_sdk/models/vendors/__init__.py | 11 + nexla_sdk/models/vendors/requests.py | 28 + nexla_sdk/models/vendors/responses.py | 37 + nexla_sdk/py.typed | 0 nexla_sdk/raw_operations.py | 248 + nexla_sdk/resources/__init__.py | 65 + nexla_sdk/resources/api_keys.py | 153 + nexla_sdk/resources/approval_requests.py | 42 +- nexla_sdk/resources/async_tasks.py | 8 +- nexla_sdk/resources/attribute_transforms.py | 10 + nexla_sdk/resources/auth_parameters.py | 38 + nexla_sdk/resources/auth_templates.py | 170 + nexla_sdk/resources/base_resource.py | 238 + nexla_sdk/resources/catalog_configs.py | 51 + nexla_sdk/resources/cluster_endpoints.py | 138 + nexla_sdk/resources/clusters.py | 156 + nexla_sdk/resources/code_containers.py | 16 + nexla_sdk/resources/connectors.py | 137 + nexla_sdk/resources/credentials.py | 98 + nexla_sdk/resources/cubejs.py | 15 + nexla_sdk/resources/custom_data_flows.py | 76 + nexla_sdk/resources/dashboard_transforms.py | 42 + .../resources/data_credentials_groups.py | 54 + nexla_sdk/resources/data_flows.py | 60 + nexla_sdk/resources/data_schemas.py | 50 +- nexla_sdk/resources/destinations.py | 224 + nexla_sdk/resources/doc_containers.py | 31 +- nexla_sdk/resources/flow_nodes.py | 30 + nexla_sdk/resources/flow_triggers.py | 164 + nexla_sdk/resources/flows.py 
| 142 +- nexla_sdk/resources/genai.py | 12 + nexla_sdk/resources/lookups.py | 38 + nexla_sdk/resources/marketplace.py | 43 + nexla_sdk/resources/metrics.py | 20 +- nexla_sdk/resources/nexsets.py | 278 + .../notification_channel_settings.py | 40 + nexla_sdk/resources/notification_settings.py | 339 + nexla_sdk/resources/notification_types.py | 21 + nexla_sdk/resources/notifications.py | 28 +- nexla_sdk/resources/org_auth_configs.py | 12 + nexla_sdk/resources/org_tiers.py | 28 + nexla_sdk/resources/organizations.py | 140 +- nexla_sdk/resources/projects.py | 56 +- nexla_sdk/resources/quarantine_settings.py | 23 + nexla_sdk/resources/resource_parameters.py | 38 + nexla_sdk/resources/search_health.py | 18 + nexla_sdk/resources/self_signup.py | 3 +- .../resources/self_signup_blocked_domains.py | 24 + nexla_sdk/resources/service_keys.py | 180 + nexla_sdk/resources/sources.py | 238 + nexla_sdk/resources/teams.py | 6 + nexla_sdk/resources/tokens.py | 43 + nexla_sdk/resources/transforms.py | 16 + nexla_sdk/resources/user_settings.py | 43 + nexla_sdk/resources/user_tiers.py | 28 + nexla_sdk/resources/users.py | 235 +- nexla_sdk/resources/validators.py | 178 + nexla_sdk/resources/vendor_endpoints.py | 44 + nexla_sdk/resources/vendors.py | 197 + package.json | 22 + packages/ts-sdk/README.md | 100 + packages/ts-sdk/coverage/lcov-report/base.css | 224 + .../coverage/lcov-report/block-navigation.js | 87 + .../ts-sdk/coverage/lcov-report/favicon.png | Bin 0 -> 445 bytes .../ts-sdk/coverage/lcov-report/index.html | 176 + .../ts-sdk/coverage/lcov-report/prettify.css | 1 + .../ts-sdk/coverage/lcov-report/prettify.js | 2 + .../lcov-report/sort-arrow-sprite.png | Bin 0 -> 138 bytes .../ts-sdk/coverage/lcov-report/sorter.js | 210 + .../lcov-report/src/auth/access-token.ts.html | 163 + .../coverage/lcov-report/src/auth/index.html | 131 + .../lcov-report/src/auth/service-key.ts.html | 517 + .../lcov-report/src/client/http.ts.html | 262 + .../lcov-report/src/client/index.html | 146 + 
.../src/client/nexla-client.ts.html | 715 + .../src/client/operation-types.ts.html | 142 + .../coverage/lcov-report/src/errors.ts.html | 427 + .../coverage/lcov-report/src/index.html | 116 + .../lcov-report/src/resources/index.html | 116 + .../src/resources/resource-client.ts.html | 259 + .../lcov-report/src/webhooks/index.html | 116 + .../lcov-report/src/webhooks/index.ts.html | 373 + packages/ts-sdk/coverage/lcov.info | 914 + packages/ts-sdk/eslint.config.js | 35 + packages/ts-sdk/package.json | 54 + .../scripts/check-generated-coverage.mjs | 185 + .../ts-sdk/scripts/generate-parity-matrix.mjs | 293 + .../ts-sdk/scripts/generate-resource-map.mjs | 361 + .../ts-sdk/scripts/generate-spec-metadata.mjs | 34 + packages/ts-sdk/src/auth/access-token.ts | 26 + packages/ts-sdk/src/auth/service-key.ts | 144 + packages/ts-sdk/src/auth/types.ts | 6 + packages/ts-sdk/src/client/http.ts | 59 + packages/ts-sdk/src/client/nexla-client.ts | 222 + packages/ts-sdk/src/client/operation-types.ts | 19 + packages/ts-sdk/src/client/types.ts | 14 + packages/ts-sdk/src/errors.ts | 114 + packages/ts-sdk/src/generated/resource-map.ts | 455 + packages/ts-sdk/src/generated/schema.ts | 20072 ++++++++++++++++ .../ts-sdk/src/generated/spec-metadata.ts | 8 + packages/ts-sdk/src/index.ts | 12 + .../src/resources/generated/access_control.ts | 251 + .../resources/generated/approval_requests.ts | 31 + .../src/resources/generated/async_tasks.ts | 87 + .../src/resources/generated/audit_logs.ts | 71 + .../resources/generated/code_containers.ts | 66 + .../src/resources/generated/credentials.ts | 76 + .../src/resources/generated/destinations.ts | 76 + .../ts-sdk/src/resources/generated/flows.ts | 78 + .../ts-sdk/src/resources/generated/genai.ts | 61 + .../ts-sdk/src/resources/generated/index.ts | 120 + .../ts-sdk/src/resources/generated/limits.ts | 16 + .../ts-sdk/src/resources/generated/lookups.ts | 71 + .../src/resources/generated/marketplace.ts | 76 + .../ts-sdk/src/resources/generated/metrics.ts | 51 
+ .../ts-sdk/src/resources/generated/nexsets.ts | 81 + .../src/resources/generated/notifications.ts | 128 + .../resources/generated/org_auth_configs.ts | 61 + .../src/resources/generated/organizations.ts | 73 + .../src/resources/generated/projects.ts | 96 + .../generated/quarantine_settings.ts | 31 + .../src/resources/generated/runtimes.ts | 66 + .../src/resources/generated/self_signup.ts | 21 + .../resources/generated/self_signup_admin.ts | 41 + .../ts-sdk/src/resources/generated/sources.ts | 76 + .../ts-sdk/src/resources/generated/teams.ts | 76 + .../ts-sdk/src/resources/generated/tokens.ts | 26 + .../src/resources/generated/transforms.ts | 96 + .../src/resources/generated/user_settings.ts | 20 + .../ts-sdk/src/resources/generated/users.ts | 57 + .../ts-sdk/src/resources/generated/utils.ts | 40 + packages/ts-sdk/src/resources/index.ts | 4 + .../ts-sdk/src/resources/resource-client.ts | 58 + packages/ts-sdk/src/webhooks/index.ts | 96 + .../tests/access-control/accessors.test.ts | 238 + .../access-control/role-limitations.test.ts | 297 + packages/ts-sdk/tests/access-token.test.ts | 15 + packages/ts-sdk/tests/auth.test.ts | 80 + packages/ts-sdk/tests/client.test.ts | 39 + .../ts-sdk/tests/coverage-branches.test.ts | 57 + .../tests/coverage-nexla-client.test.ts | 84 + packages/ts-sdk/tests/errors.test.ts | 17 + .../ts-sdk/tests/http-retry-headers.test.ts | 28 + .../ts-sdk/tests/integration/live-api.test.ts | 44 + packages/ts-sdk/tests/logout.test.ts | 40 + packages/ts-sdk/tests/optional-auth.test.ts | 32 + packages/ts-sdk/tests/request-errors.test.ts | 26 + .../tests/resource-client-actions.test.ts | 23 + .../tests/resource-client-error.test.ts | 26 + packages/ts-sdk/tests/resource-client.test.ts | 54 + .../tests/resources/credentials.test.ts | 273 + .../tests/resources/destinations.test.ts | 245 + packages/ts-sdk/tests/resources/flows.test.ts | 236 + .../ts-sdk/tests/resources/nexsets.test.ts | 245 + .../tests/resources/organizations.test.ts | 136 + 
.../ts-sdk/tests/resources/projects.test.ts | 339 + .../ts-sdk/tests/resources/sources.test.ts | 245 + packages/ts-sdk/tests/resources/teams.test.ts | 251 + packages/ts-sdk/tests/resources/users.test.ts | 203 + packages/ts-sdk/tests/retry.test.ts | 22 + .../ts-sdk/tests/utils/factories/accessors.ts | 81 + .../ts-sdk/tests/utils/factories/common.ts | 61 + .../tests/utils/factories/credentials.ts | 64 + .../tests/utils/factories/destinations.ts | 70 + .../ts-sdk/tests/utils/factories/flows.ts | 104 + .../ts-sdk/tests/utils/factories/index.ts | 15 + .../ts-sdk/tests/utils/factories/nexsets.ts | 73 + .../tests/utils/factories/organizations.ts | 82 + .../ts-sdk/tests/utils/factories/projects.ts | 74 + .../ts-sdk/tests/utils/factories/sources.ts | 76 + .../ts-sdk/tests/utils/factories/teams.ts | 82 + .../ts-sdk/tests/utils/factories/users.ts | 72 + packages/ts-sdk/tests/utils/index.ts | 7 + packages/ts-sdk/tests/utils/mock-fetch.ts | 140 + packages/ts-sdk/tests/utils/test-client.ts | 65 + packages/ts-sdk/tests/webhooks-error.test.ts | 24 + packages/ts-sdk/tests/webhooks.test.ts | 41 + packages/ts-sdk/tsconfig.json | 25 + packages/ts-sdk/tsconfig.typecheck.json | 5 + packages/ts-sdk/tsup.config.ts | 12 + packages/ts-sdk/vitest.config.ts | 31 + pnpm-lock.yaml | 15591 ++++++++++++ pnpm-workspace.yaml | 3 + pyproject.toml | 4 + scripts/parity/README.md | 24 + scripts/parity/build_matrices.py | 267 + scripts/parity/check_operation_map_sync.py | 54 + scripts/parity/generate_operation_map.py | 128 + test_auth_param_import.py | 48 + tests/test_client_init.py | 7 + tests/unit/test_access_control.py | 305 + tests/unit/test_access_insights.py | 434 + tests/unit/test_api_keys.py | 180 + tests/unit/test_approval_requests.py | 7 + tests/unit/test_audit_log.py | 400 + tests/unit/test_auth_templates.py | 200 + tests/unit/test_cluster_endpoints.py | 218 + tests/unit/test_clusters.py | 288 + tests/unit/test_connector_enums.py | 311 + tests/unit/test_connectors.py | 173 + 
tests/unit/test_credentials.py | 1 + tests/unit/test_docs_operations.py | 381 + tests/unit/test_error_scenarios.py | 716 + tests/unit/test_flexible_enums.py | 154 + tests/unit/test_flow_triggers.py | 265 + tests/unit/test_lifecycle_operations.py | 382 + tests/unit/test_notification_settings.py | 319 + tests/unit/test_pagination.py | 335 + tests/unit/test_parity_tooling.py | 88 + tests/unit/test_projects.py | 50 + tests/unit/test_raw_operations.py | 66 + tests/unit/test_search_operations.py | 329 + tests/unit/test_service_keys.py | 266 + tests/unit/test_tag_management.py | 525 + tests/unit/test_tokens_resource.py | 60 + tests/unit/test_validators.py | 286 + tests/unit/test_vendors.py | 188 + tests/utils/__init__.py | 6 + tests/utils/mock_builders.py | 178 + turbo.json | 24 + verify_type_checking.py | 61 + 319 files changed, 67265 insertions(+), 85 deletions(-) create mode 100644 .changeset/config.json create mode 100644 .changeset/wise-lions-whisper.md create mode 100644 .github/workflows/ci-ts-integration.yml create mode 100644 .github/workflows/ci-ts.yml create mode 100644 .github/workflows/release-ts.yml create mode 100644 docs/ts-sdk/api-coverage.md create mode 100644 docs/ts-sdk/architecture.md create mode 100644 docs/ts-sdk/integration-tests.md create mode 100644 docs/ts-sdk/migration-guide.md create mode 100644 docs/ts-sdk/parity-matrix.md create mode 100644 nexla_sdk/generated/__init__.py create mode 100644 nexla_sdk/generated/operation_map.py create mode 100644 nexla_sdk/generated/schema.py create mode 100644 nexla_sdk/models/api_keys/__init__.py create mode 100644 nexla_sdk/models/api_keys/requests.py create mode 100644 nexla_sdk/models/api_keys/responses.py create mode 100644 nexla_sdk/models/auth_parameters/__init__.py create mode 100644 nexla_sdk/models/auth_parameters/requests.py create mode 100644 nexla_sdk/models/auth_parameters/responses.py create mode 100644 nexla_sdk/models/auth_templates/__init__.py create mode 100644 
nexla_sdk/models/auth_templates/requests.py create mode 100644 nexla_sdk/models/auth_templates/responses.py create mode 100644 nexla_sdk/models/catalog_configs/__init__.py create mode 100644 nexla_sdk/models/catalog_configs/requests.py create mode 100644 nexla_sdk/models/catalog_configs/responses.py create mode 100644 nexla_sdk/models/catalog_refs/__init__.py create mode 100644 nexla_sdk/models/catalog_refs/requests.py create mode 100644 nexla_sdk/models/catalog_refs/responses.py create mode 100644 nexla_sdk/models/clusters/__init__.py create mode 100644 nexla_sdk/models/clusters/requests.py create mode 100644 nexla_sdk/models/clusters/responses.py create mode 100644 nexla_sdk/models/connectors/__init__.py create mode 100644 nexla_sdk/models/connectors/enums.py create mode 100644 nexla_sdk/models/connectors/requests.py create mode 100644 nexla_sdk/models/connectors/responses.py create mode 100644 nexla_sdk/models/custom_data_flows/__init__.py create mode 100644 nexla_sdk/models/custom_data_flows/requests.py create mode 100644 nexla_sdk/models/custom_data_flows/responses.py create mode 100644 nexla_sdk/models/dashboard_transforms/__init__.py create mode 100644 nexla_sdk/models/dashboard_transforms/requests.py create mode 100644 nexla_sdk/models/dashboard_transforms/responses.py create mode 100644 nexla_sdk/models/data_credentials_groups/__init__.py create mode 100644 nexla_sdk/models/data_credentials_groups/requests.py create mode 100644 nexla_sdk/models/data_credentials_groups/responses.py create mode 100644 nexla_sdk/models/flexible_enums.py create mode 100644 nexla_sdk/models/flow_triggers/__init__.py create mode 100644 nexla_sdk/models/flow_triggers/requests.py create mode 100644 nexla_sdk/models/flow_triggers/responses.py create mode 100644 nexla_sdk/models/notification_channel_settings/__init__.py create mode 100644 nexla_sdk/models/notification_channel_settings/requests.py create mode 100644 nexla_sdk/models/notification_channel_settings/responses.py create 
mode 100644 nexla_sdk/models/notification_settings/__init__.py create mode 100644 nexla_sdk/models/notification_settings/requests.py create mode 100644 nexla_sdk/models/notification_settings/responses.py create mode 100644 nexla_sdk/models/notification_types/__init__.py create mode 100644 nexla_sdk/models/notification_types/responses.py create mode 100644 nexla_sdk/models/org_tiers/__init__.py create mode 100644 nexla_sdk/models/org_tiers/responses.py create mode 100644 nexla_sdk/models/quarantine_settings/__init__.py create mode 100644 nexla_sdk/models/quarantine_settings/requests.py create mode 100644 nexla_sdk/models/quarantine_settings/responses.py create mode 100644 nexla_sdk/models/resource_parameters/__init__.py create mode 100644 nexla_sdk/models/resource_parameters/requests.py create mode 100644 nexla_sdk/models/resource_parameters/responses.py create mode 100644 nexla_sdk/models/service_keys/__init__.py create mode 100644 nexla_sdk/models/service_keys/requests.py create mode 100644 nexla_sdk/models/service_keys/responses.py create mode 100644 nexla_sdk/models/user_settings/__init__.py create mode 100644 nexla_sdk/models/user_settings/requests.py create mode 100644 nexla_sdk/models/user_settings/responses.py create mode 100644 nexla_sdk/models/user_tiers/__init__.py create mode 100644 nexla_sdk/models/user_tiers/responses.py create mode 100644 nexla_sdk/models/users/credits.py create mode 100644 nexla_sdk/models/validators/__init__.py create mode 100644 nexla_sdk/models/validators/requests.py create mode 100644 nexla_sdk/models/validators/responses.py create mode 100644 nexla_sdk/models/vendor_endpoints/__init__.py create mode 100644 nexla_sdk/models/vendor_endpoints/requests.py create mode 100644 nexla_sdk/models/vendor_endpoints/responses.py create mode 100644 nexla_sdk/models/vendors/__init__.py create mode 100644 nexla_sdk/models/vendors/requests.py create mode 100644 nexla_sdk/models/vendors/responses.py create mode 100644 nexla_sdk/py.typed create 
mode 100644 nexla_sdk/raw_operations.py create mode 100644 nexla_sdk/resources/api_keys.py create mode 100644 nexla_sdk/resources/auth_parameters.py create mode 100644 nexla_sdk/resources/auth_templates.py create mode 100644 nexla_sdk/resources/catalog_configs.py create mode 100644 nexla_sdk/resources/cluster_endpoints.py create mode 100644 nexla_sdk/resources/clusters.py create mode 100644 nexla_sdk/resources/connectors.py create mode 100644 nexla_sdk/resources/cubejs.py create mode 100644 nexla_sdk/resources/custom_data_flows.py create mode 100644 nexla_sdk/resources/dashboard_transforms.py create mode 100644 nexla_sdk/resources/data_credentials_groups.py create mode 100644 nexla_sdk/resources/data_flows.py create mode 100644 nexla_sdk/resources/flow_nodes.py create mode 100644 nexla_sdk/resources/flow_triggers.py create mode 100644 nexla_sdk/resources/notification_channel_settings.py create mode 100644 nexla_sdk/resources/notification_settings.py create mode 100644 nexla_sdk/resources/notification_types.py create mode 100644 nexla_sdk/resources/org_tiers.py create mode 100644 nexla_sdk/resources/quarantine_settings.py create mode 100644 nexla_sdk/resources/resource_parameters.py create mode 100644 nexla_sdk/resources/search_health.py create mode 100644 nexla_sdk/resources/self_signup_blocked_domains.py create mode 100644 nexla_sdk/resources/service_keys.py create mode 100644 nexla_sdk/resources/tokens.py create mode 100644 nexla_sdk/resources/user_settings.py create mode 100644 nexla_sdk/resources/user_tiers.py create mode 100644 nexla_sdk/resources/validators.py create mode 100644 nexla_sdk/resources/vendor_endpoints.py create mode 100644 nexla_sdk/resources/vendors.py create mode 100644 package.json create mode 100644 packages/ts-sdk/README.md create mode 100644 packages/ts-sdk/coverage/lcov-report/base.css create mode 100644 packages/ts-sdk/coverage/lcov-report/block-navigation.js create mode 100644 packages/ts-sdk/coverage/lcov-report/favicon.png create mode 
100644 packages/ts-sdk/coverage/lcov-report/index.html create mode 100644 packages/ts-sdk/coverage/lcov-report/prettify.css create mode 100644 packages/ts-sdk/coverage/lcov-report/prettify.js create mode 100644 packages/ts-sdk/coverage/lcov-report/sort-arrow-sprite.png create mode 100644 packages/ts-sdk/coverage/lcov-report/sorter.js create mode 100644 packages/ts-sdk/coverage/lcov-report/src/auth/access-token.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/auth/index.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/auth/service-key.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/client/http.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/client/index.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/client/nexla-client.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/client/operation-types.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/errors.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/index.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/resources/index.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/resources/resource-client.ts.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/webhooks/index.html create mode 100644 packages/ts-sdk/coverage/lcov-report/src/webhooks/index.ts.html create mode 100644 packages/ts-sdk/coverage/lcov.info create mode 100644 packages/ts-sdk/eslint.config.js create mode 100644 packages/ts-sdk/package.json create mode 100644 packages/ts-sdk/scripts/check-generated-coverage.mjs create mode 100644 packages/ts-sdk/scripts/generate-parity-matrix.mjs create mode 100644 packages/ts-sdk/scripts/generate-resource-map.mjs create mode 100644 packages/ts-sdk/scripts/generate-spec-metadata.mjs create mode 100644 packages/ts-sdk/src/auth/access-token.ts create mode 100644 packages/ts-sdk/src/auth/service-key.ts create mode 100644 
packages/ts-sdk/src/auth/types.ts create mode 100644 packages/ts-sdk/src/client/http.ts create mode 100644 packages/ts-sdk/src/client/nexla-client.ts create mode 100644 packages/ts-sdk/src/client/operation-types.ts create mode 100644 packages/ts-sdk/src/client/types.ts create mode 100644 packages/ts-sdk/src/errors.ts create mode 100644 packages/ts-sdk/src/generated/resource-map.ts create mode 100644 packages/ts-sdk/src/generated/schema.ts create mode 100644 packages/ts-sdk/src/generated/spec-metadata.ts create mode 100644 packages/ts-sdk/src/index.ts create mode 100644 packages/ts-sdk/src/resources/generated/access_control.ts create mode 100644 packages/ts-sdk/src/resources/generated/approval_requests.ts create mode 100644 packages/ts-sdk/src/resources/generated/async_tasks.ts create mode 100644 packages/ts-sdk/src/resources/generated/audit_logs.ts create mode 100644 packages/ts-sdk/src/resources/generated/code_containers.ts create mode 100644 packages/ts-sdk/src/resources/generated/credentials.ts create mode 100644 packages/ts-sdk/src/resources/generated/destinations.ts create mode 100644 packages/ts-sdk/src/resources/generated/flows.ts create mode 100644 packages/ts-sdk/src/resources/generated/genai.ts create mode 100644 packages/ts-sdk/src/resources/generated/index.ts create mode 100644 packages/ts-sdk/src/resources/generated/limits.ts create mode 100644 packages/ts-sdk/src/resources/generated/lookups.ts create mode 100644 packages/ts-sdk/src/resources/generated/marketplace.ts create mode 100644 packages/ts-sdk/src/resources/generated/metrics.ts create mode 100644 packages/ts-sdk/src/resources/generated/nexsets.ts create mode 100644 packages/ts-sdk/src/resources/generated/notifications.ts create mode 100644 packages/ts-sdk/src/resources/generated/org_auth_configs.ts create mode 100644 packages/ts-sdk/src/resources/generated/organizations.ts create mode 100644 packages/ts-sdk/src/resources/generated/projects.ts create mode 100644 
packages/ts-sdk/src/resources/generated/quarantine_settings.ts create mode 100644 packages/ts-sdk/src/resources/generated/runtimes.ts create mode 100644 packages/ts-sdk/src/resources/generated/self_signup.ts create mode 100644 packages/ts-sdk/src/resources/generated/self_signup_admin.ts create mode 100644 packages/ts-sdk/src/resources/generated/sources.ts create mode 100644 packages/ts-sdk/src/resources/generated/teams.ts create mode 100644 packages/ts-sdk/src/resources/generated/tokens.ts create mode 100644 packages/ts-sdk/src/resources/generated/transforms.ts create mode 100644 packages/ts-sdk/src/resources/generated/user_settings.ts create mode 100644 packages/ts-sdk/src/resources/generated/users.ts create mode 100644 packages/ts-sdk/src/resources/generated/utils.ts create mode 100644 packages/ts-sdk/src/resources/index.ts create mode 100644 packages/ts-sdk/src/resources/resource-client.ts create mode 100644 packages/ts-sdk/src/webhooks/index.ts create mode 100644 packages/ts-sdk/tests/access-control/accessors.test.ts create mode 100644 packages/ts-sdk/tests/access-control/role-limitations.test.ts create mode 100644 packages/ts-sdk/tests/access-token.test.ts create mode 100644 packages/ts-sdk/tests/auth.test.ts create mode 100644 packages/ts-sdk/tests/client.test.ts create mode 100644 packages/ts-sdk/tests/coverage-branches.test.ts create mode 100644 packages/ts-sdk/tests/coverage-nexla-client.test.ts create mode 100644 packages/ts-sdk/tests/errors.test.ts create mode 100644 packages/ts-sdk/tests/http-retry-headers.test.ts create mode 100644 packages/ts-sdk/tests/integration/live-api.test.ts create mode 100644 packages/ts-sdk/tests/logout.test.ts create mode 100644 packages/ts-sdk/tests/optional-auth.test.ts create mode 100644 packages/ts-sdk/tests/request-errors.test.ts create mode 100644 packages/ts-sdk/tests/resource-client-actions.test.ts create mode 100644 packages/ts-sdk/tests/resource-client-error.test.ts create mode 100644 
packages/ts-sdk/tests/resource-client.test.ts create mode 100644 packages/ts-sdk/tests/resources/credentials.test.ts create mode 100644 packages/ts-sdk/tests/resources/destinations.test.ts create mode 100644 packages/ts-sdk/tests/resources/flows.test.ts create mode 100644 packages/ts-sdk/tests/resources/nexsets.test.ts create mode 100644 packages/ts-sdk/tests/resources/organizations.test.ts create mode 100644 packages/ts-sdk/tests/resources/projects.test.ts create mode 100644 packages/ts-sdk/tests/resources/sources.test.ts create mode 100644 packages/ts-sdk/tests/resources/teams.test.ts create mode 100644 packages/ts-sdk/tests/resources/users.test.ts create mode 100644 packages/ts-sdk/tests/retry.test.ts create mode 100644 packages/ts-sdk/tests/utils/factories/accessors.ts create mode 100644 packages/ts-sdk/tests/utils/factories/common.ts create mode 100644 packages/ts-sdk/tests/utils/factories/credentials.ts create mode 100644 packages/ts-sdk/tests/utils/factories/destinations.ts create mode 100644 packages/ts-sdk/tests/utils/factories/flows.ts create mode 100644 packages/ts-sdk/tests/utils/factories/index.ts create mode 100644 packages/ts-sdk/tests/utils/factories/nexsets.ts create mode 100644 packages/ts-sdk/tests/utils/factories/organizations.ts create mode 100644 packages/ts-sdk/tests/utils/factories/projects.ts create mode 100644 packages/ts-sdk/tests/utils/factories/sources.ts create mode 100644 packages/ts-sdk/tests/utils/factories/teams.ts create mode 100644 packages/ts-sdk/tests/utils/factories/users.ts create mode 100644 packages/ts-sdk/tests/utils/index.ts create mode 100644 packages/ts-sdk/tests/utils/mock-fetch.ts create mode 100644 packages/ts-sdk/tests/utils/test-client.ts create mode 100644 packages/ts-sdk/tests/webhooks-error.test.ts create mode 100644 packages/ts-sdk/tests/webhooks.test.ts create mode 100644 packages/ts-sdk/tsconfig.json create mode 100644 packages/ts-sdk/tsconfig.typecheck.json create mode 100644 packages/ts-sdk/tsup.config.ts 
create mode 100644 packages/ts-sdk/vitest.config.ts create mode 100644 pnpm-lock.yaml create mode 100644 pnpm-workspace.yaml create mode 100644 scripts/parity/README.md create mode 100755 scripts/parity/build_matrices.py create mode 100755 scripts/parity/check_operation_map_sync.py create mode 100755 scripts/parity/generate_operation_map.py create mode 100644 test_auth_param_import.py create mode 100644 tests/unit/test_access_control.py create mode 100644 tests/unit/test_access_insights.py create mode 100644 tests/unit/test_api_keys.py create mode 100644 tests/unit/test_audit_log.py create mode 100644 tests/unit/test_auth_templates.py create mode 100644 tests/unit/test_cluster_endpoints.py create mode 100644 tests/unit/test_clusters.py create mode 100644 tests/unit/test_connector_enums.py create mode 100644 tests/unit/test_connectors.py create mode 100644 tests/unit/test_docs_operations.py create mode 100644 tests/unit/test_error_scenarios.py create mode 100644 tests/unit/test_flexible_enums.py create mode 100644 tests/unit/test_flow_triggers.py create mode 100644 tests/unit/test_lifecycle_operations.py create mode 100644 tests/unit/test_notification_settings.py create mode 100644 tests/unit/test_pagination.py create mode 100644 tests/unit/test_parity_tooling.py create mode 100644 tests/unit/test_raw_operations.py create mode 100644 tests/unit/test_search_operations.py create mode 100644 tests/unit/test_service_keys.py create mode 100644 tests/unit/test_tag_management.py create mode 100644 tests/unit/test_tokens_resource.py create mode 100644 tests/unit/test_validators.py create mode 100644 tests/unit/test_vendors.py create mode 100644 turbo.json create mode 100644 verify_type_checking.py diff --git a/.changeset/config.json b/.changeset/config.json new file mode 100644 index 0000000..cdc5ff3 --- /dev/null +++ b/.changeset/config.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://unpkg.com/@changesets/config@3.0.1/schema.json", + "changelog": 
["@changesets/cli/changelog", { "repo": "nexla/nexla-sdk" }], + "commit": false, + "fixed": [], + "linked": [], + "access": "public", + "baseBranch": "main", + "updateInternalDependencies": "patch", + "ignore": [] +} diff --git a/.changeset/wise-lions-whisper.md b/.changeset/wise-lions-whisper.md new file mode 100644 index 0000000..205e70d --- /dev/null +++ b/.changeset/wise-lions-whisper.md @@ -0,0 +1,5 @@ +--- +"@nexla/sdk": major +--- + +Promote the TypeScript SDK to production-ready GA with OpenAPI coverage gates, spec metadata validation, and parity/migration documentation. diff --git a/.github/workflows/ci-ts-integration.yml b/.github/workflows/ci-ts-integration.yml new file mode 100644 index 0000000..f56fb70 --- /dev/null +++ b/.github/workflows/ci-ts-integration.yml @@ -0,0 +1,47 @@ +name: CI (TypeScript SDK Integration) + +on: + workflow_dispatch: + schedule: + - cron: "0 9 * * 1" + pull_request: + branches: [ main ] + paths: + - "packages/ts-sdk/**" + - "plugin-redoc-0.yaml" + - "pnpm-workspace.yaml" + - "package.json" + - "pnpm-lock.yaml" + - ".github/workflows/ci-ts-integration.yml" + +jobs: + integration: + if: ${{ secrets.NEXLA_SERVICE_KEY != '' || secrets.NEXLA_ACCESS_TOKEN != '' }} + runs-on: ubuntu-latest + continue-on-error: true + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: "pnpm" + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Generate SDK artifacts + run: pnpm -C packages/ts-sdk gen + + - name: Run integration tests + run: pnpm -C packages/ts-sdk test:integration + env: + NEXLA_SERVICE_KEY: ${{ secrets.NEXLA_SERVICE_KEY }} + NEXLA_ACCESS_TOKEN: ${{ secrets.NEXLA_ACCESS_TOKEN }} + NEXLA_API_URL: ${{ vars.NEXLA_API_URL }} diff --git a/.github/workflows/ci-ts.yml b/.github/workflows/ci-ts.yml new file mode 100644 index 0000000..1968679 --- /dev/null +++ 
b/.github/workflows/ci-ts.yml @@ -0,0 +1,63 @@ +name: CI (TypeScript SDK) + +on: + push: + branches: [ main ] + paths: + - "packages/ts-sdk/**" + - "plugin-redoc-0.yaml" + - "pnpm-workspace.yaml" + - "package.json" + - "pnpm-lock.yaml" + - "turbo.json" + - ".github/workflows/ci-ts.yml" + pull_request: + branches: [ main ] + paths: + - "packages/ts-sdk/**" + - "plugin-redoc-0.yaml" + - "pnpm-workspace.yaml" + - "package.json" + - "pnpm-lock.yaml" + - "turbo.json" + - ".github/workflows/ci-ts.yml" + +jobs: + build-test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'pnpm' + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Validate generated coverage and spec metadata + run: pnpm -C packages/ts-sdk check:generated + + - name: Generate SDK artifacts + run: | + pnpm -C packages/ts-sdk gen + git diff --exit-code + + - name: Lint + run: pnpm -C packages/ts-sdk lint + + - name: Typecheck + run: pnpm -C packages/ts-sdk typecheck + + - name: Test + run: pnpm -C packages/ts-sdk coverage + + - name: Build + run: pnpm -C packages/ts-sdk build diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1632485..30410d9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,8 +3,26 @@ name: CI on: push: branches: [ main ] + paths: + - "nexla_sdk/**" + - "scripts/**" + - "tests/**" + - "pyproject.toml" + - "requirements.txt" + - "pytest.ini" + - "README.md" + - ".github/workflows/ci.yml" pull_request: branches: [ main ] + paths: + - "nexla_sdk/**" + - "scripts/**" + - "tests/**" + - "pyproject.toml" + - "requirements.txt" + - "pytest.ini" + - "README.md" + - ".github/workflows/ci.yml" jobs: build-test: @@ -30,9 +48,12 @@ jobs: pip install ruff ruff check nexla_sdk + - name: Check Generated Operation Map Sync + run: | + python 
scripts/parity/check_operation_map_sync.py + - name: Run unit tests env: PYTHONWARNINGS: default run: | pytest -m unit --maxfail=1 -q - diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 99f405e..c33a188 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -4,8 +4,20 @@ on: push: branches: [ main ] tags: [ 'v*' ] + paths: + - "docs-site/**" + - "docs/**" + - "nexla_sdk/**" + - "README.md" + - ".github/workflows/docs.yml" pull_request: branches: [ main ] + paths: + - "docs-site/**" + - "docs/**" + - "nexla_sdk/**" + - "README.md" + - ".github/workflows/docs.yml" permissions: contents: read diff --git a/.github/workflows/release-ts.yml b/.github/workflows/release-ts.yml new file mode 100644 index 0000000..43658e8 --- /dev/null +++ b/.github/workflows/release-ts.yml @@ -0,0 +1,40 @@ +name: Release TypeScript SDK + +on: + push: + branches: [ main ] + paths: + - "packages/ts-sdk/**" + - ".changeset/**" + - "pnpm-workspace.yaml" + - "package.json" + - "pnpm-lock.yaml" + - ".github/workflows/release-ts.yml" + +jobs: + release: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20 + cache: 'pnpm' + + - name: Setup pnpm + uses: pnpm/action-setup@v4 + with: + version: 9 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Create release PR or publish + uses: changesets/action@v1 + with: + publish: pnpm release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.gitignore b/.gitignore index f811e81..43a74d1 100644 --- a/.gitignore +++ b/.gitignore @@ -202,3 +202,6 @@ create_flow_sample/ .claude/ .direnv/ .envrc + +# Parity artifacts generated from local backend/spec analysis +artifacts/parity/*.json diff --git a/README.md b/README.md index c88a4bb..46d1f15 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,25 @@ A Python SDK for interacting with the Nexla API. 
+## TypeScript SDK (New) + +This repository now includes a production-ready TypeScript SDK in `packages/ts-sdk`. + +### Install + +```bash +npm install @nexla/sdk +``` + +### Quick Start + +```ts +import { NexlaClient } from "@nexla/sdk"; + +const client = new NexlaClient({ serviceKey: process.env.NEXLA_SERVICE_KEY }); +const flows = await client.request("get", "/flows"); +``` + ## Installation ```bash @@ -576,6 +595,25 @@ doc_audit = client.doc_containers.get_audit_log(doc_container_id=1001) schema_audit = client.data_schemas.get_audit_log(schema_id=5001) ``` +### Raw Operation-Level Access + +```python +# List available OpenAPI operation ids +ops = client.raw.list_operations() + +# Call by operation id with typed path/query/body slots +project_flows = client.raw.call( + "get_project_flows", + path_params={"project_id": 123}, +) + +# Direct raw HTTP helpers are also available +limits = client.raw.get("/limits") + +# Backend-only or non-spec route access +approved = client.raw.request("POST", "/self_signup_requests/42/approve") +``` + ## Coverage Matrix Mapping of major OpenAPI areas to SDK resources. All requests set `Accept: application/vnd.nexla.api.v1+json` and default base URL `https://dataops.nexla.io/nexla-api`. @@ -605,7 +643,8 @@ Mapping of major OpenAPI areas to SDK resources. 
All requests set `Accept: appli - GenAI Configurations/Org Settings: `client.genai` — configs CRUD; org settings CRUD; active_config - Doc Containers: `client.doc_containers` — audit_log; (access control via BaseResource helpers) - Data Schemas: `client.data_schemas` — audit_log; (access control via BaseResource helpers) -- Webhooks: not included as a dedicated helper yet (use direct HTTP with API key per spec) +- Webhooks: `client.create_webhook_client(api_key=...)` for API-key authenticated webhook sends +- Full OpenAPI operation-level access: `client.raw.call(operation_id, ...)` ## Error Handling @@ -682,6 +721,17 @@ export NEXLA_API_URL="https://your-nexla-instance.com/nexla-api" pytest tests/integration/ ``` +### Parity Tooling + +```bash +# Generate operation map for client.raw +python scripts/parity/generate_operation_map.py + +# Build OpenAPI/admin-routes/SDK parity matrices +python scripts/parity/build_matrices.py \ + --admin-routes /Users/sakshammittal/Documents/GitHub/admin-api/config/routes.rb +``` + ### Setting Up Environment ```bash diff --git a/docs/ts-sdk/api-coverage.md b/docs/ts-sdk/api-coverage.md new file mode 100644 index 0000000..e1a87bc --- /dev/null +++ b/docs/ts-sdk/api-coverage.md @@ -0,0 +1,70 @@ +# TypeScript SDK API Coverage Process + +## Coverage Dimensions + +The TS SDK tracks coverage in two dimensions: + +- **OpenAPI operation coverage:** whether every API `operationId` is available through generated TS resource methods. +- **Python-to-TS surface parity:** whether Python `client.` surfaces exist as first-class TS resource clients. + +Operation coverage can be high while resource parity is still incomplete; these are tracked separately by design. 
+ +## Source of Truth + +- OpenAPI spec: `plugin-redoc-0.yaml` +- Generated TS schema: `packages/ts-sdk/src/generated/schema.ts` +- Generated TS resources: `packages/ts-sdk/src/resources/generated/*.ts` +- Python resource surface: `nexla_sdk/client.py` +- Generated parity report: `docs/ts-sdk/parity-matrix.md` + +## Refresh Workflow + +Run this from repository root whenever API surface changes: + +```bash +pnpm -C packages/ts-sdk gen +node packages/ts-sdk/scripts/generate-parity-matrix.mjs +pnpm -C packages/ts-sdk lint +pnpm -C packages/ts-sdk typecheck +pnpm -C packages/ts-sdk coverage +``` + +What this does: + +1. Regenerates all TS OpenAPI artifacts. +2. Recomputes Python->TS parity and operation coverage matrix. +3. Verifies SDK quality gates still pass. + +## CI Expectations + +`ci-ts.yml` already enforces generated artifact consistency (`pnpm -C packages/ts-sdk gen` + clean git diff) and blocks merges if generated files are stale. + +The parity matrix file is documentation output and should be refreshed in the same PR when parity or coverage changes. + +## Interpreting `parity-matrix.md` + +- **Python resource parity %**: how many Python resource surfaces currently have first-class TS resource clients. +- **Session operationId coverage %**: how many spec operationIds are generated into TS resource clients. +- **TS-only resources**: TS resource clients that do not yet exist as Python first-class resource properties. + +## Gap Handling Policy + +If a Python resource is not yet available as a generated TS resource client: + +- Use `client.raw` as a typed fallback for path-level access. +- Keep migration docs updated with guidance for that resource. +- Track progress by refreshing `docs/ts-sdk/parity-matrix.md`. + +## PR Checklist (Coverage-Sensitive Changes) + +1. Regenerate TS artifacts (`pnpm -C packages/ts-sdk gen`). +2. Refresh parity matrix (`node packages/ts-sdk/scripts/generate-parity-matrix.mjs`). +3. Run lint/typecheck/tests for TS SDK. +4. 
Update `docs/ts-sdk/migration-guide.md` if Python->TS mapping changed. +5. Include generated doc diffs (`docs/ts-sdk/parity-matrix.md`) in the PR. + +## Related Docs + +- [Architecture](./architecture.md) +- [Migration guide](./migration-guide.md) +- [Integration testing (non-blocking guidance)](./integration-tests.md) diff --git a/docs/ts-sdk/architecture.md b/docs/ts-sdk/architecture.md new file mode 100644 index 0000000..c381a26 --- /dev/null +++ b/docs/ts-sdk/architecture.md @@ -0,0 +1,102 @@ +# Nexla TypeScript SDK Architecture + +## Goals + +- Ship a production-ready TypeScript SDK with strict typing and stable runtime behavior. +- Keep API coverage OpenAPI-first so endpoint additions are code-generated, not hand-written. +- Keep migration friction low for Python SDK users by preserving resource naming and common call patterns. + +## Package Boundaries + +- Repository root remains Python-first (`nexla_sdk/`, `tests/`, packaging). +- TypeScript SDK lives in `packages/ts-sdk` and has independent build/test/lint/typecheck tasks. +- TS docs live in `docs/ts-sdk` and are versioned with code. + +## TypeScript SDK Layout + +```text +packages/ts-sdk/ + src/ + auth/ # Service-key and access-token providers + client/ # NexlaClient, retry logic, typed request helpers + generated/ # OpenAPI-generated schema + resource map + resources/ + generated/ # Generated resource clients (operationId methods + CRUD aliases) + webhooks/ # Webhook client (API key auth) + errors.ts # SDK exception hierarchy + index.ts # Public package exports + scripts/ + generate-resource-map.mjs + generate-parity-matrix.mjs + tests/ +``` + +## Runtime Request Pipeline + +1. `NexlaClient` selects auth mode from constructor options or env vars. +2. `createFetchWithRetry` wraps `fetch` with retry/backoff behavior. +3. `openapi-fetch` middleware injects headers: + - `Authorization: Bearer ` + - `Accept: application/vnd.nexla.api.+json` + - `Content-Type: application/json` for non-GET/HEAD +4. 
Typed requests flow through `request` / `requestOperation`. +5. HTTP status codes map to SDK errors (`AuthenticationError`, `ValidationError`, `RateLimitError`, etc.). +6. Service-key auth can auto-refresh on 401 and retry once. + +## Authentication Architecture + +- `serviceKey`: obtains session tokens via `/token`, caches token, refreshes with margin. +- `accessToken`: direct bearer token, no refresh flow. +- `webhookApiKey`: optional webhook client (`client.webhooks`) for webhook endpoints. + +## OpenAPI Generation Pipeline + +The OpenAPI spec (`plugin-redoc-0.yaml`) is the source of truth. + +```bash +pnpm -C packages/ts-sdk gen +``` + +This generates: + +- `packages/ts-sdk/src/generated/schema.ts` (all paths/components/operations types) +- `packages/ts-sdk/src/generated/resource-map.ts` (CRUD alias map) +- `packages/ts-sdk/src/resources/generated/*.ts` (per-resource operation clients) + +Parity reporting is generated separately: + +```bash +node packages/ts-sdk/scripts/generate-parity-matrix.mjs +``` + +Output: + +- `docs/ts-sdk/parity-matrix.md` + +## Coverage Model + +- **Operation coverage:** measured against OpenAPI `operationId`s in generated TS resources. +- **Python surface parity:** measured at resource-surface level (`client.` parity). 
+- Both are documented in: + - `docs/ts-sdk/api-coverage.md` + - `docs/ts-sdk/parity-matrix.md` + +## Release and CI Boundaries + +Blocking TypeScript checks are in `.github/workflows/ci-ts.yml`: + +- artifact generation + clean diff +- lint +- typecheck +- tests/coverage +- build + +Integration tests are intentionally documented as non-blocking relative to publish/release gating: + +- `docs/ts-sdk/integration-tests.md` + +## Related Docs + +- [Coverage process](./api-coverage.md) +- [Python-to-TS migration](./migration-guide.md) +- [Generated parity matrix](./parity-matrix.md) diff --git a/docs/ts-sdk/integration-tests.md b/docs/ts-sdk/integration-tests.md new file mode 100644 index 0000000..87fe990 --- /dev/null +++ b/docs/ts-sdk/integration-tests.md @@ -0,0 +1,66 @@ +# TypeScript SDK Integration Testing (Non-Blocking) + +This document defines how to add and run TS integration tests without turning them into a release blocker. + +## Policy + +- Integration tests are **non-blocking** relative to TypeScript release/publish gates. +- Unit/lint/typecheck/build remain the blocking CI checks. +- Integration runs are for confidence, signal, and early regression detection. +- Existing workflow: `.github/workflows/ci-ts-integration.yml` (scheduled + manual + PR path-based). + +## When to Add Integration Tests + +Add integration tests for: + +- auth/token lifecycle behavior against a live environment +- critical write/read flows that cannot be fully validated with mocked HTTP +- regressions reported from real API interactions + +## Test Placement and Naming + +- Directory: `packages/ts-sdk/tests/integration/` +- File pattern: `*.test.ts` +- Keep tests idempotent and cleanup-aware. +- Avoid assumptions about globally shared account state. + +## Required Environment Variables + +- `NEXLA_SERVICE_KEY` (preferred) +- or `NEXLA_ACCESS_TOKEN` +- optional `NEXLA_API_URL` + +Never commit credentials or test fixtures containing secrets. 
+ +## Local Run Commands + +```bash +NEXLA_SERVICE_KEY=... pnpm -C packages/ts-sdk test -- --passWithNoTests tests/integration +``` + +For iterative runs: + +```bash +NEXLA_SERVICE_KEY=... pnpm -C packages/ts-sdk test:watch -- tests/integration +``` + +## Suggested CI Shape (Non-Blocking) + +If a CI workflow is added for TS integration tests, keep it isolated from release gating. Recommended options: + +- separate workflow (`workflow_dispatch` and/or scheduled) +- `continue-on-error: true` on the integration job +- avoid `needs` dependencies from release jobs + +Example job policy snippet: + +```yaml +continue-on-error: true +if: ${{ secrets.NEXLA_SERVICE_KEY != '' || secrets.NEXLA_ACCESS_TOKEN != '' }} +``` + +## Reporting Expectations + +- Integration test failures should create a clear signal in CI logs. +- Failing integration runs should not block package release automation by default. +- Use failures to prioritize reliability work and test hardening. diff --git a/docs/ts-sdk/migration-guide.md b/docs/ts-sdk/migration-guide.md new file mode 100644 index 0000000..7c70da9 --- /dev/null +++ b/docs/ts-sdk/migration-guide.md @@ -0,0 +1,131 @@ +# Migration Guide: Python SDK to TypeScript SDK + +This guide maps common Python SDK patterns to their TypeScript SDK equivalents. + +## Initialization and Auth Mapping + +| Python SDK | TypeScript SDK | +| --- | --- | +| `NexlaClient(service_key="...")` | `new NexlaClient({ serviceKey: "..." })` | +| `NexlaClient(access_token="...")` | `new NexlaClient({ accessToken: "..." 
})` | +| `base_url=` | `baseUrl:` | +| `api_version=` | `apiVersion:` | + +Shared environment variables: + +- `NEXLA_SERVICE_KEY` +- `NEXLA_ACCESS_TOKEN` +- `NEXLA_API_URL` + +## Core Call-Pattern Mapping + +| Python pattern | TS pattern | +| --- | --- | +| `client..list()` | `await client..list()` | +| `client..get(id)` | `await client..get({ params: { path: { : id } } })` | +| `client..create(payload)` | `await client..create({ body: payload })` | +| `client.request("get", "/flows")` | `await client.request("get", "/flows")` | +| synchronous return values | Promise-based (`await`) | + +For non-CRUD endpoints, TS exposes operationId methods: + +```ts +const activated = await client.flows.flow_activate_with_flow_id({ + params: { path: { flow_id: 123 } } +}); +``` + +## Python to TS Resource Mapping Guidance + +### One-to-one common resources + +- `flows` -> `client.flows` +- `sources` -> `client.sources` +- `destinations` -> `client.destinations` +- `credentials` -> `client.credentials` +- `nexsets` -> `client.nexsets` +- `users` -> `client.users` +- `teams` -> `client.teams` +- `projects` -> `client.projects` +- `organizations` -> `client.organizations` + +### Webhooks mapping + +Python: + +```python +webhooks = client.create_webhook_client(api_key="...") +``` + +TypeScript: + +```ts +import { NexlaClient, WebhooksClient } from "@nexla/sdk"; + +// Option 1: standalone +const webhooks = new WebhooksClient({ apiKey: "..." }); + +// Option 2: attached to NexlaClient +const client = new NexlaClient({ serviceKey: "...", webhookApiKey: "..." }); +await client.webhooks?.sendOneRecord("https://api.nexla.com/webhook/abc", { id: 1 }); +``` + +### Python resources not yet first-class in TS + +For resources that are still missing in TS as dedicated `client.` clients, use typed raw access: + +```ts +const result = await client.raw.GET("/clusters"); +``` + +Use [parity-matrix.md](./parity-matrix.md) as the current source of Python-to-TS parity status. 
+ +## Type Mapping Guidance + +Python models are Pydantic-based. TS uses generated OpenAPI types. + +```ts +import type { operations } from "@nexla/sdk"; + +type GetFlowResponse = operations["get_flow_by_id"]["responses"][200]["content"]["application/json"]; +``` + +## Error Mapping + +| Python exception | TS exception | +| --- | --- | +| `AuthenticationError` | `AuthenticationError` | +| `NotFoundError` | `NotFoundError` | +| `ValidationError` | `ValidationError` | +| `RateLimitError` | `RateLimitError` | +| `ServerError` | `ServerError` | +| `NexlaError` | `NexlaError` | + +TS usage example: + +```ts +import { AuthenticationError } from "@nexla/sdk"; + +try { + await client.flows.list(); +} catch (error) { + if (error instanceof AuthenticationError) { + // re-authenticate or fail fast + } + throw error; +} +``` + +## Practical Migration Checklist + +1. Convert constructor args (`service_key` -> `serviceKey`, `access_token` -> `accessToken`). +2. Convert synchronous calls to `await`-based calls. +3. Move positional path args into `params.path` objects. +4. Move payload arguments into `body`. +5. For uncovered resources, use `client.raw` and track parity updates in [parity-matrix.md](./parity-matrix.md). + +## Related Docs + +- [Architecture](./architecture.md) +- [Coverage process](./api-coverage.md) +- [Generated parity matrix](./parity-matrix.md) diff --git a/docs/ts-sdk/parity-matrix.md b/docs/ts-sdk/parity-matrix.md new file mode 100644 index 0000000..f4b5ee8 --- /dev/null +++ b/docs/ts-sdk/parity-matrix.md @@ -0,0 +1,92 @@ +# TypeScript SDK Parity Matrix (Generated) + +> Auto-generated by `node packages/ts-sdk/scripts/generate-parity-matrix.mjs`. Do not edit this file manually. 
+ +Generated at: `2026-02-06T03:36:11.979Z` + +## Summary + +- Python resources discovered: **57** +- TS resources discovered: **29** +- Python resource parity: **25/57 (43.9%)** +- OpenAPI operations in spec: **272** +- OpenAPI session operations in spec (excluding webhook-tagged operations): **272** +- OpenAPI webhook-tagged operations in spec: **0** +- OperationIds implemented in generated TS resources: **272** +- Session operationId coverage: **272/272 (100.0%)** + +## Python To TS Resource Parity + +| Python resource | TS equivalent | Status | Notes | +| --- | --- | --- | --- | +| `api_keys` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `approval_requests` | `client.approval_requests` | covered | Generated TS resource client available. | +| `async_tasks` | `client.async_tasks` | covered | Generated TS resource client available. | +| `attribute_transforms` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `auth_parameters` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `auth_templates` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `catalog_configs` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `cluster_endpoints` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `clusters` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `code_containers` | `client.code_containers` | covered | Generated TS resource client available. | +| `connectors` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `credentials` | `client.credentials` | covered | Generated TS resource client available. 
| +| `cubejs` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `custom_data_flows` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `dashboard_transforms` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `data_credentials_groups` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `data_flows` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `data_schemas` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `destinations` | `client.destinations` | covered | Generated TS resource client available. | +| `doc_containers` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `flow_nodes` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `flow_triggers` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `flows` | `client.flows` | covered | Generated TS resource client available. | +| `genai` | `client.genai` | covered | Generated TS resource client available. | +| `lookups` | `client.lookups` | covered | Generated TS resource client available. | +| `marketplace` | `client.marketplace` | covered | Generated TS resource client available. | +| `mcp_sessions` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `metrics` | `client.metrics` | covered | Generated TS resource client available. | +| `nexsets` | `client.nexsets` | covered | Generated TS resource client available. | +| `notification_channel_settings` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. 
| +| `notification_settings` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `notification_types` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `notifications` | `client.notifications` | covered | Generated TS resource client available. | +| `org_auth_configs` | `client.org_auth_configs` | covered | Generated TS resource client available. | +| `org_tiers` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `organizations` | `client.organizations` | covered | Generated TS resource client available. | +| `projects` | `client.projects` | covered | Generated TS resource client available. | +| `quarantine_settings` | `client.quarantine_settings` | covered | Generated TS resource client available. | +| `resource_parameters` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `runtimes` | `client.runtimes` | covered | Generated TS resource client available. | +| `search_health` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `self_signup` | `client.self_signup` | covered | Generated TS resource client available. | +| `self_signup_blocked_domains` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `service_keys` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `sources` | `client.sources` | covered | Generated TS resource client available. | +| `teams` | `client.teams` | covered | Generated TS resource client available. | +| `tokens` | `client.tokens` | covered | Generated TS resource client available. | +| `tool_sets` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. 
| +| `tools` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `transforms` | `client.transforms` | covered | Generated TS resource client available. | +| `user_settings` | `client.user_settings` | covered | Generated TS resource client available. | +| `user_tiers` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `users` | `client.users` | covered | Generated TS resource client available. | +| `validators` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `vendor_endpoints` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `vendors` | -- | missing | Use `client.raw` for typed path-level access until this resource is generated. | +| `webhooks` | `WebhooksClient` / `client.webhooks` | covered | Python uses create_webhook_client(); TS uses WebhooksClient directly or NexlaClient({ webhookApiKey }). | + +## TS-Only Resource Surfaces + +| TS resource | Notes | +| --- | --- | +| `access_control` | OpenAPI-generated TS resource. Python client does not expose this as a first-class resource property. | +| `audit_logs` | OpenAPI-generated TS resource. Python client does not expose this as a first-class resource property. | +| `limits` | OpenAPI-generated TS resource. Python client does not expose this as a first-class resource property. | +| `self_signup_admin` | OpenAPI-generated TS resource. Python client does not expose this as a first-class resource property. 
| + +## Operation Coverage Details + +- Missing session operationIds in generated TS resources: **0** +- Extra operationIds in generated TS resources (not found in current spec session set): **0** diff --git a/nexla_sdk/__init__.py b/nexla_sdk/__init__.py index 010e8f0..d51335b 100644 --- a/nexla_sdk/__init__.py +++ b/nexla_sdk/__init__.py @@ -14,6 +14,7 @@ # Import main client from nexla_sdk.client import NexlaClient +from nexla_sdk.raw_operations import RawOperationsClient # Import exceptions from nexla_sdk.exceptions import ( @@ -84,6 +85,7 @@ __all__ = [ # Client "NexlaClient", + "RawOperationsClient", # Resources "CredentialsResource", "FlowsResource", diff --git a/nexla_sdk/client.py b/nexla_sdk/client.py index 156c407..f1433d2 100644 --- a/nexla_sdk/client.py +++ b/nexla_sdk/client.py @@ -18,30 +18,60 @@ ValidationError, ) from .http_client import HttpClientError, HttpClientInterface, RequestsHttpClient +from .raw_operations import RawOperationsClient from .resources.approval_requests import ApprovalRequestsResource from .resources.async_tasks import AsyncTasksResource from .resources.attribute_transforms import AttributeTransformsResource +from .resources.auth_parameters import AuthParametersResource +from .resources.auth_templates import AuthTemplatesResource +from .resources.api_keys import ApiKeysResource +from .resources.catalog_configs import CatalogConfigsResource +from .resources.cluster_endpoints import ClusterEndpointsResource +from .resources.clusters import ClustersResource from .resources.code_containers import CodeContainersResource +from .resources.connectors import ConnectorsResource from .resources.credentials import CredentialsResource +from .resources.custom_data_flows import CustomDataFlowsResource +from .resources.data_credentials_groups import DataCredentialsGroupsResource +from .resources.data_flows import DataFlowsResource from .resources.data_schemas import DataSchemasResource +from .resources.dashboard_transforms import 
DashboardTransformsResource from .resources.destinations import DestinationsResource from .resources.doc_containers import DocContainersResource +from .resources.flow_nodes import FlowNodesResource +from .resources.flow_triggers import FlowTriggersResource from .resources.flows import FlowsResource +from .resources.cubejs import CubeJsResource +from .resources.notification_channel_settings import NotificationChannelSettingsResource +from .resources.notification_types import NotificationTypesResource from .resources.genai import GenAIResource from .resources.lookups import LookupsResource from .resources.marketplace import MarketplaceResource from .resources.metrics import MetricsResource from .resources.nexsets import NexsetsResource from .resources.notifications import NotificationsResource +from .resources.notification_settings import NotificationSettingsResource from .resources.org_auth_configs import OrgAuthConfigsResource +from .resources.org_tiers import OrgTiersResource from .resources.organizations import OrganizationsResource from .resources.projects import ProjectsResource +from .resources.quarantine_settings import QuarantineSettingsResource +from .resources.resource_parameters import ResourceParametersResource from .resources.runtimes import RuntimesResource +from .resources.search_health import SearchHealthResource from .resources.self_signup import SelfSignupResource +from .resources.self_signup_blocked_domains import SelfSignupBlockedDomainsResource +from .resources.service_keys import ServiceKeysResource from .resources.sources import SourcesResource from .resources.teams import TeamsResource +from .resources.tokens import TokensResource from .resources.transforms import TransformsResource from .resources.users import UsersResource +from .resources.user_settings import UserSettingsResource +from .resources.user_tiers import UserTiersResource +from .resources.validators import ValidatorsResource +from .resources.vendor_endpoints import 
VendorEndpointsResource +from .resources.vendors import VendorsResource from .resources.webhooks import WebhooksResource logger = logging.getLogger(__name__) @@ -98,7 +128,7 @@ def __init__( access_token: Nexla access token for direct authentication (mutually exclusive with service_key) base_url: Nexla API base URL (defaults to environment variable or standard URL) api_version: API version to use - token_refresh_margin: Seconds before token expiry to trigger refresh (default: 5 minutes) + token_refresh_margin: Seconds before token expiry to trigger refresh (default: 1 hour) http_client: HTTP client implementation (defaults to RequestsHttpClient) trace_enabled: Explicitly enable/disable OpenTelemetry tracing. If None, tracing auto-enables when a global OTEL config is detected. @@ -164,18 +194,32 @@ def __init__( http_client=self.http_client, ) + # Full operation-level API access (OpenAPI operation_id based) + self.raw: RawOperationsClient = RawOperationsClient(self) + # Initialize API endpoints self.flows = FlowsResource(self) + self.flow_nodes = FlowNodesResource(self) + self.data_flows = DataFlowsResource(self) self.sources = SourcesResource(self) self.destinations = DestinationsResource(self) self.credentials = CredentialsResource(self) + self.custom_data_flows = CustomDataFlowsResource(self) + self.data_credentials_groups = DataCredentialsGroupsResource(self) self.lookups = LookupsResource(self) self.nexsets = NexsetsResource(self) self.users = UsersResource(self) + self.user_settings = UserSettingsResource(self) + self.user_tiers = UserTiersResource(self) self.organizations = OrganizationsResource(self) self.teams = TeamsResource(self) self.projects = ProjectsResource(self) self.notifications = NotificationsResource(self) + self.notification_settings = NotificationSettingsResource(self) + self.notification_channel_settings = NotificationChannelSettingsResource(self) + self.notification_types = NotificationTypesResource(self) + self.quarantine_settings = 
QuarantineSettingsResource(self) + self.dashboard_transforms = DashboardTransformsResource(self) self.metrics = MetricsResource(self) self.code_containers = CodeContainersResource(self) self.transforms = TransformsResource(self) @@ -185,10 +229,34 @@ def __init__( self.runtimes = RuntimesResource(self) self.marketplace = MarketplaceResource(self) self.org_auth_configs = OrgAuthConfigsResource(self) + self.org_tiers = OrgTiersResource(self) + self.auth_parameters = AuthParametersResource(self) + self.resource_parameters = ResourceParametersResource(self) + self.catalog_configs = CatalogConfigsResource(self) + self.vendor_endpoints = VendorEndpointsResource(self) self.genai = GenAIResource(self) self.self_signup = SelfSignupResource(self) + self.self_signup_blocked_domains = SelfSignupBlockedDomainsResource(self) self.doc_containers = DocContainersResource(self) self.data_schemas = DataSchemasResource(self) + self.tokens = TokensResource(self) + self.search_health = SearchHealthResource(self) + self.cubejs = CubeJsResource(self) + + # Phase 1 resources + self.validators = ValidatorsResource(self) + self.service_keys = ServiceKeysResource(self) + self.flow_triggers = FlowTriggersResource(self) + + # Phase 3 resources + self.clusters = ClustersResource(self) + self.cluster_endpoints = ClusterEndpointsResource(self) + + # Phase 4 resources + self.api_keys = ApiKeysResource(self) + self.connectors = ConnectorsResource(self) + self.vendors = VendorsResource(self) + self.auth_templates = AuthTemplatesResource(self) def get_access_token(self) -> str: """ @@ -425,7 +493,7 @@ def _handle_http_error( } # Map status codes to specific exceptions - if status_code == 400: + if status_code in (400, 422): raise ValidationError( error_msg, status_code=status_code, diff --git a/nexla_sdk/generated/__init__.py b/nexla_sdk/generated/__init__.py new file mode 100644 index 0000000..af49ba7 --- /dev/null +++ b/nexla_sdk/generated/__init__.py @@ -0,0 +1,14 @@ +"""Generated OpenAPI metadata 
for Nexla SDK.""" + +from .operation_map import OPERATION_MAP, OperationId, OperationSpec +from .schema import JSONObject, JSONValue, RawRequest, RawResponse + +__all__ = [ + "OPERATION_MAP", + "OperationId", + "OperationSpec", + "JSONValue", + "JSONObject", + "RawRequest", + "RawResponse", +] diff --git a/nexla_sdk/generated/operation_map.py b/nexla_sdk/generated/operation_map.py new file mode 100644 index 0000000..456c864 --- /dev/null +++ b/nexla_sdk/generated/operation_map.py @@ -0,0 +1,2211 @@ +"""Auto-generated operation map from OpenAPI. Do not edit manually.""" + +from typing import Dict, List, Literal, TypedDict + + +class OperationSpec(TypedDict): + method: str + path: str + tags: List[str] + summary: str + path_params: List[str] + + +OperationId = Literal[ + 'acknowledge_async_task', + 'activate_data_sink', + 'activate_nexset', + 'activate_runtime', + 'activate_source', + 'add_code_container_accessors', + 'add_data_credential_accessors', + 'add_data_map_accessors', + 'add_data_schema_accessors', + 'add_data_sink_accessors', + 'add_data_source_accessors', + 'add_doc_container_accessors', + 'add_domain_custodians', + 'add_flow_accessors', + 'add_flow_accessors__deprecated', + 'add_nexset_accessors', + 'add_org_custodians', + 'add_project_accessors', + 'add_project_flows', + 'add_project_flows__deprecated', + 'add_self_signup_blocked_domain', + 'add_team_accessors', + 'add_team_members', + 'approve_approval_request', + 'approve_self_sign_up_request', + 'check_data_map_entries', + 'copy_code_container', + 'copy_data_sink_source', + 'copy_nexset', + 'copy_source', + 'copy_transform', + 'create_api_auth_config', + 'create_async_task', + 'create_attribute_transform', + 'create_code_container', + 'create_data_credential', + 'create_data_sink', + 'create_data_source', + 'create_domain', + 'create_domain_item', + 'create_domains', + 'create_gen_ai_config', + 'create_gen_ai_org_setting', + 'create_nexset', + 'create_notification_channel_setting', + 
'create_notification_setting', + 'create_project', + 'create_quarantine_data_export_settings', + 'create_reusable_record_transform', + 'create_runtime', + 'create_static_data_map', + 'create_team', + 'create_user', + 'data_credential_probe', + 'data_set_docs_recommendation', + 'delete_all_notifications', + 'delete_api_auth_config', + 'delete_async_task', + 'delete_attribute_transform', + 'delete_code_container', + 'delete_code_container_accessors', + 'delete_data_credential', + 'delete_data_credential_accessors', + 'delete_data_map', + 'delete_data_map_accessors', + 'delete_data_map_entries', + 'delete_data_schema_accessors', + 'delete_data_sink', + 'delete_data_sink_accessors', + 'delete_data_source', + 'delete_data_source_accessors', + 'delete_doc_container_accessors', + 'delete_domain', + 'delete_flow', + 'delete_flow_accessors', + 'delete_flow_accessors__deprecated', + 'delete_flow_by_resource_id', + 'delete_gen_ai_integration_config', + 'delete_gen_ai_org_setting', + 'delete_nexset', + 'delete_nexset_accessors', + 'delete_notification_channel_setting', + 'delete_notification_setting', + 'delete_notifications', + 'delete_org_members', + 'delete_project', + 'delete_project_accessors', + 'delete_reusable_record_transform', + 'delete_runtime', + 'delete_self_signup_blocked_domain', + 'delete_team', + 'delete_team_accessors', + 'delete_team_members', + 'delete_user_quarantine_data_export_settings', + 'flow_activate_with_flow_id', + 'flow_activate_with_resource_id', + 'flow_copy_with_flow_id', + 'flow_docs_recommendation', + 'flow_pause_with_flow_id', + 'flow_pause_with_resource_id', + 'gen_ai_org_settings_show_active', + 'get_all_api_auth_configs', + 'get_api_auth_configs', + 'get_api_auth_configs_2', + 'get_api_auth_settings', + 'get_async_task', + 'get_async_task_download_link', + 'get_async_task_result', + 'get_async_task_types', + 'get_async_tasks', + 'get_async_tasks_by_status', + 'get_async_tasks_explain_arguments', + 'get_async_tasks_of_type', + 
'get_attribute_transform', + 'get_attribute_transforms', + 'get_code_container', + 'get_code_container_accessors', + 'get_code_container_audit_log', + 'get_code_containers', + 'get_current_user', + 'get_data_credential', + 'get_data_credential_accessors', + 'get_data_credential_audit_log', + 'get_data_credential_expanded', + 'get_data_credentials', + 'get_data_map', + 'get_data_map_accessors', + 'get_data_map_audit_log', + 'get_data_maps', + 'get_data_schema_accessors', + 'get_data_schema_audit_log', + 'get_data_sink', + 'get_data_sink_accessors', + 'get_data_sink_audit_log', + 'get_data_sink_expanded', + 'get_data_sinks', + 'get_data_source', + 'get_data_source_accessors', + 'get_data_source_audit_log', + 'get_data_source_expanded', + 'get_data_sources', + 'get_doc_container_accessors', + 'get_doc_container_audit_log', + 'get_domain', + 'get_domain_custodians', + 'get_domain_items', + 'get_domains', + 'get_domains_for_org', + 'get_flow_accessors', + 'get_flow_accessors__deprecated', + 'get_flow_by_id', + 'get_flow_by_resource_id', + 'get_flow_logs_for_run_id', + 'get_flow_metrics', + 'get_flows', + 'get_gen_ai_configs', + 'get_gen_ai_integration_config', + 'get_gen_ai_org_setting', + 'get_gen_ai_org_settings', + 'get_nexset', + 'get_nexset_accessors', + 'get_nexset_audit_log', + 'get_nexset_samples', + 'get_nexsets', + 'get_notification', + 'get_notification_channel_setting', + 'get_notification_count', + 'get_notification_setting', + 'get_notification_types', + 'get_notifications', + 'get_org', + 'get_org_audit_log', + 'get_org_custodians', + 'get_org_members', + 'get_orgs', + 'get_pending_approval_requests', + 'get_project', + 'get_project_accessors', + 'get_project_audit_log', + 'get_project_flows', + 'get_project_flows__deprecated', + 'get_projects', + 'get_public_attribute_transforms', + 'get_public_code_containers', + 'get_public_reusable_record_transforms', + 'get_requested_approval_requests', + 'get_resource_metrics_by_run', + 'get_resource_metrics_daily', 
+ 'get_reusable_record_transform', + 'get_reusable_record_transforms', + 'get_runtime', + 'get_runtimes', + 'get_self_signup_blocked_domains', + 'get_self_signup_requests', + 'get_team', + 'get_team_accessors', + 'get_team_audit_log', + 'get_team_members', + 'get_teams', + 'get_user', + 'get_user_audit_log', + 'get_user_expand', + 'get_user_quarantine_data_export_settings', + 'get_user_settings', + 'get_users', + 'get_users_expand', + 'limits', + 'list_notification_channel_settings', + 'list_notification_settings', + 'list_notification_settings_by_type', + 'list_notification_type', + 'list_resource_notification_settings', + 'login_with_basic_auth', + 'logout', + 'notifications_mark_read', + 'notifications_mark_unread', + 'org_account_metrics_total', + 'pause_data_sink', + 'pause_nexset', + 'pause_runtime', + 'pause_source', + 'preview_connector_content', + 'preview_storage_structure', + 'reject_approval_request', + 'remove_domain_custodians', + 'remove_org_custodians', + 'remove_project_flows', + 'remove_project_flows__deprecated', + 'replace_code_container_accessors', + 'replace_data_credential_accessors', + 'replace_data_map_accessors', + 'replace_data_schema_accessors', + 'replace_data_sink_accessors', + 'replace_data_source_accessors', + 'replace_doc_container_accessors', + 'replace_flow_accessors', + 'replace_flow_accessors__deprecated', + 'replace_nexset_accessors', + 'replace_project_accessors', + 'replace_project_flows', + 'replace_project_flows__deprecated', + 'replace_team_accessors', + 'replace_team_members', + 'rerun_async_task', + 'self_sign_up', + 'update_api_auth_config', + 'update_api_auth_config_2', + 'update_attribute_transform', + 'update_code_container', + 'update_data_credential', + 'update_data_map_metadata', + 'update_data_sink', + 'update_data_source', + 'update_domain', + 'update_domain_custodians', + 'update_gen_ai_integration_config', + 'update_nexset', + 'update_notification_channel_setting', + 'update_notification_setting', + 
'update_org', + 'update_org_custodians', + 'update_org_members', + 'update_project', + 'update_reusable_record_transform', + 'update_runtime', + 'update_self_signup_blocked_domain', + 'update_team', + 'update_user', + 'update_user_quarantine_data_export_settings', + 'upsert_data_map_entries', + 'user_24_hour_flow_stats', + 'user_account_metrics_total', + 'user_metrics_daily', + 'verify_email' +] + + +OPERATION_MAP: Dict[str, OperationSpec] = { + 'acknowledge_async_task': { + 'method': 'POST', + 'path': '/async_tasks/{task_id}/acknowledge', + 'tags': ['Async Tasks'], + 'summary': 'Acknowledge async operation', + 'path_params': ['task_id'], + }, + 'activate_data_sink': { + 'method': 'PUT', + 'path': '/data_sinks/{sink_id}/activate', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Activate a Sink', + 'path_params': ['sink_id'], + }, + 'activate_nexset': { + 'method': 'PUT', + 'path': '/data_sets/{set_id}/activate', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Activate Nexset', + 'path_params': ['set_id'], + }, + 'activate_runtime': { + 'method': 'PUT', + 'path': '/runtimes/{runtime_id}/activate', + 'tags': ['Custom Runtimes'], + 'summary': 'Activate a Custom Runtime', + 'path_params': ['runtime_id'], + }, + 'activate_source': { + 'method': 'PUT', + 'path': '/data_sources/{source_id}/activate', + 'tags': ['Sources'], + 'summary': 'Activate a Source', + 'path_params': ['source_id'], + }, + 'add_code_container_accessors': { + 'method': 'PUT', + 'path': '/code_containers/{code_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Code Container', + 'path_params': ['code_container_id'], + }, + 'add_data_credential_accessors': { + 'method': 'PUT', + 'path': '/data_credentials/{data_credential_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Data Credential', + 'path_params': ['data_credential_id'], + }, + 'add_data_map_accessors': { + 'method': 'PUT', + 'path': 
'/data_maps/{data_map_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Data Map', + 'path_params': ['data_map_id'], + }, + 'add_data_schema_accessors': { + 'method': 'PUT', + 'path': '/data_schemas/{data_schema_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Data Schema', + 'path_params': ['data_schema_id'], + }, + 'add_data_sink_accessors': { + 'method': 'PUT', + 'path': '/data_sinks/{data_sink_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Data Sink', + 'path_params': ['data_sink_id'], + }, + 'add_data_source_accessors': { + 'method': 'PUT', + 'path': '/data_sources/{data_source_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Data Source', + 'path_params': ['data_source_id'], + }, + 'add_doc_container_accessors': { + 'method': 'PUT', + 'path': '/doc_containers/{doc_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Document', + 'path_params': ['doc_container_id'], + }, + 'add_domain_custodians': { + 'method': 'POST', + 'path': '/marketplace/domains/{domain_id}/custodians', + 'tags': ['Marketplace'], + 'summary': 'Add custodians to a marketplace domain.', + 'path_params': ['domain_id'], + }, + 'add_flow_accessors': { + 'method': 'PUT', + 'path': '/flows/{flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Flow', + 'path_params': ['flow_id'], + }, + 'add_flow_accessors__deprecated': { + 'method': 'PUT', + 'path': '/data_flows/{data_flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Flow (Deprecated)', + 'path_params': ['data_flow_id'], + }, + 'add_nexset_accessors': { + 'method': 'PUT', + 'path': '/data_sets/{data_set_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Access Rules on Nexset', + 'path_params': ['data_set_id'], + }, + 'add_org_custodians': { + 'method': 'POST', + 'path': '/orgs/{org_id}/custodians', + 
'tags': ['Organizations'], + 'summary': 'Add organization custodians.', + 'path_params': ['org_id'], + }, + 'add_project_accessors': { + 'method': 'PUT', + 'path': '/projects/{project_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Project Accessors', + 'path_params': ['project_id'], + }, + 'add_project_flows': { + 'method': 'PUT', + 'path': '/projects/{project_id}/flows', + 'tags': ['Projects'], + 'summary': 'Add Flows to Project', + 'path_params': ['project_id'], + }, + 'add_project_flows__deprecated': { + 'method': 'PUT', + 'path': '/projects/{project_id}/data_flows', + 'tags': ['Projects'], + 'summary': 'Add Flows to Project (Deprecated)', + 'path_params': ['project_id'], + }, + 'add_self_signup_blocked_domain': { + 'method': 'POST', + 'path': '/self_signup_blocked_domains', + 'tags': ['Self Sign-Up Admin'], + 'summary': 'Add self-sign-up blocked domain for admins.', + 'path_params': [], + }, + 'add_team_accessors': { + 'method': 'PUT', + 'path': '/teams/{team_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Add Team Accessors', + 'path_params': ['team_id'], + }, + 'add_team_members': { + 'method': 'PUT', + 'path': '/teams/{team_id}/members', + 'tags': ['Teams'], + 'summary': 'Add Members to A Team', + 'path_params': ['team_id'], + }, + 'approve_approval_request': { + 'method': 'PUT', + 'path': '/approval_requests/{request_id}/approve', + 'tags': ['Approval Requests'], + 'summary': 'Approve pending approval requests', + 'path_params': ['request_id'], + }, + 'approve_self_sign_up_request': { + 'method': 'PUT', + 'path': '/self_signup_requests/{request_id}/approve', + 'tags': ['Self Sign-Up Admin'], + 'summary': 'Approve Self Sign Up Request', + 'path_params': ['request_id'], + }, + 'check_data_map_entries': { + 'method': 'GET', + 'path': '/data_maps/{data_map_id}/entries/{entry_keys}', + 'tags': ['Data Maps'], + 'summary': 'Check Data Map Entries', + 'path_params': ['data_map_id', 'entry_keys'], + }, + 'copy_code_container': { + 
'method': 'POST', + 'path': '/code_containers/{code_container_id}/copy', + 'tags': ['Code Containers'], + 'summary': 'Copy a Code Container', + 'path_params': ['code_container_id'], + }, + 'copy_data_sink_source': { + 'method': 'POST', + 'path': '/data_sinks/{sink_id}/copy', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Copy a Sink', + 'path_params': ['sink_id'], + }, + 'copy_nexset': { + 'method': 'POST', + 'path': '/data_sets/{set_id}/copy', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Copy Nexset', + 'path_params': ['set_id'], + }, + 'copy_source': { + 'method': 'POST', + 'path': '/data_sources/{source_id}/copy', + 'tags': ['Sources'], + 'summary': 'Copy a Source', + 'path_params': ['source_id'], + }, + 'copy_transform': { + 'method': 'POST', + 'path': '/transforms/{transform_id}/copy', + 'tags': ['Transforms'], + 'summary': 'Copy a Reusable Record Transform', + 'path_params': ['transform_id'], + }, + 'create_api_auth_config': { + 'method': 'POST', + 'path': '/api_auth_configs', + 'tags': ['Org authentication configs'], + 'summary': 'Create auth config.', + 'path_params': [], + }, + 'create_async_task': { + 'method': 'POST', + 'path': '/async_tasks', + 'tags': ['Async Tasks'], + 'summary': 'Create an async operation.', + 'path_params': [], + }, + 'create_attribute_transform': { + 'method': 'POST', + 'path': '/attribute_transforms', + 'tags': ['Transforms'], + 'summary': 'Create an Attribute Transform', + 'path_params': [], + }, + 'create_code_container': { + 'method': 'POST', + 'path': '/code_containers', + 'tags': ['Code Containers'], + 'summary': 'Create a Code Container', + 'path_params': [], + }, + 'create_data_credential': { + 'method': 'POST', + 'path': '/data_credentials', + 'tags': ['Credentials'], + 'summary': 'Create a Credential', + 'path_params': [], + }, + 'create_data_sink': { + 'method': 'POST', + 'path': '/data_sinks', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Create a Sink', + 'path_params': [], + }, + 
'create_data_source': { + 'method': 'POST', + 'path': '/data_sources', + 'tags': ['Sources'], + 'summary': 'Create a Source', + 'path_params': [], + }, + 'create_domain': { + 'method': 'POST', + 'path': '/marketplace/domains/{domain_id}', + 'tags': ['Marketplace'], + 'summary': 'Create a single marketplace domain.', + 'path_params': ['domain_id'], + }, + 'create_domain_item': { + 'method': 'POST', + 'path': '/marketplace/domains/{domain_id}/items', + 'tags': ['Marketplace'], + 'summary': 'Create a marketplace item for a domain.', + 'path_params': ['domain_id'], + }, + 'create_domains': { + 'method': 'POST', + 'path': '/marketplace/domains', + 'tags': ['Marketplace'], + 'summary': 'Create marketplace domains.', + 'path_params': [], + }, + 'create_gen_ai_config': { + 'method': 'POST', + 'path': '/gen_ai_integration_configs', + 'tags': ['GenAI Configurations'], + 'summary': 'Create a GenAI config', + 'path_params': [], + }, + 'create_gen_ai_org_setting': { + 'method': 'POST', + 'path': '/gen_ai_org_settings', + 'tags': ['GenAI Configurations'], + 'summary': 'Create a binding of GenAI config for the org for specific usage.', + 'path_params': [], + }, + 'create_nexset': { + 'method': 'POST', + 'path': '/data_sets', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Create a Nexset', + 'path_params': [], + }, + 'create_notification_channel_setting': { + 'method': 'POST', + 'path': '/notification_channel_settings', + 'tags': ['Notifications'], + 'summary': 'Create a Notification Channel Setting', + 'path_params': [], + }, + 'create_notification_setting': { + 'method': 'POST', + 'path': '/notification_settings', + 'tags': ['Notifications'], + 'summary': 'Create a Notification Setting', + 'path_params': [], + }, + 'create_project': { + 'method': 'POST', + 'path': '/projects', + 'tags': ['Projects'], + 'summary': 'Create a project', + 'path_params': [], + }, + 'create_quarantine_data_export_settings': { + 'method': 'POST', + 'path': '/users/{user_id}/quarantine_settings', + 
'tags': ['Quarantine Settings'], + 'summary': 'Set Quarantine Data Export Settings for A User', + 'path_params': ['user_id'], + }, + 'create_reusable_record_transform': { + 'method': 'POST', + 'path': '/transforms', + 'tags': ['Transforms'], + 'summary': 'Create a Reusable Record Transform', + 'path_params': [], + }, + 'create_runtime': { + 'method': 'POST', + 'path': '/runtimes', + 'tags': ['Custom Runtimes'], + 'summary': 'Create a Custom Runtime', + 'path_params': [], + }, + 'create_static_data_map': { + 'method': 'POST', + 'path': '/data_maps', + 'tags': ['Data Maps'], + 'summary': 'Create a Static Data Map', + 'path_params': [], + }, + 'create_team': { + 'method': 'POST', + 'path': '/teams', + 'tags': ['Teams'], + 'summary': 'Create a team', + 'path_params': [], + }, + 'create_user': { + 'method': 'POST', + 'path': '/users', + 'tags': ['Users'], + 'summary': 'Create a User', + 'path_params': [], + }, + 'data_credential_probe': { + 'method': 'GET', + 'path': '/data_credentials/{credential_id}/probe', + 'tags': ['Credentials'], + 'summary': 'Test credential validity', + 'path_params': ['credential_id'], + }, + 'data_set_docs_recommendation': { + 'method': 'POST', + 'path': '/data_sets/{data_set_id}/docs/recommendation', + 'tags': ['Nexsets (Data Sets)', 'Gen AI Recommendations'], + 'summary': 'Generate an AI suggestion for Nexset documentation', + 'path_params': ['data_set_id'], + }, + 'delete_all_notifications': { + 'method': 'DELETE', + 'path': '/notifications/all', + 'tags': ['Notifications'], + 'summary': 'Delete All Notifications', + 'path_params': [], + }, + 'delete_api_auth_config': { + 'method': 'DELETE', + 'path': '/api_auth_configs/{auth_config_id}', + 'tags': ['Org authentication configs'], + 'summary': 'Delete auth config.', + 'path_params': ['auth_config_id'], + }, + 'delete_async_task': { + 'method': 'DELETE', + 'path': '/async_tasks/{task_id}', + 'tags': ['Async Tasks'], + 'summary': 'Delete async operation by ID', + 'path_params': ['task_id'], + 
}, + 'delete_attribute_transform': { + 'method': 'DELETE', + 'path': '/attribute_transforms/{attribute_transform_id}', + 'tags': ['Transforms'], + 'summary': 'Delete an Attribute Transform', + 'path_params': ['attribute_transform_id'], + }, + 'delete_code_container': { + 'method': 'DELETE', + 'path': '/code_containers/{code_container_id}', + 'tags': ['Code Containers'], + 'summary': 'Delete a Code Container', + 'path_params': ['code_container_id'], + }, + 'delete_code_container_accessors': { + 'method': 'DELETE', + 'path': '/code_containers/{code_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Code Container', + 'path_params': ['code_container_id'], + }, + 'delete_data_credential': { + 'method': 'DELETE', + 'path': '/data_credentials/{credential_id}', + 'tags': ['Credentials'], + 'summary': 'Delete a Credential', + 'path_params': ['credential_id'], + }, + 'delete_data_credential_accessors': { + 'method': 'DELETE', + 'path': '/data_credentials/{data_credential_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Data Credential', + 'path_params': ['data_credential_id'], + }, + 'delete_data_map': { + 'method': 'DELETE', + 'path': '/data_maps/{data_map_id}', + 'tags': ['Data Maps'], + 'summary': 'Delete a Data Map', + 'path_params': ['data_map_id'], + }, + 'delete_data_map_accessors': { + 'method': 'DELETE', + 'path': '/data_maps/{data_map_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Data Map', + 'path_params': ['data_map_id'], + }, + 'delete_data_map_entries': { + 'method': 'DELETE', + 'path': '/data_maps/{data_map_id}/entries/{entry_keys}', + 'tags': ['Data Maps'], + 'summary': 'Delete Data Map Entries', + 'path_params': ['data_map_id', 'entry_keys'], + }, + 'delete_data_schema_accessors': { + 'method': 'DELETE', + 'path': '/data_schemas/{data_schema_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Data Schema', + 
'path_params': ['data_schema_id'], + }, + 'delete_data_sink': { + 'method': 'DELETE', + 'path': '/data_sinks/{sink_id}', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Delete a Sink', + 'path_params': ['sink_id'], + }, + 'delete_data_sink_accessors': { + 'method': 'DELETE', + 'path': '/data_sinks/{data_sink_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Data Sink', + 'path_params': ['data_sink_id'], + }, + 'delete_data_source': { + 'method': 'DELETE', + 'path': '/data_sources/{source_id}', + 'tags': ['Sources'], + 'summary': 'Delete a Source', + 'path_params': ['source_id'], + }, + 'delete_data_source_accessors': { + 'method': 'DELETE', + 'path': '/data_sources/{data_source_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Data Source', + 'path_params': ['data_source_id'], + }, + 'delete_doc_container_accessors': { + 'method': 'DELETE', + 'path': '/doc_containers/{doc_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Document', + 'path_params': ['doc_container_id'], + }, + 'delete_domain': { + 'method': 'DELETE', + 'path': '/marketplace/domains/{domain_id}', + 'tags': ['Marketplace'], + 'summary': 'Delete a single marketplace domain.', + 'path_params': ['domain_id'], + }, + 'delete_flow': { + 'method': 'DELETE', + 'path': '/flows/{flow_id}', + 'tags': ['Flows'], + 'summary': 'Delete a Flow', + 'path_params': ['flow_id'], + }, + 'delete_flow_accessors': { + 'method': 'DELETE', + 'path': '/flows/{flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Flow', + 'path_params': ['flow_id'], + }, + 'delete_flow_accessors__deprecated': { + 'method': 'DELETE', + 'path': '/data_flows/{data_flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Flow (Deprecated)', + 'path_params': ['data_flow_id'], + }, + 'delete_flow_by_resource_id': { + 'method': 'DELETE', + 'path': 
'/{resource_type}/{resource_id}/flow', + 'tags': ['Flows'], + 'summary': 'Delete a Flow (by Resource ID)', + 'path_params': ['resource_id', 'resource_type'], + }, + 'delete_gen_ai_integration_config': { + 'method': 'DELETE', + 'path': '/gen_ai_integration_configs/{gen_ai_config_id}', + 'tags': ['GenAI Configs'], + 'summary': 'Delete GenAI Integration Config', + 'path_params': ['gen_ai_config_id'], + }, + 'delete_gen_ai_org_setting': { + 'method': 'DELETE', + 'path': '/gen_ai_org_settings/{gen_ai_org_setting_id}', + 'tags': ['GenAI Configurations'], + 'summary': 'Delete GenAI Config binding for org.', + 'path_params': ['gen_ai_org_setting_id'], + }, + 'delete_nexset': { + 'method': 'DELETE', + 'path': '/data_sets/{set_id}', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Delete a Nexset', + 'path_params': ['set_id'], + }, + 'delete_nexset_accessors': { + 'method': 'DELETE', + 'path': '/data_sets/{data_set_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Access Rules on Nexset', + 'path_params': ['data_set_id'], + }, + 'delete_notification_channel_setting': { + 'method': 'DELETE', + 'path': '/notification_channel_settings/{notification_channel_setting_id}', + 'tags': ['Notifications'], + 'summary': 'Delete a Notification Channel Setting', + 'path_params': ['notification_channel_setting_id'], + }, + 'delete_notification_setting': { + 'method': 'DELETE', + 'path': '/notification_settings/{notification_setting_id}', + 'tags': ['Notifications'], + 'summary': 'Delete a Notification Setting', + 'path_params': ['notification_setting_id'], + }, + 'delete_notifications': { + 'method': 'DELETE', + 'path': '/notifications/{notification_id}', + 'tags': ['Notifications'], + 'summary': 'Delete a Notification', + 'path_params': ['notification_id'], + }, + 'delete_org_members': { + 'method': 'DELETE', + 'path': '/orgs/{org_id}/members', + 'tags': ['Organizations'], + 'summary': 'Remove Members from an Organization.', + 'path_params': ['org_id'], + }, + 
'delete_project': { + 'method': 'DELETE', + 'path': '/projects/{project_id}', + 'tags': ['Projects'], + 'summary': 'Delete Project by ID', + 'path_params': ['project_id'], + }, + 'delete_project_accessors': { + 'method': 'DELETE', + 'path': '/projects/{project_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Project Accessors', + 'path_params': ['project_id'], + }, + 'delete_reusable_record_transform': { + 'method': 'DELETE', + 'path': '/transforms/{transform_id}', + 'tags': ['Transforms'], + 'summary': 'Delete a Reusable Record Transform', + 'path_params': ['transform_id'], + }, + 'delete_runtime': { + 'method': 'DELETE', + 'path': '/runtimes/{runtime_id}', + 'tags': ['Custom Runtimes'], + 'summary': 'Delete a Custom Runtime', + 'path_params': ['runtime_id'], + }, + 'delete_self_signup_blocked_domain': { + 'method': 'DELETE', + 'path': '/self_signup_blocked_domains/{domain_id}', + 'tags': ['Self Sign-Up Admin'], + 'summary': 'Delete self-sign-up blocked domain for admins.', + 'path_params': ['domain_id'], + }, + 'delete_team': { + 'method': 'DELETE', + 'path': '/teams/{team_id}', + 'tags': ['Teams'], + 'summary': 'Delete Team by ID', + 'path_params': ['team_id'], + }, + 'delete_team_accessors': { + 'method': 'DELETE', + 'path': '/teams/{team_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Delete Team Accessors', + 'path_params': ['team_id'], + }, + 'delete_team_members': { + 'method': 'DELETE', + 'path': '/teams/{team_id}/members', + 'tags': ['Teams'], + 'summary': 'Remove Team Members', + 'path_params': ['team_id'], + }, + 'delete_user_quarantine_data_export_settings': { + 'method': 'DELETE', + 'path': '/users/{user_id}/quarantine_settings', + 'tags': ['Quarantine Settings'], + 'summary': 'Delete Quarantine Data Export Settings for A User', + 'path_params': ['user_id'], + }, + 'flow_activate_with_flow_id': { + 'method': 'PUT', + 'path': '/flows/{flow_id}/activate', + 'tags': ['Flows'], + 'summary': 'Activate a Flow', + 'path_params': 
['flow_id'], + }, + 'flow_activate_with_resource_id': { + 'method': 'PUT', + 'path': '/{resource_type}/{resource_id}/activate', + 'tags': ['Flows'], + 'summary': 'Activate a Flow (with Resource ID)', + 'path_params': ['resource_id', 'resource_type'], + }, + 'flow_copy_with_flow_id': { + 'method': 'POST', + 'path': '/flows/{flow_id}/copy', + 'tags': ['Flows'], + 'summary': 'Copy a Flow', + 'path_params': ['flow_id'], + }, + 'flow_docs_recommendation': { + 'method': 'POST', + 'path': '/flows/{flow_id}/docs/recommendation', + 'tags': ['Flows', 'Gen AI Recommendations'], + 'summary': 'Generate an AI suggestion for flow documentation', + 'path_params': ['flow_id'], + }, + 'flow_pause_with_flow_id': { + 'method': 'PUT', + 'path': '/flows/{flow_id}/pause', + 'tags': ['Flows'], + 'summary': 'Pause a Flow', + 'path_params': ['flow_id'], + }, + 'flow_pause_with_resource_id': { + 'method': 'PUT', + 'path': '/{resource_type}/{resource_id}/pause', + 'tags': ['Flows'], + 'summary': 'Pause a Flow (with Resource ID)', + 'path_params': ['resource_id', 'resource_type'], + }, + 'gen_ai_org_settings_show_active': { + 'method': 'GET', + 'path': '/gen_ai_org_settings/active_config', + 'tags': ['GenAI Configurations'], + 'summary': 'Shows active GenAI Configuration for specific usage', + 'path_params': [], + }, + 'get_all_api_auth_configs': { + 'method': 'GET', + 'path': '/api_auth_configs/all', + 'tags': ['Org authentication configs'], + 'summary': 'Get all auth configs.', + 'path_params': [], + }, + 'get_api_auth_configs': { + 'method': 'GET', + 'path': '/api_auth_configs', + 'tags': ['Org authentication configs'], + 'summary': 'Get auth configs.', + 'path_params': [], + }, + 'get_api_auth_configs_2': { + 'method': 'GET', + 'path': '/api_auth_configs/{auth_config_id}', + 'tags': ['Org authentication configs'], + 'summary': 'Get auth configs.', + 'path_params': ['auth_config_id'], + }, + 'get_api_auth_settings': { + 'method': 'GET', + 'path': '/orgs/{org_id}/auth_settings', + 'tags': 
['Org authentication configs'], + 'summary': 'Get auth settings for org.', + 'path_params': ['org_id'], + }, + 'get_async_task': { + 'method': 'GET', + 'path': '/async_tasks/{task_id}', + 'tags': ['Async Tasks'], + 'summary': 'Get async operation by ID', + 'path_params': ['task_id'], + }, + 'get_async_task_download_link': { + 'method': 'GET', + 'path': '/async_tasks/{task_id}/download_link', + 'tags': ['Async Tasks'], + 'summary': 'Get download link for async operation result', + 'path_params': ['task_id'], + }, + 'get_async_task_result': { + 'method': 'GET', + 'path': '/async_tasks/{task_id}/result', + 'tags': ['Async Tasks'], + 'summary': 'Get async operation result', + 'path_params': ['task_id'], + }, + 'get_async_task_types': { + 'method': 'GET', + 'path': '/async_tasks/types', + 'tags': ['Async Tasks'], + 'summary': 'Get async operation types', + 'path_params': [], + }, + 'get_async_tasks': { + 'method': 'GET', + 'path': '/async_tasks', + 'tags': ['Async Tasks'], + 'summary': 'Get async operations list for current user.', + 'path_params': [], + }, + 'get_async_tasks_by_status': { + 'method': 'GET', + 'path': '/async_tasks/by_status/{status}', + 'tags': ['Async Tasks'], + 'summary': 'Get async operations list for current user by status', + 'path_params': ['status'], + }, + 'get_async_tasks_explain_arguments': { + 'method': 'GET', + 'path': '/async_tasks/explain_arguments/{task_type}', + 'tags': ['Async Tasks'], + 'summary': 'Get async operation arguments for a specific type with descriptions', + 'path_params': ['task_type'], + }, + 'get_async_tasks_of_type': { + 'method': 'GET', + 'path': '/async_tasks/of_type/{task_type}', + 'tags': ['Async Tasks'], + 'summary': 'Get async operations list for current user of a specific type.', + 'path_params': ['task_type'], + }, + 'get_attribute_transform': { + 'method': 'GET', + 'path': '/attribute_transforms/{attribute_transform_id}', + 'tags': ['Transforms'], + 'summary': 'Get Attribute Transform by ID', + 'path_params': 
['attribute_transform_id'], + }, + 'get_attribute_transforms': { + 'method': 'GET', + 'path': '/attribute_transforms', + 'tags': ['Transforms'], + 'summary': 'Get all Attribute Transforms', + 'path_params': [], + }, + 'get_code_container': { + 'method': 'GET', + 'path': '/code_containers/{code_container_id}', + 'tags': ['Code Containers'], + 'summary': 'Get Code Container by ID', + 'path_params': ['code_container_id'], + }, + 'get_code_container_accessors': { + 'method': 'GET', + 'path': '/code_containers/{code_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Code Container', + 'path_params': ['code_container_id'], + }, + 'get_code_container_audit_log': { + 'method': 'GET', + 'path': '/code_containers/{code_container_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Code Container', + 'path_params': ['code_container_id'], + }, + 'get_code_containers': { + 'method': 'GET', + 'path': '/code_containers', + 'tags': ['Code Containers'], + 'summary': 'Get all Code Containers', + 'path_params': [], + }, + 'get_current_user': { + 'method': 'GET', + 'path': '/users/current', + 'tags': ['Session Management', 'Users'], + 'summary': 'Get info on current user', + 'path_params': [], + }, + 'get_data_credential': { + 'method': 'GET', + 'path': '/data_credentials/{credential_id}', + 'tags': ['Credentials'], + 'summary': 'Get Credential by ID', + 'path_params': ['credential_id'], + }, + 'get_data_credential_accessors': { + 'method': 'GET', + 'path': '/data_credentials/{data_credential_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Data Credential', + 'path_params': ['data_credential_id'], + }, + 'get_data_credential_audit_log': { + 'method': 'GET', + 'path': '/data_credentials/{credential_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Data Credential', + 'path_params': ['credential_id'], + }, + 'get_data_credential_expanded': { + 'method': 'GET', + 
'path': '/data_credentials/{credential_id}?expand=1', + 'tags': ['Credentials'], + 'summary': 'Get Credential by ID with expanded references', + 'path_params': ['credential_id'], + }, + 'get_data_credentials': { + 'method': 'GET', + 'path': '/data_credentials', + 'tags': ['Credentials'], + 'summary': 'Get All Credentials', + 'path_params': [], + }, + 'get_data_map': { + 'method': 'GET', + 'path': '/data_maps/{data_map_id}', + 'tags': ['Data Maps'], + 'summary': 'Get Data Map by ID', + 'path_params': ['data_map_id'], + }, + 'get_data_map_accessors': { + 'method': 'GET', + 'path': '/data_maps/{data_map_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Data Map', + 'path_params': ['data_map_id'], + }, + 'get_data_map_audit_log': { + 'method': 'GET', + 'path': '/data_maps/{data_map_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Data Map', + 'path_params': ['data_map_id'], + }, + 'get_data_maps': { + 'method': 'GET', + 'path': '/data_maps', + 'tags': ['Data Maps'], + 'summary': 'Get all Data Maps', + 'path_params': [], + }, + 'get_data_schema_accessors': { + 'method': 'GET', + 'path': '/data_schemas/{data_schema_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Data Schema', + 'path_params': ['data_schema_id'], + }, + 'get_data_schema_audit_log': { + 'method': 'GET', + 'path': '/data_schemas/{schema_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Data Schema', + 'path_params': ['schema_id'], + }, + 'get_data_sink': { + 'method': 'GET', + 'path': '/data_sinks/{sink_id}', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Get Sink by ID', + 'path_params': ['sink_id'], + }, + 'get_data_sink_accessors': { + 'method': 'GET', + 'path': '/data_sinks/{data_sink_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Data Sink', + 'path_params': ['data_sink_id'], + }, + 'get_data_sink_audit_log': { + 'method': 'GET', + 'path': 
'/data_sinks/{sink_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Data Sink', + 'path_params': ['sink_id'], + }, + 'get_data_sink_expanded': { + 'method': 'GET', + 'path': '/data_sinks/{sink_id}?expand=1', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Get Sink by ID with Expanded References', + 'path_params': ['sink_id'], + }, + 'get_data_sinks': { + 'method': 'GET', + 'path': '/data_sinks', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Get All Sinks', + 'path_params': [], + }, + 'get_data_source': { + 'method': 'GET', + 'path': '/data_sources/{source_id}', + 'tags': ['Sources'], + 'summary': 'Get Source by ID', + 'path_params': ['source_id'], + }, + 'get_data_source_accessors': { + 'method': 'GET', + 'path': '/data_sources/{data_source_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Data Source', + 'path_params': ['data_source_id'], + }, + 'get_data_source_audit_log': { + 'method': 'GET', + 'path': '/data_sources/{source_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Data Source', + 'path_params': ['source_id'], + }, + 'get_data_source_expanded': { + 'method': 'GET', + 'path': '/data_sources/{source_id}?expand=1', + 'tags': ['Sources'], + 'summary': 'Get Source by ID with Expanded References', + 'path_params': ['source_id'], + }, + 'get_data_sources': { + 'method': 'GET', + 'path': '/data_sources', + 'tags': ['Sources'], + 'summary': 'Get All Sources', + 'path_params': [], + }, + 'get_doc_container_accessors': { + 'method': 'GET', + 'path': '/doc_containers/{doc_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Document', + 'path_params': ['doc_container_id'], + }, + 'get_doc_container_audit_log': { + 'method': 'GET', + 'path': '/doc_containers/{doc_container_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Document', + 'path_params': ['doc_container_id'], + }, + 'get_domain': { + 'method': 
'GET', + 'path': '/marketplace/domains/{domain_id}', + 'tags': ['Marketplace'], + 'summary': 'Get a single marketplace domain.', + 'path_params': ['domain_id'], + }, + 'get_domain_custodians': { + 'method': 'GET', + 'path': '/marketplace/domains/{domain_id}/custodians', + 'tags': ['Marketplace'], + 'summary': 'Get custodians for a marketplace domain.', + 'path_params': ['domain_id'], + }, + 'get_domain_items': { + 'method': 'GET', + 'path': '/marketplace/domains/{domain_id}/items', + 'tags': ['Marketplace'], + 'summary': 'Get marketplace items for a domain.', + 'path_params': ['domain_id'], + }, + 'get_domains': { + 'method': 'GET', + 'path': '/marketplace/domains', + 'tags': ['Marketplace'], + 'summary': 'Get marketplace domains.', + 'path_params': [], + }, + 'get_domains_for_org': { + 'method': 'GET', + 'path': '/marketplace/domains/for_org', + 'tags': ['Marketplace'], + 'summary': 'Get marketplace domains for organization.', + 'path_params': [], + }, + 'get_flow_accessors': { + 'method': 'GET', + 'path': '/flows/{flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Flow', + 'path_params': ['flow_id'], + }, + 'get_flow_accessors__deprecated': { + 'method': 'GET', + 'path': '/data_flows/{data_flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Flow (Deprecated)', + 'path_params': ['data_flow_id'], + }, + 'get_flow_by_id': { + 'method': 'GET', + 'path': '/flows/{flow_id}', + 'tags': ['Flows'], + 'summary': 'Get Flow by ID', + 'path_params': ['flow_id'], + }, + 'get_flow_by_resource_id': { + 'method': 'GET', + 'path': '/{resource_type}/{resource_id}/flow', + 'tags': ['Flows'], + 'summary': 'Get Flow (by Resource ID)', + 'path_params': ['resource_id', 'resource_type'], + }, + 'get_flow_logs_for_run_id': { + 'method': 'GET', + 'path': '/data_flows/{resource_type}/{resource_id}/logs', + 'tags': ['Metrics'], + 'summary': 'Get Flow Execution Logs for Run ID of a Flow', + 'path_params': ['resource_id', 
'resource_type'], + }, + 'get_flow_metrics': { + 'method': 'GET', + 'path': '/data_flows/{resource_type}/{resource_id}/metrics', + 'tags': ['Metrics'], + 'summary': 'Get Metrics for a Flow', + 'path_params': ['resource_id', 'resource_type'], + }, + 'get_flows': { + 'method': 'GET', + 'path': '/flows', + 'tags': ['Flows'], + 'summary': 'Get All Flows', + 'path_params': [], + }, + 'get_gen_ai_configs': { + 'method': 'GET', + 'path': '/gen_ai_integration_configs', + 'tags': ['GenAI Configurations'], + 'summary': 'Get all GenAI configs in org', + 'path_params': [], + }, + 'get_gen_ai_integration_config': { + 'method': 'GET', + 'path': '/gen_ai_integration_configs/{gen_ai_config_id}', + 'tags': ['GenAI Configurations'], + 'summary': 'Get GenAI Integration Config', + 'path_params': ['gen_ai_config_id'], + }, + 'get_gen_ai_org_setting': { + 'method': 'GET', + 'path': '/gen_ai_org_settings/{gen_ai_org_setting_id}', + 'tags': ['GenAI Configuration'], + 'summary': 'Get Org GenAI binding', + 'path_params': ['gen_ai_org_setting_id'], + }, + 'get_gen_ai_org_settings': { + 'method': 'GET', + 'path': '/gen_ai_org_settings', + 'tags': ['GenAI Configurations'], + 'summary': 'Get all bindings of GenAI configs of the org for specified usages.', + 'path_params': [], + }, + 'get_nexset': { + 'method': 'GET', + 'path': '/data_sets/{set_id}', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Get a Nexset', + 'path_params': ['set_id'], + }, + 'get_nexset_accessors': { + 'method': 'GET', + 'path': '/data_sets/{data_set_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Access Rules on Nexset', + 'path_params': ['data_set_id'], + }, + 'get_nexset_audit_log': { + 'method': 'GET', + 'path': '/data_sets/{set_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Nexset', + 'path_params': ['set_id'], + }, + 'get_nexset_samples': { + 'method': 'GET', + 'path': '/data_sets/{set_id}/samples', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Get Nexset Samples', + 
'path_params': ['set_id'], + }, + 'get_nexsets': { + 'method': 'GET', + 'path': '/data_sets', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Get All Nexsets', + 'path_params': [], + }, + 'get_notification': { + 'method': 'GET', + 'path': '/notifications/{notification_id}', + 'tags': ['Notifications'], + 'summary': 'Get a Notification', + 'path_params': ['notification_id'], + }, + 'get_notification_channel_setting': { + 'method': 'GET', + 'path': '/notification_channel_settings/{notification_channel_setting_id}', + 'tags': ['Notifications'], + 'summary': 'Get a Notification Channel Setting', + 'path_params': ['notification_channel_setting_id'], + }, + 'get_notification_count': { + 'method': 'GET', + 'path': '/notifications/count', + 'tags': ['Notifications'], + 'summary': 'Get Notifications Count', + 'path_params': [], + }, + 'get_notification_setting': { + 'method': 'GET', + 'path': '/notification_settings/{notification_setting_id}', + 'tags': ['Notifications'], + 'summary': 'Get a Notification Setting', + 'path_params': ['notification_setting_id'], + }, + 'get_notification_types': { + 'method': 'GET', + 'path': '/notification_types', + 'tags': ['Notifications'], + 'summary': 'Get All Notification Types', + 'path_params': [], + }, + 'get_notifications': { + 'method': 'GET', + 'path': '/notifications', + 'tags': ['Notifications'], + 'summary': 'Get All Notifications', + 'path_params': [], + }, + 'get_org': { + 'method': 'GET', + 'path': '/orgs/{org_id}', + 'tags': ['Organizations'], + 'summary': 'Get Organization by ID', + 'path_params': ['org_id'], + }, + 'get_org_audit_log': { + 'method': 'GET', + 'path': '/orgs/{org_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for an Organization', + 'path_params': ['org_id'], + }, + 'get_org_custodians': { + 'method': 'GET', + 'path': '/orgs/{org_id}/custodians', + 'tags': ['Organizations'], + 'summary': 'Get organization custodians.', + 'path_params': ['org_id'], + }, + 'get_org_members': { + 
'method': 'GET', + 'path': '/orgs/{org_id}/members', + 'tags': ['Organizations'], + 'summary': 'Get All Members in Organization', + 'path_params': ['org_id'], + }, + 'get_orgs': { + 'method': 'GET', + 'path': '/orgs', + 'tags': ['Organizations'], + 'summary': 'Get all Organizations', + 'path_params': [], + }, + 'get_pending_approval_requests': { + 'method': 'GET', + 'path': '/approval_requests/pending', + 'tags': ['Approval Requests'], + 'summary': 'Get all pending approval requests.', + 'path_params': [], + }, + 'get_project': { + 'method': 'GET', + 'path': '/projects/{project_id}', + 'tags': ['Projects'], + 'summary': 'Get Project by ID', + 'path_params': ['project_id'], + }, + 'get_project_accessors': { + 'method': 'GET', + 'path': '/projects/{project_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Project Accessors', + 'path_params': ['project_id'], + }, + 'get_project_audit_log': { + 'method': 'GET', + 'path': '/projects/{project_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Project', + 'path_params': ['project_id'], + }, + 'get_project_flows': { + 'method': 'GET', + 'path': '/projects/{project_id}/flows', + 'tags': ['Projects'], + 'summary': 'Get Project Flows', + 'path_params': ['project_id'], + }, + 'get_project_flows__deprecated': { + 'method': 'GET', + 'path': '/projects/{project_id}/data_flows', + 'tags': ['Projects'], + 'summary': 'Get Project Flows (Deprecated)', + 'path_params': ['project_id'], + }, + 'get_projects': { + 'method': 'GET', + 'path': '/projects', + 'tags': ['Projects'], + 'summary': 'Get all Projects', + 'path_params': [], + }, + 'get_public_attribute_transforms': { + 'method': 'GET', + 'path': '/attribute_transforms/public', + 'tags': ['Transforms'], + 'summary': 'Get all Public Attribute Transforms', + 'path_params': [], + }, + 'get_public_code_containers': { + 'method': 'GET', + 'path': '/code_containers/public', + 'tags': ['Code Containers'], + 'summary': 'Get all Public Code Containers', 
+ 'path_params': [], + }, + 'get_public_reusable_record_transforms': { + 'method': 'GET', + 'path': '/transforms/public', + 'tags': ['Transforms'], + 'summary': 'Get all Public Reusable Record Transforms', + 'path_params': [], + }, + 'get_requested_approval_requests': { + 'method': 'GET', + 'path': '/approval_requests/requested', + 'tags': ['Approval Requests'], + 'summary': 'Get all requested approval requests by the user.', + 'path_params': [], + }, + 'get_resource_metrics_by_run': { + 'method': 'GET', + 'path': '/{resource_type}/{resource_id}/metrics/run_summary', + 'tags': ['Metrics'], + 'summary': 'Get Metrics By Run ID for a Resource of a Flow', + 'path_params': ['resource_id', 'resource_type'], + }, + 'get_resource_metrics_daily': { + 'method': 'GET', + 'path': '/{resource_type}/{resource_id}/metrics', + 'tags': ['Metrics'], + 'summary': 'Get Daily Metrics for a Resource of a Flow', + 'path_params': ['resource_id', 'resource_type'], + }, + 'get_reusable_record_transform': { + 'method': 'GET', + 'path': '/transforms/{transform_id}', + 'tags': ['Transforms'], + 'summary': 'Get A Reusable Record Transform', + 'path_params': ['transform_id'], + }, + 'get_reusable_record_transforms': { + 'method': 'GET', + 'path': '/transforms', + 'tags': ['Transforms'], + 'summary': 'Get all Reusable Record Transforms', + 'path_params': [], + }, + 'get_runtime': { + 'method': 'GET', + 'path': '/runtimes/{runtime_id}', + 'tags': ['Custom Runtimes'], + 'summary': 'Get a custom runtime by ID', + 'path_params': ['runtime_id'], + }, + 'get_runtimes': { + 'method': 'GET', + 'path': '/runtimes', + 'tags': ['Custom Runtimes'], + 'summary': 'Get all Custom Runtimes', + 'path_params': [], + }, + 'get_self_signup_blocked_domains': { + 'method': 'GET', + 'path': '/self_signup_blocked_domains', + 'tags': ['Self Sign-Up Admin'], + 'summary': 'List self-sign-up blocked domains for admins.', + 'path_params': [], + }, + 'get_self_signup_requests': { + 'method': 'GET', + 'path': 
'/self_signup_requests', + 'tags': ['Self Sign-Up Admin'], + 'summary': 'List Self Sign Up Requests', + 'path_params': [], + }, + 'get_team': { + 'method': 'GET', + 'path': '/teams/{team_id}', + 'tags': ['Teams'], + 'summary': 'Get Team by ID', + 'path_params': ['team_id'], + }, + 'get_team_accessors': { + 'method': 'GET', + 'path': '/teams/{team_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Get Team Accessors', + 'path_params': ['team_id'], + }, + 'get_team_audit_log': { + 'method': 'GET', + 'path': '/teams/{team_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a Team', + 'path_params': ['team_id'], + }, + 'get_team_members': { + 'method': 'GET', + 'path': '/teams/{team_id}/members', + 'tags': ['Teams'], + 'summary': 'Get Team Members', + 'path_params': ['team_id'], + }, + 'get_teams': { + 'method': 'GET', + 'path': '/teams', + 'tags': ['Teams'], + 'summary': 'Get all Teams', + 'path_params': [], + }, + 'get_user': { + 'method': 'GET', + 'path': '/users/{user_id}', + 'tags': ['Users'], + 'summary': 'Get User by ID', + 'path_params': ['user_id'], + }, + 'get_user_audit_log': { + 'method': 'GET', + 'path': '/users/{user_id}/audit_log', + 'tags': ['Audit Logs'], + 'summary': 'Get Audit Log for a User', + 'path_params': ['user_id'], + }, + 'get_user_expand': { + 'method': 'GET', + 'path': '/users/{user_id}?expand=1', + 'tags': ['Users'], + 'summary': 'Get User by ID with Expanded References', + 'path_params': ['user_id'], + }, + 'get_user_quarantine_data_export_settings': { + 'method': 'GET', + 'path': '/users/{user_id}/quarantine_settings', + 'tags': ['Quarantine Settings'], + 'summary': 'Get Quarantine Data Export Settings for A User', + 'path_params': ['user_id'], + }, + 'get_user_settings': { + 'method': 'GET', + 'path': '/user_settings', + 'tags': ['User Settings'], + 'summary': "Get the current user's settings", + 'path_params': [], + }, + 'get_users': { + 'method': 'GET', + 'path': '/users', + 'tags': ['Users'], + 
'summary': 'Get All Users', + 'path_params': [], + }, + 'get_users_expand': { + 'method': 'GET', + 'path': '/users?expand=1', + 'tags': ['Users'], + 'summary': 'Get All Users with Expanded References', + 'path_params': [], + }, + 'limits': { + 'method': 'GET', + 'path': '/limits', + 'tags': ['Limits'], + 'summary': 'Get current rate limit and usage', + 'path_params': [], + }, + 'list_notification_channel_settings': { + 'method': 'GET', + 'path': '/notification_channel_settings', + 'tags': ['Notifications'], + 'summary': 'List Notification Channel Settings', + 'path_params': [], + }, + 'list_notification_settings': { + 'method': 'GET', + 'path': '/notification_settings', + 'tags': ['Notifications'], + 'summary': 'List Notification Settings', + 'path_params': [], + }, + 'list_notification_settings_by_type': { + 'method': 'GET', + 'path': '/notification_settings/notification_types/{notification_type_id}', + 'tags': ['Notifications'], + 'summary': 'Get Notification Settings for an Event', + 'path_params': ['notification_type_id'], + }, + 'list_notification_type': { + 'method': 'GET', + 'path': '/notification_types/list', + 'tags': ['Notifications'], + 'summary': 'Get One Notification Type', + 'path_params': [], + }, + 'list_resource_notification_settings': { + 'method': 'GET', + 'path': '/notification_settings/{resource_type}/{resource_id}', + 'tags': ['Notifications'], + 'summary': 'Get Notification Settings For a Resource', + 'path_params': ['resource_id', 'resource_type'], + }, + 'login_with_basic_auth': { + 'method': 'POST', + 'path': '/token', + 'tags': ['Session Management'], + 'summary': 'Login with Basic Authentication', + 'path_params': [], + }, + 'logout': { + 'method': 'POST', + 'path': '/token/logout', + 'tags': ['Session Management'], + 'summary': 'Logout', + 'path_params': [], + }, + 'notifications_mark_read': { + 'method': 'PUT', + 'path': '/notifications/mark_read', + 'tags': ['Notifications'], + 'summary': 'Mark Notification Read', + 'path_params': [], 
+ }, + 'notifications_mark_unread': { + 'method': 'PUT', + 'path': '/notifications/mark_unread', + 'tags': ['Notifications'], + 'summary': 'Mark Notification Unread', + 'path_params': [], + }, + 'org_account_metrics_total': { + 'method': 'GET', + 'path': '/orgs/{org_id}/flows/account_metrics', + 'tags': ['Metrics'], + 'summary': 'Get Total Account Metrics for An Organization', + 'path_params': ['org_id'], + }, + 'pause_data_sink': { + 'method': 'PUT', + 'path': '/data_sinks/{sink_id}/pause', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Pause a Sink', + 'path_params': ['sink_id'], + }, + 'pause_nexset': { + 'method': 'PUT', + 'path': '/data_sets/{set_id}/pause', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Pause Nexset', + 'path_params': ['set_id'], + }, + 'pause_runtime': { + 'method': 'PUT', + 'path': '/runtimes/{runtime_id}/pause', + 'tags': ['Custom Runtimes'], + 'summary': 'Pause a Custom Runtime', + 'path_params': ['runtime_id'], + }, + 'pause_source': { + 'method': 'PUT', + 'path': '/data_sources/{source_id}/pause', + 'tags': ['Sources'], + 'summary': 'Pause a Source', + 'path_params': ['source_id'], + }, + 'preview_connector_content': { + 'method': 'POST', + 'path': '/data_credentials/{credential_id}/probe/sample', + 'tags': ['Credentials'], + 'summary': 'Preview Connector Content', + 'path_params': ['credential_id'], + }, + 'preview_storage_structure': { + 'method': 'POST', + 'path': '/data_credentials/{credential_id}/probe/tree', + 'tags': ['Credentials'], + 'summary': 'Preview Storage Structure', + 'path_params': ['credential_id'], + }, + 'reject_approval_request': { + 'method': 'DELETE', + 'path': '/approval_requests/{request_id}/reject', + 'tags': ['Approval Requests'], + 'summary': 'Reject pending approval requests', + 'path_params': ['request_id'], + }, + 'remove_domain_custodians': { + 'method': 'DELETE', + 'path': '/marketplace/domains/{domain_id}/custodians', + 'tags': ['Marketplace'], + 'summary': 'Remove custodians from a 
marketplace domain.', + 'path_params': ['domain_id'], + }, + 'remove_org_custodians': { + 'method': 'DELETE', + 'path': '/orgs/{org_id}/custodians', + 'tags': ['Organizations'], + 'summary': 'Remove organization custodians.', + 'path_params': ['org_id'], + }, + 'remove_project_flows': { + 'method': 'DELETE', + 'path': '/projects/{project_id}/flows', + 'tags': ['Projects'], + 'summary': 'Remove Flows From A Project', + 'path_params': ['project_id'], + }, + 'remove_project_flows__deprecated': { + 'method': 'DELETE', + 'path': '/projects/{project_id}/data_flows', + 'tags': ['Projects'], + 'summary': 'Remove Flows From A Project (Deprecated)', + 'path_params': ['project_id'], + }, + 'replace_code_container_accessors': { + 'method': 'POST', + 'path': '/code_containers/{code_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Code Container', + 'path_params': ['code_container_id'], + }, + 'replace_data_credential_accessors': { + 'method': 'POST', + 'path': '/data_credentials/{data_credential_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Data Credential', + 'path_params': ['data_credential_id'], + }, + 'replace_data_map_accessors': { + 'method': 'POST', + 'path': '/data_maps/{data_map_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Data Map', + 'path_params': ['data_map_id'], + }, + 'replace_data_schema_accessors': { + 'method': 'POST', + 'path': '/data_schemas/{data_schema_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Data Schema', + 'path_params': ['data_schema_id'], + }, + 'replace_data_sink_accessors': { + 'method': 'POST', + 'path': '/data_sinks/{data_sink_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Data Sink', + 'path_params': ['data_sink_id'], + }, + 'replace_data_source_accessors': { + 'method': 'POST', + 'path': '/data_sources/{data_source_id}/accessors', + 'tags': 
['Access Control'], + 'summary': 'Replace Access Rules on Data Source', + 'path_params': ['data_source_id'], + }, + 'replace_doc_container_accessors': { + 'method': 'POST', + 'path': '/doc_containers/{doc_container_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Document', + 'path_params': ['doc_container_id'], + }, + 'replace_flow_accessors': { + 'method': 'POST', + 'path': '/flows/{flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Flow', + 'path_params': ['flow_id'], + }, + 'replace_flow_accessors__deprecated': { + 'method': 'POST', + 'path': '/data_flows/{data_flow_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Flow (Deprecated)', + 'path_params': ['data_flow_id'], + }, + 'replace_nexset_accessors': { + 'method': 'POST', + 'path': '/data_sets/{data_set_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Nexset', + 'path_params': ['data_set_id'], + }, + 'replace_project_accessors': { + 'method': 'POST', + 'path': '/projects/{project_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Access Rules on Project', + 'path_params': ['project_id'], + }, + 'replace_project_flows': { + 'method': 'POST', + 'path': '/projects/{project_id}/flows', + 'tags': ['Projects'], + 'summary': 'Replace Project Flows List', + 'path_params': ['project_id'], + }, + 'replace_project_flows__deprecated': { + 'method': 'POST', + 'path': '/projects/{project_id}/data_flows', + 'tags': ['Projects'], + 'summary': 'Replace Project Flows List (Deprecated)', + 'path_params': ['project_id'], + }, + 'replace_team_accessors': { + 'method': 'POST', + 'path': '/teams/{team_id}/accessors', + 'tags': ['Access Control'], + 'summary': 'Replace Team Accessors List', + 'path_params': ['team_id'], + }, + 'replace_team_members': { + 'method': 'POST', + 'path': '/teams/{team_id}/members', + 'tags': ['Teams'], + 'summary': 'Replace Team Members List', + 
'path_params': ['team_id'], + }, + 'rerun_async_task': { + 'method': 'POST', + 'path': '/async_tasks/{task_id}/rerun', + 'tags': ['Async Tasks'], + 'summary': 'Rerun async operation', + 'path_params': ['task_id'], + }, + 'self_sign_up': { + 'method': 'POST', + 'path': '/signup', + 'tags': ['Self Sign-Up'], + 'summary': 'Sign Up', + 'path_params': [], + }, + 'update_api_auth_config': { + 'method': 'PUT', + 'path': '/api_auth_configs/{auth_config_id}', + 'tags': ['Org authentication configs'], + 'summary': 'Update auth config.', + 'path_params': ['auth_config_id'], + }, + 'update_api_auth_config_2': { + 'method': 'PUT', + 'path': '/orgs/{org_id}/auth_settings/{auth_setting_id}', + 'tags': ['Org authentication configs'], + 'summary': 'Update auth config (enable/disable).', + 'path_params': ['auth_setting_id', 'org_id'], + }, + 'update_attribute_transform': { + 'method': 'PUT', + 'path': '/attribute_transforms/{attribute_transform_id}', + 'tags': ['Transforms'], + 'summary': 'Update Attribute Transform', + 'path_params': ['attribute_transform_id'], + }, + 'update_code_container': { + 'method': 'PUT', + 'path': '/code_containers/{code_container_id}', + 'tags': ['Code Containers'], + 'summary': 'Update a Code Container', + 'path_params': ['code_container_id'], + }, + 'update_data_credential': { + 'method': 'PUT', + 'path': '/data_credentials/{credential_id}', + 'tags': ['Credentials'], + 'summary': 'Update Credential', + 'path_params': ['credential_id'], + }, + 'update_data_map_metadata': { + 'method': 'PUT', + 'path': '/data_maps/{data_map_id}', + 'tags': ['Data Maps'], + 'summary': 'Update Data Map Metadata', + 'path_params': ['data_map_id'], + }, + 'update_data_sink': { + 'method': 'PUT', + 'path': '/data_sinks/{sink_id}', + 'tags': ['Destinations (Data Sinks)'], + 'summary': 'Update Sink', + 'path_params': ['sink_id'], + }, + 'update_data_source': { + 'method': 'PUT', + 'path': '/data_sources/{source_id}', + 'tags': ['Sources'], + 'summary': 'Update a Source', + 
'path_params': ['source_id'], + }, + 'update_domain': { + 'method': 'PUT', + 'path': '/marketplace/domains/{domain_id}', + 'tags': ['Marketplace'], + 'summary': 'Update a single marketplace domain.', + 'path_params': ['domain_id'], + }, + 'update_domain_custodians': { + 'method': 'PUT', + 'path': '/marketplace/domains/{domain_id}/custodians', + 'tags': ['Marketplace'], + 'summary': 'Update custodians for a marketplace domain.', + 'path_params': ['domain_id'], + }, + 'update_gen_ai_integration_config': { + 'method': 'PUT', + 'path': '/gen_ai_integration_configs/{gen_ai_config_id}', + 'tags': ['GenAI Configs'], + 'summary': 'Update GenAI Integration Config', + 'path_params': ['gen_ai_config_id'], + }, + 'update_nexset': { + 'method': 'PUT', + 'path': '/data_sets/{set_id}', + 'tags': ['Nexsets (Data Sets)'], + 'summary': 'Update a Nexset', + 'path_params': ['set_id'], + }, + 'update_notification_channel_setting': { + 'method': 'PUT', + 'path': '/notification_channel_settings/{notification_channel_setting_id}', + 'tags': ['Notifications'], + 'summary': 'Update a Notification Channel Setting', + 'path_params': ['notification_channel_setting_id'], + }, + 'update_notification_setting': { + 'method': 'PUT', + 'path': '/notification_settings/{notification_setting_id}', + 'tags': ['Notifications'], + 'summary': 'Modify a Notification Setting', + 'path_params': ['notification_setting_id'], + }, + 'update_org': { + 'method': 'PUT', + 'path': '/orgs/{org_id}', + 'tags': ['Organizations'], + 'summary': 'Update an Organization', + 'path_params': ['org_id'], + }, + 'update_org_custodians': { + 'method': 'PUT', + 'path': '/orgs/{org_id}/custodians', + 'tags': ['Organizations'], + 'summary': 'Update organization custodians.', + 'path_params': ['org_id'], + }, + 'update_org_members': { + 'method': 'PUT', + 'path': '/orgs/{org_id}/members', + 'tags': ['Organizations'], + 'summary': 'Update Organization Members', + 'path_params': ['org_id'], + }, + 'update_project': { + 'method': 
'PUT', + 'path': '/projects/{project_id}', + 'tags': ['Projects'], + 'summary': 'Modify a Project', + 'path_params': ['project_id'], + }, + 'update_reusable_record_transform': { + 'method': 'PUT', + 'path': '/transforms/{transform_id}', + 'tags': ['Transforms'], + 'summary': 'Update Reusable Record Transform', + 'path_params': ['transform_id'], + }, + 'update_runtime': { + 'method': 'PUT', + 'path': '/runtimes/{runtime_id}', + 'tags': ['Custom Runtimes'], + 'summary': 'Update a Custom Runtime', + 'path_params': ['runtime_id'], + }, + 'update_self_signup_blocked_domain': { + 'method': 'PUT', + 'path': '/self_signup_blocked_domains/{domain_id}', + 'tags': ['Self Sign-Up Admin'], + 'summary': 'Update self-sign-up blocked domain for admins.', + 'path_params': ['domain_id'], + }, + 'update_team': { + 'method': 'PUT', + 'path': '/teams/{team_id}', + 'tags': ['Teams'], + 'summary': 'Modify a Team', + 'path_params': ['team_id'], + }, + 'update_user': { + 'method': 'PUT', + 'path': '/users/{user_id}', + 'tags': ['Users'], + 'summary': 'Modify a User', + 'path_params': ['user_id'], + }, + 'update_user_quarantine_data_export_settings': { + 'method': 'PUT', + 'path': '/users/{user_id}/quarantine_settings', + 'tags': ['Quarantine Settings'], + 'summary': 'Update Quarantine Data Export Settings for A User', + 'path_params': ['user_id'], + }, + 'upsert_data_map_entries': { + 'method': 'PUT', + 'path': '/data_maps/{data_map_id}/entries', + 'tags': ['Data Maps'], + 'summary': 'Upsert Static Data Map Entries', + 'path_params': ['data_map_id'], + }, + 'user_24_hour_flow_stats': { + 'method': 'GET', + 'path': '/users/{user_id}/flows/dashboard', + 'tags': ['Metrics'], + 'summary': 'Get 24 Hour Flow Stats for a User', + 'path_params': ['user_id'], + }, + 'user_account_metrics_total': { + 'method': 'GET', + 'path': '/users/{user_id}/flows/account_metrics', + 'tags': ['Metrics'], + 'summary': 'Get Total Account Metrics for a User', + 'path_params': ['user_id'], + }, + 
'user_metrics_daily': { + 'method': 'GET', + 'path': '/users/{user_id}/metrics', + 'tags': ['Metrics'], + 'summary': 'Get Daily Data Processing Metrics for a User', + 'path_params': ['user_id'], + }, + 'verify_email': { + 'method': 'GET', + 'path': '/signup/verify_email', + 'tags': ['Self Sign-Up'], + 'summary': 'Verify Email', + 'path_params': [], + }, +} diff --git a/nexla_sdk/generated/schema.py b/nexla_sdk/generated/schema.py new file mode 100644 index 0000000..cc9ed41 --- /dev/null +++ b/nexla_sdk/generated/schema.py @@ -0,0 +1,22 @@ +"""Schema typing helpers for generated operation-level access.""" + +from typing import Any, Dict, List, TypedDict, Union + +JSONPrimitive = Union[str, int, float, bool, None] +JSONValue = Union[JSONPrimitive, Dict[str, "JSONValue"], List["JSONValue"]] +JSONObject = Dict[str, JSONValue] + + +class RawRequest(TypedDict, total=False): + path_params: Dict[str, Any] + query: Dict[str, Any] + body: JSONObject + headers: Dict[str, str] + + +class RawResponse(TypedDict, total=False): + status: str + message: str + data: Any + errors: List[str] + metadata: Dict[str, Any] diff --git a/nexla_sdk/models/__init__.py b/nexla_sdk/models/__init__.py index 3ff2f19..14c96d7 100644 --- a/nexla_sdk/models/__init__.py +++ b/nexla_sdk/models/__init__.py @@ -12,6 +12,12 @@ UserAccessorRequest, UserAccessorResponse, ) +from nexla_sdk.models.api_keys import ( + ApiKey, + ApiKeyCreate, + ApiKeyUpdate, + ApiKeysIndex, +) from nexla_sdk.models.approval_requests import ApprovalDecision, ApprovalRequest from nexla_sdk.models.async_tasks import ( AsyncTask, @@ -24,13 +30,50 @@ AttributeTransformCreate, AttributeTransformUpdate, ) +from nexla_sdk.models.auth_templates import ( + AuthParameter, # Deprecated alias for AuthTemplateParameter + AuthParameterCreate, + AuthTemplate, + AuthTemplateCreate, + AuthTemplateParameter, + AuthTemplateUpdate, +) +from nexla_sdk.models.auth_parameters import ( + AuthParameter as AuthParameterResource, + AuthParameterCreate as 
AuthParameterResourceCreate, + AuthParameterUpdate as AuthParameterResourceUpdate, +) +from nexla_sdk.models.resource_parameters import ( + ResourceParameter, + ResourceParameterCreate, + ResourceParameterUpdate, +) +from nexla_sdk.models.vendor_endpoints import ( + VendorEndpoint, + VendorEndpointCreate, + VendorEndpointUpdate, +) from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.clusters import ( + Cluster, + ClusterCreate, + ClusterEndpoint, + ClusterEndpointCreate, + ClusterEndpointItem, + ClusterEndpointRef, + ClusterEndpointUpdate, + ClusterUpdate, +) from nexla_sdk.models.code_containers import ( CodeContainer, CodeContainerCreate, CodeContainerUpdate, ) from nexla_sdk.models.common import Connector, FlowNode, LogEntry, Organization, Owner +from nexla_sdk.models.connectors import ( + Connector as ConnectorDefinition, + ConnectorUpdate, +) # Import all models from subpackages from nexla_sdk.models.credentials import ( @@ -45,6 +88,17 @@ VerifiedStatus, ) from nexla_sdk.models.data_schemas import DataSchema +from nexla_sdk.models.custom_data_flows import ( + CustomDataFlow, + CustomDataFlowCreate, + CustomDataFlowUpdate, +) +from nexla_sdk.models.data_credentials_groups import ( + DataCredentialsGroup, + DataCredentialsGroupCreate, + DataCredentialsGroupUpdate, + DataCredentialsGroupRemoveCredentials, +) from nexla_sdk.models.destinations import ( DataMapInfo, DataSetInfo, @@ -57,6 +111,35 @@ DestinationUpdate, ) from nexla_sdk.models.doc_containers import DocContainer +from nexla_sdk.models.notification_channel_settings import ( + NotificationChannelSetting as NotificationChannelSettingResource, + NotificationChannelSettingCreate as NotificationChannelSettingResourceCreate, + NotificationChannelSettingUpdate as NotificationChannelSettingResourceUpdate, +) +from nexla_sdk.models.notification_types import NotificationType as NotificationTypeResource +from nexla_sdk.models.quarantine_settings import ( + QuarantineSetting, + 
QuarantineSettingCreate, + QuarantineSettingUpdate, +) +from nexla_sdk.models.dashboard_transforms import ( + DashboardTransform, + DashboardTransformCreate, + DashboardTransformUpdate, +) +from nexla_sdk.models.catalog_configs import ( + CatalogConfig, + CatalogConfigCreate, + CatalogConfigUpdate, +) +from nexla_sdk.models.catalog_refs import ( + CatalogRef, + CatalogRefCreate, + CatalogRefUpdate, +) +from nexla_sdk.models.org_tiers import OrgTier as OrgTierInfo +from nexla_sdk.models.user_tiers import UserTier as UserTierInfo +from nexla_sdk.models.user_settings import UserSetting, UserSettingCreate, UserSettingUpdate from nexla_sdk.models.enums import ( AccessRole, ConnectorCategory, @@ -68,6 +151,18 @@ UserStatus, UserTier, ) +from nexla_sdk.models.flexible_enums import ( + FlexibleConnectorCategory, + FlexibleCredentialType, + FlexibleDestinationFormat, + FlexibleDestinationType, + FlexibleEnum, + FlexibleSourceType, +) +from nexla_sdk.models.flow_triggers import ( + FlowTrigger, + FlowTriggerCreate, +) from nexla_sdk.models.flows import ( DocsRecommendation, FlowCopyOptions, @@ -129,6 +224,7 @@ NotificationChannelSettingUpdate, NotificationCount, NotificationSetting, + NotificationSettingBrief, NotificationSettingCreate, NotificationSettingUpdate, NotificationType, @@ -155,6 +251,11 @@ ) from nexla_sdk.models.runtimes import Runtime, RuntimeCreate, RuntimeUpdate from nexla_sdk.models.self_signup import BlockedDomain, SelfSignupRequest +from nexla_sdk.models.service_keys import ( + ServiceKey, + ServiceKeyCreate, + ServiceKeyUpdate, +) from nexla_sdk.models.sources import ( DataSetBrief, FlowType, @@ -181,11 +282,25 @@ DefaultOrg, OrgMembership, User, + UserCredit, + UserCreditCreate, UserCreate, UserExpanded, UserSettings, UserUpdate, ) +from nexla_sdk.models.validators import ( + Validator, + ValidatorCopyOptions, + ValidatorCreate, + ValidatorUpdate, +) +from nexla_sdk.models.vendors import ( + Vendor, + VendorCreate, + VendorRef, + VendorUpdate, +) from 
nexla_sdk.models.webhooks import WebhookResponse, WebhookSendOptions __all__ = [ @@ -209,6 +324,9 @@ "AccessorRequestList", "AccessorResponseList", "AccessorType", + # API Keys + "ApiKey", + "ApiKeysIndex", # General Enums "AccessRole", "ResourceStatus", @@ -219,6 +337,35 @@ "UserStatus", "OrgMembershipStatus", "ConnectorCategory", + # Auth Templates + "AuthTemplate", + "AuthTemplateCreate", + "AuthTemplateUpdate", + "AuthTemplateParameter", + "AuthParameterCreate", + "AuthParameter", # Deprecated - use AuthTemplateParameter instead + # Clusters + "Cluster", + "ClusterCreate", + "ClusterUpdate", + "ClusterEndpoint", + "ClusterEndpointCreate", + "ClusterEndpointUpdate", + "ClusterEndpointItem", + "ClusterEndpointRef", + # Connectors + "ConnectorDefinition", + "ConnectorUpdate", + # Flexible Enum helpers + "FlexibleEnum", + "FlexibleDestinationType", + "FlexibleDestinationFormat", + "FlexibleSourceType", + "FlexibleCredentialType", + "FlexibleConnectorCategory", + # Flow Triggers + "FlowTrigger", + "FlowTriggerCreate", # Credential models and enums "CredentialType", "VerifiedStatus", @@ -336,6 +483,16 @@ "Transform", "TransformCreate", "TransformUpdate", + # Validators + "Validator", + "ValidatorCreate", + "ValidatorUpdate", + "ValidatorCopyOptions", + # Vendors + "Vendor", + "VendorCreate", + "VendorUpdate", + "VendorRef", # Attribute transforms "AttributeTransform", "AttributeTransformCreate", @@ -371,9 +528,65 @@ # Self-signup "SelfSignupRequest", "BlockedDomain", + # Service Keys + "ServiceKey", + "ServiceKeyCreate", + "ServiceKeyUpdate", # Doc containers / Data schemas "DocContainer", "DataSchema", + # Custom data flows + "CustomDataFlow", + "CustomDataFlowCreate", + "CustomDataFlowUpdate", + # API keys (requests) + "ApiKeyCreate", + "ApiKeyUpdate", + # Data credentials groups + "DataCredentialsGroup", + "DataCredentialsGroupCreate", + "DataCredentialsGroupUpdate", + "DataCredentialsGroupRemoveCredentials", + # Auth/resource parameters + "AuthParameterResource", 
+ "AuthParameterResourceCreate", + "AuthParameterResourceUpdate", + "ResourceParameter", + "ResourceParameterCreate", + "ResourceParameterUpdate", + # Vendor endpoints + "VendorEndpoint", + "VendorEndpointCreate", + "VendorEndpointUpdate", + # Notification channel/types (resource-level) + "NotificationChannelSettingResource", + "NotificationChannelSettingResourceCreate", + "NotificationChannelSettingResourceUpdate", + "NotificationTypeResource", + # Quarantine settings + "QuarantineSetting", + "QuarantineSettingCreate", + "QuarantineSettingUpdate", + # Dashboard transforms + "DashboardTransform", + "DashboardTransformCreate", + "DashboardTransformUpdate", + # Catalog configs/refs + "CatalogConfig", + "CatalogConfigCreate", + "CatalogConfigUpdate", + "CatalogRef", + "CatalogRefCreate", + "CatalogRefUpdate", + # Org/User tiers and settings + "OrgTierInfo", + "UserTierInfo", + "UserSetting", + "UserSettingCreate", + "UserSettingUpdate", + # User credits + "UserCredit", + "UserCreditCreate", # Webhooks "WebhookSendOptions", "WebhookResponse", diff --git a/nexla_sdk/models/api_keys/__init__.py b/nexla_sdk/models/api_keys/__init__.py new file mode 100644 index 0000000..7322d3c --- /dev/null +++ b/nexla_sdk/models/api_keys/__init__.py @@ -0,0 +1,11 @@ +"""API Keys models.""" + +from nexla_sdk.models.api_keys.requests import ApiKeyCreate, ApiKeyUpdate +from nexla_sdk.models.api_keys.responses import ApiKey, ApiKeysIndex + +__all__ = [ + "ApiKey", + "ApiKeysIndex", + "ApiKeyCreate", + "ApiKeyUpdate", +] diff --git a/nexla_sdk/models/api_keys/requests.py b/nexla_sdk/models/api_keys/requests.py new file mode 100644 index 0000000..3fb1dca --- /dev/null +++ b/nexla_sdk/models/api_keys/requests.py @@ -0,0 +1,17 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class ApiKeyCreate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + scope: Optional[str] = None + expires_at: Optional[str] = None + + +class 
ApiKeyUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + scope: Optional[str] = None + expires_at: Optional[str] = None diff --git a/nexla_sdk/models/api_keys/responses.py b/nexla_sdk/models/api_keys/responses.py new file mode 100644 index 0000000..f98fe40 --- /dev/null +++ b/nexla_sdk/models/api_keys/responses.py @@ -0,0 +1,43 @@ +"""API Keys response models.""" + +from datetime import datetime +from typing import List, Optional + +from nexla_sdk.models.base import BaseModel + + +class ApiKey(BaseModel): + """API key response model. + + API keys are used for programmatic access to specific resources + like datasets, data sources, data sinks, and users. + """ + + id: int + owner_id: Optional[int] = None + org_id: Optional[int] = None + data_set_id: Optional[int] = None + data_source_id: Optional[int] = None + data_sink_id: Optional[int] = None + user_id: Optional[int] = None + cluster_id: Optional[int] = None # Super user only + cluster_uid: Optional[str] = None # Super user only + name: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + scope: Optional[str] = None + api_key: Optional[str] = None + url: Optional[str] = None + last_rotated_key: Optional[str] = None + last_rotated_at: Optional[datetime] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None + + +class ApiKeysIndex(BaseModel): + """Response model for API keys index (grouped by type).""" + + data_sets: Optional[List[ApiKey]] = None + data_sinks: Optional[List[ApiKey]] = None + data_sources: Optional[List[ApiKey]] = None + users: Optional[List[ApiKey]] = None diff --git a/nexla_sdk/models/auth_parameters/__init__.py b/nexla_sdk/models/auth_parameters/__init__.py new file mode 100644 index 0000000..05e3283 --- /dev/null +++ b/nexla_sdk/models/auth_parameters/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.auth_parameters.requests import ( + AuthParameterCreate, + AuthParameterUpdate, +) +from 
nexla_sdk.models.auth_parameters.responses import AuthParameter + +__all__ = [ + "AuthParameter", + "AuthParameterCreate", + "AuthParameterUpdate", +] diff --git a/nexla_sdk/models/auth_parameters/requests.py b/nexla_sdk/models/auth_parameters/requests.py new file mode 100644 index 0000000..5c15260 --- /dev/null +++ b/nexla_sdk/models/auth_parameters/requests.py @@ -0,0 +1,31 @@ +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.base import BaseModel + + +class AuthParameterCreate(BaseModel): + name: str + display_name: Optional[str] = None + description: Optional[str] = None + data_type: Optional[str] = None + order: Optional[int] = None + config: Optional[Dict[str, Any]] = None + secured: Optional[bool] = None + global_param: Optional[bool] = None + auth_template_id: Optional[int] = None + vendor_id: Optional[int] = None + allowed_values: Optional[List[Any]] = None + + +class AuthParameterUpdate(BaseModel): + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + data_type: Optional[str] = None + order: Optional[int] = None + config: Optional[Dict[str, Any]] = None + secured: Optional[bool] = None + global_param: Optional[bool] = None + auth_template_id: Optional[int] = None + vendor_id: Optional[int] = None + allowed_values: Optional[List[Any]] = None diff --git a/nexla_sdk/models/auth_parameters/responses.py b/nexla_sdk/models/auth_parameters/responses.py new file mode 100644 index 0000000..aa908fb --- /dev/null +++ b/nexla_sdk/models/auth_parameters/responses.py @@ -0,0 +1,27 @@ +from datetime import datetime +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from pydantic import Field + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.vendors.responses import Vendor + +if TYPE_CHECKING: + from nexla_sdk.models.auth_templates.responses import AuthTemplate + + +class AuthParameter(BaseModel): + id: int + name: Optional[str] = None + display_name: Optional[str] = 
None + description: Optional[str] = None + data_type: Optional[str] = None + order: Optional[int] = None + config: Optional[Dict[str, Any]] = None + secured: Optional[bool] = None + global_param: Optional[bool] = Field(default=None, alias="global") + vendor: Optional[Vendor] = None + auth_template: Optional["AuthTemplate"] = None + allowed_values: Optional[List[Any]] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/auth_templates/__init__.py b/nexla_sdk/models/auth_templates/__init__.py new file mode 100644 index 0000000..62f25c4 --- /dev/null +++ b/nexla_sdk/models/auth_templates/__init__.py @@ -0,0 +1,22 @@ +"""Auth template models.""" + +from nexla_sdk.models.auth_templates.requests import ( + AuthParameterCreate, + AuthTemplateCreate, + AuthTemplateUpdate, +) +from nexla_sdk.models.auth_templates.responses import ( + AuthParameter, # Deprecated alias for backward compatibility + AuthTemplate, + AuthTemplateParameter, +) + +__all__ = [ + "AuthTemplate", + "AuthTemplateCreate", + "AuthTemplateUpdate", + "AuthTemplateParameter", + "AuthParameterCreate", + # Deprecated - use AuthTemplateParameter instead + "AuthParameter", +] diff --git a/nexla_sdk/models/auth_templates/requests.py b/nexla_sdk/models/auth_templates/requests.py new file mode 100644 index 0000000..ddd6811 --- /dev/null +++ b/nexla_sdk/models/auth_templates/requests.py @@ -0,0 +1,37 @@ +"""Auth template request models.""" + +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.base import BaseModel + + +class AuthParameterCreate(BaseModel): + """Auth parameter for template creation.""" + + name: str + display_name: Optional[str] = None + description: Optional[str] = None + param_type: Optional[str] = None + required: Optional[bool] = None + default_value: Optional[Any] = None + config: Optional[Dict[str, Any]] = None + + +class AuthTemplateCreate(BaseModel): + """Request model for creating an auth template (super user 
only).""" + + name: str + vendor_id: int + display_name: Optional[str] = None + description: Optional[str] = None + config: Optional[Dict[str, Any]] = None + auth_parameters: Optional[List[AuthParameterCreate]] = None + + +class AuthTemplateUpdate(BaseModel): + """Request model for updating an auth template (super user only).""" + + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + config: Optional[Dict[str, Any]] = None diff --git a/nexla_sdk/models/auth_templates/responses.py b/nexla_sdk/models/auth_templates/responses.py new file mode 100644 index 0000000..26ea463 --- /dev/null +++ b/nexla_sdk/models/auth_templates/responses.py @@ -0,0 +1,49 @@ +"""Auth template response models.""" + +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import Field + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.vendors.responses import VendorRef + + +class AuthTemplateParameter(BaseModel): + """Auth parameter configuration for auth templates. + + This is a lightweight nested model used within AuthTemplate responses. + For the full auth parameter resource model, see nexla_sdk.models.auth_parameters. + """ + + id: Optional[int] = None + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + param_type: Optional[str] = None + required: Optional[bool] = None + default_value: Optional[Any] = None + config: Optional[Dict[str, Any]] = None + + +# Deprecated alias for backward compatibility +AuthParameter = AuthTemplateParameter + + +class AuthTemplate(BaseModel): + """Auth template response model. + + Auth templates define authentication configurations for vendors. 
+ """ + + id: int + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + config: Optional[Dict[str, Any]] = None + credentials_type: Optional[str] = None # From associated connector + vendor: Optional[VendorRef] = None + vendor_id: Optional[int] = None + auth_parameters: Optional[List[AuthTemplateParameter]] = Field(default_factory=list) + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/models/catalog_configs/__init__.py b/nexla_sdk/models/catalog_configs/__init__.py new file mode 100644 index 0000000..3512412 --- /dev/null +++ b/nexla_sdk/models/catalog_configs/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.catalog_configs.requests import ( + CatalogConfigCreate, + CatalogConfigUpdate, +) +from nexla_sdk.models.catalog_configs.responses import CatalogConfig + +__all__ = [ + "CatalogConfig", + "CatalogConfigCreate", + "CatalogConfigUpdate", +] diff --git a/nexla_sdk/models/catalog_configs/requests.py b/nexla_sdk/models/catalog_configs/requests.py new file mode 100644 index 0000000..74da22c --- /dev/null +++ b/nexla_sdk/models/catalog_configs/requests.py @@ -0,0 +1,21 @@ +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class CatalogConfigCreate(BaseModel): + name: str + description: Optional[str] = None + data_credentials_id: Optional[int] = None + config: Optional[Dict[str, Any]] = None + templates: Optional[Dict[str, Any]] = None + mode: Optional[str] = None + + +class CatalogConfigUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + data_credentials_id: Optional[int] = None + config: Optional[Dict[str, Any]] = None + templates: Optional[Dict[str, Any]] = None + mode: Optional[str] = None diff --git a/nexla_sdk/models/catalog_configs/responses.py b/nexla_sdk/models/catalog_configs/responses.py new file mode 100644 index 0000000..c1f789e --- /dev/null +++ 
b/nexla_sdk/models/catalog_configs/responses.py @@ -0,0 +1,22 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization, Owner +from nexla_sdk.models.credentials.responses import Credential + + +class CatalogConfig(BaseModel): + id: int + owner: Optional[Owner] = None + org: Optional[Organization] = None + data_credentials: Optional[Credential] = None + name: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None + config: Optional[Dict[str, Any]] = None + templates: Optional[Dict[str, Any]] = None + mode: Optional[str] = None + job_id: Optional[str] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/catalog_refs/__init__.py b/nexla_sdk/models/catalog_refs/__init__.py new file mode 100644 index 0000000..f3106f8 --- /dev/null +++ b/nexla_sdk/models/catalog_refs/__init__.py @@ -0,0 +1,8 @@ +from nexla_sdk.models.catalog_refs.requests import CatalogRefCreate, CatalogRefUpdate +from nexla_sdk.models.catalog_refs.responses import CatalogRef + +__all__ = [ + "CatalogRef", + "CatalogRefCreate", + "CatalogRefUpdate", +] diff --git a/nexla_sdk/models/catalog_refs/requests.py b/nexla_sdk/models/catalog_refs/requests.py new file mode 100644 index 0000000..d22af99 --- /dev/null +++ b/nexla_sdk/models/catalog_refs/requests.py @@ -0,0 +1,16 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class CatalogRefCreate(BaseModel): + data_set_id: int + catalog_config_id: int + reference_id: Optional[str] = None + link: Optional[str] = None + + +class CatalogRefUpdate(BaseModel): + reference_id: Optional[str] = None + link: Optional[str] = None + status: Optional[str] = None diff --git a/nexla_sdk/models/catalog_refs/responses.py b/nexla_sdk/models/catalog_refs/responses.py new file mode 100644 index 0000000..eb172b3 --- /dev/null +++ 
b/nexla_sdk/models/catalog_refs/responses.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.catalog_configs.responses import CatalogConfig + + +class CatalogRef(BaseModel): + id: int + data_set_id: Optional[int] = None + status: Optional[str] = None + reference_id: Optional[str] = None + link: Optional[str] = None + error_msg: Optional[str] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None + catalog_config: Optional[CatalogConfig] = None diff --git a/nexla_sdk/models/clusters/__init__.py b/nexla_sdk/models/clusters/__init__.py new file mode 100644 index 0000000..bec39ae --- /dev/null +++ b/nexla_sdk/models/clusters/__init__.py @@ -0,0 +1,25 @@ +"""Cluster models.""" + +from nexla_sdk.models.clusters.requests import ( + ClusterCreate, + ClusterEndpointCreate, + ClusterEndpointItem, + ClusterEndpointUpdate, + ClusterUpdate, +) +from nexla_sdk.models.clusters.responses import ( + Cluster, + ClusterEndpoint, + ClusterEndpointRef, +) + +__all__ = [ + "Cluster", + "ClusterCreate", + "ClusterUpdate", + "ClusterEndpoint", + "ClusterEndpointCreate", + "ClusterEndpointUpdate", + "ClusterEndpointItem", + "ClusterEndpointRef", +] diff --git a/nexla_sdk/models/clusters/requests.py b/nexla_sdk/models/clusters/requests.py new file mode 100644 index 0000000..9594ec3 --- /dev/null +++ b/nexla_sdk/models/clusters/requests.py @@ -0,0 +1,63 @@ +"""Cluster request models.""" + +from typing import List, Optional + +from nexla_sdk.models.base import BaseModel + + +class ClusterEndpointItem(BaseModel): + """Endpoint configuration for cluster create/update.""" + + service: str + protocol: Optional[str] = None + host: Optional[str] = None + port: Optional[int] = None + context: Optional[str] = None + header_host: Optional[str] = None + + +class ClusterCreate(BaseModel): + """Request model for creating a cluster.""" + + org_id: int + name: str + region: str 
# Required + description: Optional[str] = None + provider: Optional[str] = None # aws, gcp, azure, private + is_default: Optional[bool] = None + is_private: Optional[bool] = None + endpoints: Optional[List[ClusterEndpointItem]] = None + + +class ClusterUpdate(BaseModel): + """Request model for updating a cluster.""" + + name: Optional[str] = None + description: Optional[str] = None + provider: Optional[str] = None + is_default: Optional[bool] = None + is_private: Optional[bool] = None + endpoints: Optional[List[ClusterEndpointItem]] = None + + +class ClusterEndpointCreate(BaseModel): + """Request model for creating a cluster endpoint.""" + + cluster_id: int + service: str + protocol: Optional[str] = None + host: Optional[str] = None + port: Optional[int] = None + context: Optional[str] = None + header_host: Optional[str] = None + + +class ClusterEndpointUpdate(BaseModel): + """Request model for updating a cluster endpoint.""" + + service: Optional[str] = None + protocol: Optional[str] = None + host: Optional[str] = None + port: Optional[int] = None + context: Optional[str] = None + header_host: Optional[str] = None diff --git a/nexla_sdk/models/clusters/responses.py b/nexla_sdk/models/clusters/responses.py new file mode 100644 index 0000000..1609ec0 --- /dev/null +++ b/nexla_sdk/models/clusters/responses.py @@ -0,0 +1,64 @@ +"""Cluster response models.""" + +from datetime import datetime +from typing import List, Optional + +from pydantic import Field + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization + + +class ClusterEndpointRef(BaseModel): + """Cluster endpoint reference for cluster response.""" + + id: int + service: Optional[str] = None + protocol: Optional[str] = None + host: Optional[str] = None + port: Optional[int] = None + context: Optional[str] = None + header_host: Optional[str] = None + + +class Cluster(BaseModel): + """Cluster response model. 
+ + Clusters define infrastructure endpoints for processing data flows. + They contain multiple endpoints for different services. + """ + + id: int + org_id: Optional[int] = None + uid: Optional[str] = None + is_default: Optional[bool] = None + is_private: Optional[bool] = None + name: Optional[str] = None + description: Optional[str] = None + status: Optional[str] = None # INIT, ACTIVE, PAUSED + region: Optional[str] = None + provider: Optional[str] = None # aws, gcp, azure, private + org: Optional[Organization] = None + endpoints: Optional[List[ClusterEndpointRef]] = Field(default_factory=list) + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None + + +class ClusterEndpoint(BaseModel): + """Cluster endpoint response model. + + Endpoints define individual service connections within a cluster. + """ + + id: int + cluster_id: Optional[int] = None + org_id: Optional[int] = None + service: Optional[str] = None + protocol: Optional[str] = None + host: Optional[str] = None + port: Optional[int] = None + context: Optional[str] = None + header_host: Optional[str] = None + org: Optional[Organization] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/models/common.py b/nexla_sdk/models/common.py index 23ec243..2199b5d 100644 --- a/nexla_sdk/models/common.py +++ b/nexla_sdk/models/common.py @@ -2,6 +2,10 @@ from typing import Any, Dict, List, Optional from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.flexible_enums import ( + FlexibleConnectionType, + FlexibleConnectorType, +) class Owner(BaseModel): @@ -35,8 +39,8 @@ class Connector(BaseModel): """Connector information.""" id: int - type: str - connection_type: str + type: FlexibleConnectorType + connection_type: FlexibleConnectionType name: str description: str nexset_api_compatible: bool diff --git a/nexla_sdk/models/connectors/__init__.py b/nexla_sdk/models/connectors/__init__.py new file mode 100644 index 
0000000..7e8dfbe --- /dev/null +++ b/nexla_sdk/models/connectors/__init__.py @@ -0,0 +1,28 @@ +"""Connector models.""" + +from typing import TYPE_CHECKING, Any + +from nexla_sdk.models.connectors.enums import ConnectionType, ConnectorType + +if TYPE_CHECKING: + from nexla_sdk.models.connectors.requests import ConnectorUpdate + from nexla_sdk.models.connectors.responses import Connector + +__all__ = [ + "Connector", + "ConnectorType", + "ConnectionType", + "ConnectorUpdate", +] + + +def __getattr__(name: str) -> Any: + if name == "Connector": + from nexla_sdk.models.connectors.responses import Connector + + return Connector + if name == "ConnectorUpdate": + from nexla_sdk.models.connectors.requests import ConnectorUpdate + + return ConnectorUpdate + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/nexla_sdk/models/connectors/enums.py b/nexla_sdk/models/connectors/enums.py new file mode 100644 index 0000000..c40105f --- /dev/null +++ b/nexla_sdk/models/connectors/enums.py @@ -0,0 +1,106 @@ +"""Connector enums for the /connectors endpoint.""" + +from enum import Enum + + +class ConnectorType(str, Enum): + """All supported connector types. + + This is the comprehensive enum used by the /connectors endpoint. + It includes all source and destination connector types. 
+ """ + + # File Systems + S3 = "s3" + GCS = "gcs" + AZURE_BLB = "azure_blb" + AZURE_DATA_LAKE = "azure_data_lake" + FTP = "ftp" + DROPBOX = "dropbox" + BOX = "box" + GDRIVE = "gdrive" + SHAREPOINT = "sharepoint" + MIN_IO_S3 = "min_io_s3" + WEBDAV = "webdav" + + # Databases - Traditional RDBMS + MYSQL = "mysql" + POSTGRES = "postgres" + SUPABASE = "supabase" + SQLSERVER = "sqlserver" + ORACLE = "oracle" + ORACLE_AUTONOMOUS = "oracle_autonomous" + AS400 = "as400" + DB2 = "db2" + SYBASE = "sybase" + HANA_JDBC = "hana_jdbc" + NETSUITE_JDBC = "netsuite_jdbc" + + # Databases - Cloud Data Warehouses + REDSHIFT = "redshift" + SNOWFLAKE = "snowflake" + SNOWFLAKE_DCR = "snowflake_dcr" + BIGQUERY = "bigquery" + DATABRICKS = "databricks" + AWS_ATHENA = "aws_athena" + AZURE_SYNAPSE = "azure_synapse" + FIREBOLT = "firebolt" + TERADATA = "teradata" + HIVE = "hive" + + # Databases - Google Cloud SQL + CLOUDSQL_MYSQL = "cloudsql_mysql" + CLOUDSQL_POSTGRES = "cloudsql_postgres" + CLOUDSQL_SQLSERVER = "cloudsql_sqlserver" + + # Databases - Google Cloud + GCP_ALLOYDB = "gcp_alloydb" + GCP_SPANNER = "gcp_spanner" + + # Delta Lake / Iceberg + DELTA_LAKE_AZURE_BLB = "delta_lake_azure_blb" + DELTA_LAKE_AZURE_DATA_LAKE = "delta_lake_azure_data_lake" + DELTA_LAKE_S3 = "delta_lake_s3" + S3_ICEBERG = "s3_iceberg" + + # NoSQL + MONGO = "mongo" + DYNAMODB = "dynamodb" + FIREBASE = "firebase" + + # Streaming / Messaging + KAFKA = "kafka" + CONFLUENT_KAFKA = "confluent_kafka" + GOOGLE_PUBSUB = "google_pubsub" + JMS = "jms" + TIBCO = "tibco" + + # APIs + REST = "rest" + SOAP = "soap" + NEXLA_REST = "nexla_rest" + + # Special + FILE_UPLOAD = "file_upload" + EMAIL = "email" + NEXLA_MONITOR = "nexla_monitor" + DATA_MAP = "data_map" + + # Vector Databases + PINECONE = "pinecone" + + +class ConnectionType(str, Enum): + """Connection type categories for connectors. + + Maps to the `connection_type` field in Connector responses. 
+ """ + + FILE = "file" + DATABASE = "database" + NOSQL = "nosql" + STREAMING = "streaming" + API = "api" + VECTOR_DB = "vector_db" + SPECIAL = "special" + DATA_LAKE = "data_lake" diff --git a/nexla_sdk/models/connectors/requests.py b/nexla_sdk/models/connectors/requests.py new file mode 100644 index 0000000..7fbd3dc --- /dev/null +++ b/nexla_sdk/models/connectors/requests.py @@ -0,0 +1,15 @@ +"""Connector request models.""" + +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class ConnectorUpdate(BaseModel): + """Request model for updating a connector (super user only).""" + + name: Optional[str] = None + description: Optional[str] = None + nexset_api_compatible: Optional[bool] = None + sync_api_compatible: Optional[bool] = None + config: Optional[Dict[str, Any]] = None diff --git a/nexla_sdk/models/connectors/responses.py b/nexla_sdk/models/connectors/responses.py new file mode 100644 index 0000000..1436e97 --- /dev/null +++ b/nexla_sdk/models/connectors/responses.py @@ -0,0 +1,28 @@ +"""Connector response models.""" + +from datetime import datetime +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.flexible_enums import ( + FlexibleConnectionType, + FlexibleConnectorType, +) + + +class Connector(BaseModel): + """Connector response model. + + Connectors define connection types for data sources and destinations. 
+ """ + + id: int + type: Optional[FlexibleConnectorType] = None + connection_type: Optional[FlexibleConnectionType] = None + name: Optional[str] = None + description: Optional[str] = None + nexset_api_compatible: Optional[bool] = None + sync_api_compatible: Optional[bool] = None + config: Optional[Dict[str, Any]] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/models/credentials/__init__.py b/nexla_sdk/models/credentials/__init__.py index 1e87da7..5bb5817 100644 --- a/nexla_sdk/models/credentials/__init__.py +++ b/nexla_sdk/models/credentials/__init__.py @@ -1,15 +1,19 @@ +from typing import TYPE_CHECKING, Any + from nexla_sdk.models.credentials.enums import CredentialType, VerifiedStatus -from nexla_sdk.models.credentials.requests import ( - CredentialCreate, - CredentialUpdate, - ProbeSampleRequest, - ProbeTreeRequest, -) -from nexla_sdk.models.credentials.responses import ( - Credential, - ProbeSampleResponse, - ProbeTreeResponse, -) + +if TYPE_CHECKING: + from nexla_sdk.models.credentials.requests import ( + CredentialCreate, + CredentialUpdate, + ProbeSampleRequest, + ProbeTreeRequest, + ) + from nexla_sdk.models.credentials.responses import ( + Credential, + ProbeSampleResponse, + ProbeTreeResponse, + ) __all__ = [ # Enums @@ -25,3 +29,20 @@ "ProbeTreeRequest", "ProbeSampleRequest", ] + + +def __getattr__(name: str) -> Any: + if name in { + "CredentialCreate", + "CredentialUpdate", + "ProbeTreeRequest", + "ProbeSampleRequest", + }: + from nexla_sdk.models.credentials import requests as _requests + + return getattr(_requests, name) + if name in {"Credential", "ProbeTreeResponse", "ProbeSampleResponse"}: + from nexla_sdk.models.credentials import responses as _responses + + return getattr(_responses, name) + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/nexla_sdk/models/credentials/responses.py b/nexla_sdk/models/credentials/responses.py index 06c115f..20432f4 
100644 --- a/nexla_sdk/models/credentials/responses.py +++ b/nexla_sdk/models/credentials/responses.py @@ -5,6 +5,7 @@ from nexla_sdk.models.base import BaseModel from nexla_sdk.models.common import Connector, Organization, Owner +from nexla_sdk.models.flexible_enums import FlexibleCredentialType class Credential(BaseModel): @@ -12,7 +13,7 @@ class Credential(BaseModel): id: int name: str - credentials_type: str + credentials_type: FlexibleCredentialType owner: Optional[Owner] = None org: Optional[Organization] = None access_roles: Optional[List[str]] = None diff --git a/nexla_sdk/models/custom_data_flows/__init__.py b/nexla_sdk/models/custom_data_flows/__init__.py new file mode 100644 index 0000000..bae3cde --- /dev/null +++ b/nexla_sdk/models/custom_data_flows/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.custom_data_flows.requests import ( + CustomDataFlowCreate, + CustomDataFlowUpdate, +) +from nexla_sdk.models.custom_data_flows.responses import CustomDataFlow + +__all__ = [ + "CustomDataFlow", + "CustomDataFlowCreate", + "CustomDataFlowUpdate", +] diff --git a/nexla_sdk/models/custom_data_flows/requests.py b/nexla_sdk/models/custom_data_flows/requests.py new file mode 100644 index 0000000..cb9dff8 --- /dev/null +++ b/nexla_sdk/models/custom_data_flows/requests.py @@ -0,0 +1,25 @@ +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.base import BaseModel + + +class CustomDataFlowCreate(BaseModel): + name: str + description: Optional[str] = None + flow_type: Optional[str] = None + status: Optional[str] = None + managed: Optional[bool] = None + config: Optional[Dict[str, Any]] = None + code_container_ids: Optional[List[int]] = None + data_credentials_ids: Optional[List[int]] = None + + +class CustomDataFlowUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + flow_type: Optional[str] = None + status: Optional[str] = None + managed: Optional[bool] = None + config: Optional[Dict[str, Any]] = None + 
code_container_ids: Optional[List[int]] = None + data_credentials_ids: Optional[List[int]] = None diff --git a/nexla_sdk/models/custom_data_flows/responses.py b/nexla_sdk/models/custom_data_flows/responses.py new file mode 100644 index 0000000..2f593c0 --- /dev/null +++ b/nexla_sdk/models/custom_data_flows/responses.py @@ -0,0 +1,26 @@ +from datetime import datetime +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.code_containers.responses import CodeContainer +from nexla_sdk.models.common import Organization, Owner +from nexla_sdk.models.credentials.responses import Credential + + +class CustomDataFlow(BaseModel): + id: int + owner: Optional[Owner] = None + org: Optional[Organization] = None + name: Optional[str] = None + description: Optional[str] = None + flow_type: Optional[str] = None + status: Optional[str] = None + managed: Optional[bool] = None + config: Optional[Dict[str, Any]] = None + code_containers: Optional[List[CodeContainer]] = None + data_credentials: Optional[List[Credential]] = None + access_roles: Optional[List[str]] = None + tags: Optional[List[str]] = None + copied_from_id: Optional[int] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/dashboard_transforms/__init__.py b/nexla_sdk/models/dashboard_transforms/__init__.py new file mode 100644 index 0000000..f1d6a83 --- /dev/null +++ b/nexla_sdk/models/dashboard_transforms/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.dashboard_transforms.requests import ( + DashboardTransformCreate, + DashboardTransformUpdate, +) +from nexla_sdk.models.dashboard_transforms.responses import DashboardTransform + +__all__ = [ + "DashboardTransform", + "DashboardTransformCreate", + "DashboardTransformUpdate", +] diff --git a/nexla_sdk/models/dashboard_transforms/requests.py b/nexla_sdk/models/dashboard_transforms/requests.py new file mode 100644 index 0000000..c7c4aa5 --- 
/dev/null +++ b/nexla_sdk/models/dashboard_transforms/requests.py @@ -0,0 +1,11 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class DashboardTransformCreate(BaseModel): + code_container_id: int + + +class DashboardTransformUpdate(BaseModel): + code_container_id: Optional[int] = None diff --git a/nexla_sdk/models/dashboard_transforms/responses.py b/nexla_sdk/models/dashboard_transforms/responses.py new file mode 100644 index 0000000..5ef72e7 --- /dev/null +++ b/nexla_sdk/models/dashboard_transforms/responses.py @@ -0,0 +1,14 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.code_containers.responses import CodeContainer +from nexla_sdk.models.common import Organization, Owner + + +class DashboardTransform(BaseModel): + id: int + owner: Optional[Owner] = None + org: Optional[Organization] = None + resource_type: Optional[str] = None + resource_id: Optional[int] = None + error_transform: Optional[CodeContainer] = None diff --git a/nexla_sdk/models/data_credentials_groups/__init__.py b/nexla_sdk/models/data_credentials_groups/__init__.py new file mode 100644 index 0000000..98f8c80 --- /dev/null +++ b/nexla_sdk/models/data_credentials_groups/__init__.py @@ -0,0 +1,13 @@ +from nexla_sdk.models.data_credentials_groups.requests import ( + DataCredentialsGroupCreate, + DataCredentialsGroupRemoveCredentials, + DataCredentialsGroupUpdate, +) +from nexla_sdk.models.data_credentials_groups.responses import DataCredentialsGroup + +__all__ = [ + "DataCredentialsGroup", + "DataCredentialsGroupCreate", + "DataCredentialsGroupUpdate", + "DataCredentialsGroupRemoveCredentials", +] diff --git a/nexla_sdk/models/data_credentials_groups/requests.py b/nexla_sdk/models/data_credentials_groups/requests.py new file mode 100644 index 0000000..7396aa9 --- /dev/null +++ b/nexla_sdk/models/data_credentials_groups/requests.py @@ -0,0 +1,21 @@ +from typing import List, Optional + +from nexla_sdk.models.base 
import BaseModel + + +class DataCredentialsGroupCreate(BaseModel): + name: str + description: Optional[str] = None + credentials_type: Optional[str] = None + data_credentials: Optional[List[int]] = None + + +class DataCredentialsGroupUpdate(BaseModel): + name: Optional[str] = None + description: Optional[str] = None + credentials_type: Optional[str] = None + data_credentials: Optional[List[int]] = None + + +class DataCredentialsGroupRemoveCredentials(BaseModel): + data_credentials: List[int] diff --git a/nexla_sdk/models/data_credentials_groups/responses.py b/nexla_sdk/models/data_credentials_groups/responses.py new file mode 100644 index 0000000..64ae81a --- /dev/null +++ b/nexla_sdk/models/data_credentials_groups/responses.py @@ -0,0 +1,17 @@ +from datetime import datetime +from typing import Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization, Owner + + +class DataCredentialsGroup(BaseModel): + id: int + name: Optional[str] = None + description: Optional[str] = None + credentials_type: Optional[str] = None + data_credentials_count: Optional[int] = None + owner: Optional[Owner] = None + org: Optional[Organization] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/models/destinations/__init__.py b/nexla_sdk/models/destinations/__init__.py index ebba0f3..5ac80e9 100644 --- a/nexla_sdk/models/destinations/__init__.py +++ b/nexla_sdk/models/destinations/__init__.py @@ -1,18 +1,22 @@ +from typing import TYPE_CHECKING, Any + from nexla_sdk.models.destinations.enums import ( DestinationFormat, DestinationStatus, DestinationType, ) -from nexla_sdk.models.destinations.requests import ( - DestinationCopyOptions, - DestinationCreate, - DestinationUpdate, -) -from nexla_sdk.models.destinations.responses import ( - DataMapInfo, - DataSetInfo, - Destination, -) + +if TYPE_CHECKING: + from nexla_sdk.models.destinations.requests import ( + DestinationCopyOptions, + 
DestinationCreate, + DestinationUpdate, + ) + from nexla_sdk.models.destinations.responses import ( + DataMapInfo, + DataSetInfo, + Destination, + ) __all__ = [ # Enums @@ -28,3 +32,15 @@ "DestinationUpdate", "DestinationCopyOptions", ] + + +def __getattr__(name: str) -> Any: + if name in {"DestinationCreate", "DestinationUpdate", "DestinationCopyOptions"}: + from nexla_sdk.models.destinations import requests as _requests + + return getattr(_requests, name) + if name in {"Destination", "DataSetInfo", "DataMapInfo"}: + from nexla_sdk.models.destinations import responses as _responses + + return getattr(_responses, name) + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/nexla_sdk/models/destinations/responses.py b/nexla_sdk/models/destinations/responses.py index 481b183..a550e97 100644 --- a/nexla_sdk/models/destinations/responses.py +++ b/nexla_sdk/models/destinations/responses.py @@ -6,7 +6,10 @@ from nexla_sdk.models.base import BaseModel from nexla_sdk.models.common import Connector, Organization, Owner from nexla_sdk.models.credentials.responses import Credential -from nexla_sdk.models.destinations.enums import DestinationFormat +from nexla_sdk.models.flexible_enums import ( + FlexibleDestinationFormat, + FlexibleDestinationType, +) class DataSetInfo(BaseModel): @@ -41,8 +44,8 @@ class Destination(BaseModel): id: int name: str status: str - sink_type: str - connector_type: Optional[str] = None + sink_type: FlexibleDestinationType + connector_type: Optional[FlexibleDestinationType] = None owner: Optional[Owner] = None org: Optional[Organization] = None access_roles: Optional[List[str]] = None @@ -53,7 +56,7 @@ class Destination(BaseModel): data_set_id: Optional[int] = None data_map_id: Optional[int] = None data_source_id: Optional[int] = None - sink_format: Optional[DestinationFormat] = None + sink_format: Optional[FlexibleDestinationFormat] = None sink_config: Optional[Dict[str, Any]] = None sink_schedule: Optional[str] = None 
in_memory: bool = False diff --git a/nexla_sdk/models/flexible_enums.py b/nexla_sdk/models/flexible_enums.py new file mode 100644 index 0000000..79338bd --- /dev/null +++ b/nexla_sdk/models/flexible_enums.py @@ -0,0 +1,77 @@ +"""Helper utilities for flexible enum handling in Pydantic models. + +These utilities allow enums to gracefully accept unknown values from API responses +without raising validation errors. Unknown values are preserved as strings. +""" + +from enum import Enum +from typing import Annotated, Type, TypeVar, Union + +from pydantic import BeforeValidator + +# Import directly from .enums modules to avoid circular imports through __init__.py +from nexla_sdk.models.connectors.enums import ConnectionType, ConnectorType +from nexla_sdk.models.credentials.enums import CredentialType +from nexla_sdk.models.destinations.enums import DestinationFormat, DestinationType +from nexla_sdk.models.enums import ConnectorCategory +from nexla_sdk.models.sources.enums import SourceType + +E = TypeVar("E", bound=Enum) + + +def flexible_enum_validator(enum_cls: Type[E]): + """Create a validator that accepts enum values or unknown strings. + + Args: + enum_cls: The enum class to validate against + + Returns: + A validator function that returns the enum member for known values, + or the raw string for unknown values. + """ + + def validator(v): + if v is None: + return None + if isinstance(v, enum_cls): + return v + if isinstance(v, str): + try: + return enum_cls(v) + except ValueError: + # Unknown value - return as string + return v + return v + + return validator + + +def FlexibleEnum(enum_cls: Type[E]) -> Type[Union[E, str]]: + """Create a flexible enum type annotation for Pydantic models. + + This creates an Annotated type that accepts either a valid enum value + or any string. Known values are returned as enum members; unknown values + are returned as raw strings. 
+ + Usage: + sink_type: Optional[FlexibleEnum(DestinationType)] = None + + Args: + enum_cls: The enum class to create a flexible version of + + Returns: + An Annotated type suitable for Pydantic model fields + """ + return Annotated[Union[enum_cls, str], BeforeValidator(flexible_enum_validator(enum_cls))] + + +# Pre-defined flexible connector types for convenience +FlexibleDestinationType = FlexibleEnum(DestinationType) +FlexibleDestinationFormat = FlexibleEnum(DestinationFormat) +FlexibleSourceType = FlexibleEnum(SourceType) +FlexibleCredentialType = FlexibleEnum(CredentialType) +FlexibleConnectorCategory = FlexibleEnum(ConnectorCategory) + +# Flexible types for connectors resource +FlexibleConnectorType = FlexibleEnum(ConnectorType) +FlexibleConnectionType = FlexibleEnum(ConnectionType) diff --git a/nexla_sdk/models/flow_triggers/__init__.py b/nexla_sdk/models/flow_triggers/__init__.py new file mode 100644 index 0000000..72471fb --- /dev/null +++ b/nexla_sdk/models/flow_triggers/__init__.py @@ -0,0 +1,9 @@ +"""Flow triggers models.""" + +from nexla_sdk.models.flow_triggers.requests import FlowTriggerCreate +from nexla_sdk.models.flow_triggers.responses import FlowTrigger + +__all__ = [ + "FlowTrigger", + "FlowTriggerCreate", +] diff --git a/nexla_sdk/models/flow_triggers/requests.py b/nexla_sdk/models/flow_triggers/requests.py new file mode 100644 index 0000000..f864dad --- /dev/null +++ b/nexla_sdk/models/flow_triggers/requests.py @@ -0,0 +1,39 @@ +"""Flow trigger request models.""" + +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class FlowTriggerCreate(BaseModel): + """Request model for creating a flow trigger. + + Flow triggers define when one flow should trigger based on events + from another flow. The triggering and triggered resources must be + specified, along with the event types. 
+ + Event Types: + - DATA_SINK_WRITE_DONE: Triggered when a data sink finishes writing + - DATA_SOURCE_READ_START: Triggered when a data source starts reading + - DATA_SOURCE_READ_DONE: Triggered when a data source finishes reading + """ + + # Event types + triggering_event_type: str # Required + triggered_event_type: str # Required + + # Triggering resource (one of these sets required) + triggering_flow_node_id: Optional[int] = None + triggering_resource_id: Optional[int] = None + triggering_resource_type: Optional[str] = None # data_source, data_sink + data_sink_id: Optional[int] = None # Shorthand for triggering data sink + + # Triggered resource (one of these sets required) + triggered_origin_node_id: Optional[int] = None + triggered_resource_id: Optional[int] = None + triggered_resource_type: Optional[str] = None # data_source, data_sink + data_source_id: Optional[int] = None # Shorthand for triggered data source + + # Optional owner/org + owner_id: Optional[int] = None + org_id: Optional[int] = None diff --git a/nexla_sdk/models/flow_triggers/responses.py b/nexla_sdk/models/flow_triggers/responses.py new file mode 100644 index 0000000..190e062 --- /dev/null +++ b/nexla_sdk/models/flow_triggers/responses.py @@ -0,0 +1,32 @@ +"""Flow trigger response models.""" + +from datetime import datetime +from typing import Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization, Owner + + +class FlowTrigger(BaseModel): + """Flow trigger response model. + + Flow triggers define orchestration events that trigger one flow + based on events from another flow. For example, triggering a + data source read when a data sink write completes. 
+ """ + + id: int + owner: Optional[Owner] = None + org: Optional[Organization] = None + status: str # ACTIVE, PAUSED + triggering_event_type: str # DATA_SINK_WRITE_DONE, DATA_SOURCE_READ_START, DATA_SOURCE_READ_DONE + triggering_origin_node_id: Optional[int] = None + triggering_flow_node_id: Optional[int] = None + triggering_resource_type: Optional[str] = None # data_source, data_sink + triggering_resource_id: Optional[int] = None + triggered_event_type: str # DATA_SINK_WRITE_DONE, DATA_SOURCE_READ_START, DATA_SOURCE_READ_DONE + triggered_origin_node_id: Optional[int] = None + triggered_resource_type: Optional[str] = None # data_source, data_sink + triggered_resource_id: Optional[int] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/nexsets/responses.py b/nexla_sdk/models/nexsets/responses.py index 3affaf3..a59f0e9 100644 --- a/nexla_sdk/models/nexsets/responses.py +++ b/nexla_sdk/models/nexsets/responses.py @@ -5,7 +5,7 @@ from nexla_sdk.models.base import BaseModel from nexla_sdk.models.common import Organization, Owner -from nexla_sdk.models.destinations.enums import DestinationType +from nexla_sdk.models.flexible_enums import FlexibleDestinationType from nexla_sdk.models.sources.responses import DataSetBrief, Source @@ -17,7 +17,7 @@ class DataSinkSimplified(BaseModel): org_id: Optional[int] = None name: str status: Optional[str] = None - sink_type: Optional[DestinationType] = Field(default=None, alias="sinkType") + sink_type: Optional[FlexibleDestinationType] = Field(default=None, alias="sinkType") class Nexset(BaseModel): diff --git a/nexla_sdk/models/notification_channel_settings/__init__.py b/nexla_sdk/models/notification_channel_settings/__init__.py new file mode 100644 index 0000000..c90da72 --- /dev/null +++ b/nexla_sdk/models/notification_channel_settings/__init__.py @@ -0,0 +1,13 @@ +from nexla_sdk.models.notification_channel_settings.requests import ( + 
NotificationChannelSettingCreate, + NotificationChannelSettingUpdate, +) +from nexla_sdk.models.notification_channel_settings.responses import ( + NotificationChannelSetting, +) + +__all__ = [ + "NotificationChannelSetting", + "NotificationChannelSettingCreate", + "NotificationChannelSettingUpdate", +] diff --git a/nexla_sdk/models/notification_channel_settings/requests.py b/nexla_sdk/models/notification_channel_settings/requests.py new file mode 100644 index 0000000..26aff82 --- /dev/null +++ b/nexla_sdk/models/notification_channel_settings/requests.py @@ -0,0 +1,14 @@ +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class NotificationChannelSettingCreate(BaseModel): + channel: str + config: Optional[Dict[str, Any]] = None + org_id: Optional[int] = None + + +class NotificationChannelSettingUpdate(BaseModel): + channel: Optional[str] = None + config: Optional[Dict[str, Any]] = None diff --git a/nexla_sdk/models/notification_channel_settings/responses.py b/nexla_sdk/models/notification_channel_settings/responses.py new file mode 100644 index 0000000..c404956 --- /dev/null +++ b/nexla_sdk/models/notification_channel_settings/responses.py @@ -0,0 +1,14 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class NotificationChannelSetting(BaseModel): + id: int + channel: Optional[str] = None + config: Optional[Dict[str, Any]] = None + owner_id: Optional[int] = None + org_id: Optional[int] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/models/notification_settings/__init__.py b/nexla_sdk/models/notification_settings/__init__.py new file mode 100644 index 0000000..0d39063 --- /dev/null +++ b/nexla_sdk/models/notification_settings/__init__.py @@ -0,0 +1,17 @@ +"""Notification Settings models.""" + +from nexla_sdk.models.notification_settings.requests import ( + NotificationSettingCreate, + 
NotificationSettingUpdate, +) +from nexla_sdk.models.notification_settings.responses import ( + NotificationSetting, + NotificationSettingBrief, +) + +__all__ = [ + "NotificationSetting", + "NotificationSettingBrief", + "NotificationSettingCreate", + "NotificationSettingUpdate", +] diff --git a/nexla_sdk/models/notification_settings/requests.py b/nexla_sdk/models/notification_settings/requests.py new file mode 100644 index 0000000..ea78fac --- /dev/null +++ b/nexla_sdk/models/notification_settings/requests.py @@ -0,0 +1,35 @@ +"""Notification Setting request models.""" + +from typing import Any, Dict, List, Optional + +from pydantic import ConfigDict, Field + +from nexla_sdk.models.base import BaseModel + + +class NotificationSettingCreate(BaseModel): + """Notification Setting creation request model.""" + + model_config = ConfigDict(populate_by_name=True) + + notification_type_id: int + resource_id: Optional[int] = None + resource_type: Optional[str] = None + channel: str = Field(..., description="Notification channel (e.g., email, slack)") + priority: int = Field(default=0, ge=0, le=100, description="Priority level") + status: str = Field( + default="ENABLED", + description="Status (ENABLED, DISABLED)", + ) + payload: Optional[Dict[str, Any]] = None + + +class NotificationSettingUpdate(BaseModel): + """Notification Setting update request model.""" + + model_config = ConfigDict(populate_by_name=True) + + channel: Optional[str] = None + priority: Optional[int] = Field(default=None, ge=0, le=100) + status: Optional[str] = None + payload: Optional[Dict[str, Any]] = None diff --git a/nexla_sdk/models/notification_settings/responses.py b/nexla_sdk/models/notification_settings/responses.py new file mode 100644 index 0000000..726d076 --- /dev/null +++ b/nexla_sdk/models/notification_settings/responses.py @@ -0,0 +1,37 @@ +"""Notification Setting response models.""" + +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import Field 
+ +from nexla_sdk.models.base import BaseModel + + +class NotificationSetting(BaseModel): + """Notification Setting response model.""" + + id: int + notification_type_id: int + resource_id: Optional[int] = None + resource_type: Optional[str] = None + channel: str + priority: int = 0 + status: str = "ENABLED" + payload: Optional[Dict[str, Any]] = None + owner_id: Optional[int] = None + org_id: Optional[int] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None + + +class NotificationSettingBrief(BaseModel): + """Brief Notification Setting response model for list views.""" + + id: int + notification_type_id: int + channel: str + priority: int = 0 + status: str = "ENABLED" + resource_type: Optional[str] = None + resource_id: Optional[int] = None diff --git a/nexla_sdk/models/notification_types/__init__.py b/nexla_sdk/models/notification_types/__init__.py new file mode 100644 index 0000000..89c5080 --- /dev/null +++ b/nexla_sdk/models/notification_types/__init__.py @@ -0,0 +1,3 @@ +from nexla_sdk.models.notification_types.responses import NotificationType + +__all__ = ["NotificationType"] diff --git a/nexla_sdk/models/notification_types/responses.py b/nexla_sdk/models/notification_types/responses.py new file mode 100644 index 0000000..fd55aff --- /dev/null +++ b/nexla_sdk/models/notification_types/responses.py @@ -0,0 +1,14 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class NotificationType(BaseModel): + id: int + name: Optional[str] = None + description: Optional[str] = None + category: Optional[str] = None + default: Optional[bool] = None + status: Optional[str] = None + event_type: Optional[str] = None + resource_type: Optional[str] = None diff --git a/nexla_sdk/models/notifications/__init__.py b/nexla_sdk/models/notifications/__init__.py index 9d24357..16465ef 100644 --- a/nexla_sdk/models/notifications/__init__.py +++ b/nexla_sdk/models/notifications/__init__.py @@ -9,6 +9,7 @@ 
NotificationChannelSetting, NotificationCount, NotificationSetting, + NotificationSettingBrief, NotificationType, ) @@ -18,6 +19,7 @@ "NotificationType", "NotificationChannelSetting", "NotificationSetting", + "NotificationSettingBrief", "NotificationCount", # Requests "NotificationChannelSettingCreate", diff --git a/nexla_sdk/models/notifications/responses.py b/nexla_sdk/models/notifications/responses.py index 5b46a53..8bdce61 100644 --- a/nexla_sdk/models/notifications/responses.py +++ b/nexla_sdk/models/notifications/responses.py @@ -74,3 +74,15 @@ class NotificationCount(BaseModel): """Notification count response.""" count: int + + +class NotificationSettingBrief(BaseModel): + """Brief Notification Setting response model for list views.""" + + id: int + notification_type_id: int + channel: str + priority: int = 0 + status: str = "ENABLED" + resource_type: Optional[str] = None + resource_id: Optional[int] = None diff --git a/nexla_sdk/models/org_tiers/__init__.py b/nexla_sdk/models/org_tiers/__init__.py new file mode 100644 index 0000000..e50c096 --- /dev/null +++ b/nexla_sdk/models/org_tiers/__init__.py @@ -0,0 +1,3 @@ +from nexla_sdk.models.org_tiers.responses import OrgTier + +__all__ = ["OrgTier"] diff --git a/nexla_sdk/models/org_tiers/responses.py b/nexla_sdk/models/org_tiers/responses.py new file mode 100644 index 0000000..6052397 --- /dev/null +++ b/nexla_sdk/models/org_tiers/responses.py @@ -0,0 +1,13 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class OrgTier(BaseModel): + id: int + name: Optional[str] = None + display_name: Optional[str] = None + record_count_limit: Optional[int] = None + record_count_limit_time: Optional[str] = None + data_source_count_limit: Optional[int] = None + trial_period_days: Optional[int] = None diff --git a/nexla_sdk/models/quarantine_settings/__init__.py b/nexla_sdk/models/quarantine_settings/__init__.py new file mode 100644 index 0000000..6e7b15e --- /dev/null +++ 
b/nexla_sdk/models/quarantine_settings/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.quarantine_settings.requests import ( + QuarantineSettingCreate, + QuarantineSettingUpdate, +) +from nexla_sdk.models.quarantine_settings.responses import QuarantineSetting + +__all__ = [ + "QuarantineSetting", + "QuarantineSettingCreate", + "QuarantineSettingUpdate", +] diff --git a/nexla_sdk/models/quarantine_settings/requests.py b/nexla_sdk/models/quarantine_settings/requests.py new file mode 100644 index 0000000..e5f5cc9 --- /dev/null +++ b/nexla_sdk/models/quarantine_settings/requests.py @@ -0,0 +1,17 @@ +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class QuarantineSettingCreate(BaseModel): + data_credentials_id: int + config: Dict[str, Any] + owner_id: Optional[int] = None + org_id: Optional[int] = None + + +class QuarantineSettingUpdate(BaseModel): + data_credentials_id: Optional[int] = None + config: Optional[Dict[str, Any]] = None + owner_id: Optional[int] = None + org_id: Optional[int] = None diff --git a/nexla_sdk/models/quarantine_settings/responses.py b/nexla_sdk/models/quarantine_settings/responses.py new file mode 100644 index 0000000..42a0786 --- /dev/null +++ b/nexla_sdk/models/quarantine_settings/responses.py @@ -0,0 +1,20 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization, Owner +from nexla_sdk.models.credentials.responses import Credential + + +class QuarantineSetting(BaseModel): + id: int + owner: Optional[Owner] = None + org: Optional[Organization] = None + resource_type: Optional[str] = None + resource_id: Optional[int] = None + config: Optional[Dict[str, Any]] = None + data_credentials_id: Optional[int] = None + credentials_type: Optional[str] = None + data_credentials: Optional[Credential] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git 
a/nexla_sdk/models/resource_parameters/__init__.py b/nexla_sdk/models/resource_parameters/__init__.py new file mode 100644 index 0000000..2934617 --- /dev/null +++ b/nexla_sdk/models/resource_parameters/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.resource_parameters.requests import ( + ResourceParameterCreate, + ResourceParameterUpdate, +) +from nexla_sdk.models.resource_parameters.responses import ResourceParameter + +__all__ = [ + "ResourceParameter", + "ResourceParameterCreate", + "ResourceParameterUpdate", +] diff --git a/nexla_sdk/models/resource_parameters/requests.py b/nexla_sdk/models/resource_parameters/requests.py new file mode 100644 index 0000000..459eacc --- /dev/null +++ b/nexla_sdk/models/resource_parameters/requests.py @@ -0,0 +1,29 @@ +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.base import BaseModel + + +class ResourceParameterCreate(BaseModel): + name: str + display_name: Optional[str] = None + description: Optional[str] = None + resource_type: Optional[str] = None + data_type: Optional[str] = None + order: Optional[int] = None + config: Optional[Dict[str, Any]] = None + global_param: Optional[bool] = None + vendor_endpoint_id: Optional[int] = None + allowed_values: Optional[List[Any]] = None + + +class ResourceParameterUpdate(BaseModel): + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + resource_type: Optional[str] = None + data_type: Optional[str] = None + order: Optional[int] = None + config: Optional[Dict[str, Any]] = None + global_param: Optional[bool] = None + vendor_endpoint_id: Optional[int] = None + allowed_values: Optional[List[Any]] = None diff --git a/nexla_sdk/models/resource_parameters/responses.py b/nexla_sdk/models/resource_parameters/responses.py new file mode 100644 index 0000000..1c6c60b --- /dev/null +++ b/nexla_sdk/models/resource_parameters/responses.py @@ -0,0 +1,23 @@ +from datetime import datetime +from typing import Any, Dict, List, 
Optional + +from pydantic import Field + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.vendor_endpoints.responses import VendorEndpoint + + +class ResourceParameter(BaseModel): + id: int + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + resource_type: Optional[str] = None + data_type: Optional[str] = None + order: Optional[int] = None + config: Optional[Dict[str, Any]] = None + global_param: Optional[bool] = Field(default=None, alias="global") + vendor_endpoint: Optional[VendorEndpoint] = None + allowed_values: Optional[List[Any]] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/service_keys/__init__.py b/nexla_sdk/models/service_keys/__init__.py new file mode 100644 index 0000000..e26459a --- /dev/null +++ b/nexla_sdk/models/service_keys/__init__.py @@ -0,0 +1,13 @@ +"""Service keys models.""" + +from nexla_sdk.models.service_keys.requests import ( + ServiceKeyCreate, + ServiceKeyUpdate, +) +from nexla_sdk.models.service_keys.responses import ServiceKey + +__all__ = [ + "ServiceKey", + "ServiceKeyCreate", + "ServiceKeyUpdate", +] diff --git a/nexla_sdk/models/service_keys/requests.py b/nexla_sdk/models/service_keys/requests.py new file mode 100644 index 0000000..d57757d --- /dev/null +++ b/nexla_sdk/models/service_keys/requests.py @@ -0,0 +1,20 @@ +"""Service key request models.""" + +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class ServiceKeyCreate(BaseModel): + """Request model for creating a service key.""" + + name: str + description: str + data_source_id: Optional[int] = None + + +class ServiceKeyUpdate(BaseModel): + """Request model for updating a service key.""" + + name: Optional[str] = None + description: Optional[str] = None diff --git a/nexla_sdk/models/service_keys/responses.py b/nexla_sdk/models/service_keys/responses.py new file mode 100644 index 0000000..36f6688 --- 
/dev/null +++ b/nexla_sdk/models/service_keys/responses.py @@ -0,0 +1,29 @@ +"""Service key response models.""" + +from datetime import datetime +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class ServiceKey(BaseModel): + """Service key response model. + + Service keys are long-lived credentials used for programmatic API access. + They can be rotated and have lifecycle management (activate/pause). + """ + + id: int + owner_id: Optional[int] = None + org_id: Optional[int] = None + name: Optional[str] = None + description: Optional[str] = None + status: str # INIT, ACTIVE, PAUSED + api_key: str + last_rotated_key: Optional[str] = None + last_rotated_at: Optional[datetime] = None + data_source_id: Optional[int] = None + cluster_id: Optional[int] = None # Super user only + cluster_uid: Optional[str] = None # Super user only + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/sources/__init__.py b/nexla_sdk/models/sources/__init__.py index 6df9d18..2312cde 100644 --- a/nexla_sdk/models/sources/__init__.py +++ b/nexla_sdk/models/sources/__init__.py @@ -1,15 +1,19 @@ +from typing import TYPE_CHECKING, Any + from nexla_sdk.models.sources.enums import ( FlowType, IngestMethod, SourceStatus, SourceType, ) -from nexla_sdk.models.sources.requests import ( - SourceCopyOptions, - SourceCreate, - SourceUpdate, -) -from nexla_sdk.models.sources.responses import DataSetBrief, RunInfo, Source + +if TYPE_CHECKING: + from nexla_sdk.models.sources.requests import ( + SourceCopyOptions, + SourceCreate, + SourceUpdate, + ) + from nexla_sdk.models.sources.responses import DataSetBrief, RunInfo, Source __all__ = [ # Enums @@ -26,3 +30,15 @@ "SourceUpdate", "SourceCopyOptions", ] + + +def __getattr__(name: str) -> Any: + if name in {"SourceCreate", "SourceUpdate", "SourceCopyOptions"}: + from nexla_sdk.models.sources import requests as _requests + + return getattr(_requests, name) + if name in 
{"Source", "DataSetBrief", "RunInfo"}: + from nexla_sdk.models.sources import responses as _responses + + return getattr(_responses, name) + raise AttributeError(f"module {__name__!r} has no attribute {name!r}") diff --git a/nexla_sdk/models/sources/enums.py b/nexla_sdk/models/sources/enums.py index 49b6284..b87accd 100644 --- a/nexla_sdk/models/sources/enums.py +++ b/nexla_sdk/models/sources/enums.py @@ -20,31 +20,66 @@ class SourceType(str, Enum): S3 = "s3" GCS = "gcs" AZURE_BLB = "azure_blb" + AZURE_DATA_LAKE = "azure_data_lake" FTP = "ftp" DROPBOX = "dropbox" BOX = "box" GDRIVE = "gdrive" SHAREPOINT = "sharepoint" + MIN_IO_S3 = "min_io_s3" + WEBDAV = "webdav" - # Databases + # Databases - Traditional RDBMS MYSQL = "mysql" POSTGRES = "postgres" + SUPABASE = "supabase" SQLSERVER = "sqlserver" ORACLE = "oracle" + ORACLE_AUTONOMOUS = "oracle_autonomous" + AS400 = "as400" + DB2 = "db2" + SYBASE = "sybase" + HANA_JDBC = "hana_jdbc" + NETSUITE_JDBC = "netsuite_jdbc" + + # Databases - Cloud Data Warehouses REDSHIFT = "redshift" SNOWFLAKE = "snowflake" + SNOWFLAKE_DCR = "snowflake_dcr" BIGQUERY = "bigquery" DATABRICKS = "databricks" + AWS_ATHENA = "aws_athena" + AZURE_SYNAPSE = "azure_synapse" + FIREBOLT = "firebolt" + TERADATA = "teradata" + HIVE = "hive" + + # Databases - Google Cloud SQL + CLOUDSQL_MYSQL = "cloudsql_mysql" + CLOUDSQL_POSTGRES = "cloudsql_postgres" + CLOUDSQL_SQLSERVER = "cloudsql_sqlserver" + + # Databases - Google Cloud + GCP_ALLOYDB = "gcp_alloydb" + GCP_SPANNER = "gcp_spanner" + + # Delta Lake / Iceberg + DELTA_LAKE_AZURE_BLB = "delta_lake_azure_blb" + DELTA_LAKE_AZURE_DATA_LAKE = "delta_lake_azure_data_lake" + DELTA_LAKE_S3 = "delta_lake_s3" + S3_ICEBERG = "s3_iceberg" # NoSQL MONGO = "mongo" DYNAMODB = "dynamodb" FIREBASE = "firebase" - # Streaming + # Streaming / Messaging KAFKA = "kafka" CONFLUENT_KAFKA = "confluent_kafka" GOOGLE_PUBSUB = "google_pubsub" + JMS = "jms" + TIBCO = "tibco" # APIs REST = "rest" @@ -56,7 +91,8 @@ class 
SourceType(str, Enum): EMAIL = "email" NEXLA_MONITOR = "nexla_monitor" - # Add all other types from the spec... + # Vector Databases + PINECONE = "pinecone" class IngestMethod(str, Enum): diff --git a/nexla_sdk/models/sources/responses.py b/nexla_sdk/models/sources/responses.py index f3c415b..82d986d 100644 --- a/nexla_sdk/models/sources/responses.py +++ b/nexla_sdk/models/sources/responses.py @@ -6,6 +6,7 @@ from nexla_sdk.models.base import BaseModel from nexla_sdk.models.common import Connector, Organization, Owner from nexla_sdk.models.credentials.responses import Credential +from nexla_sdk.models.flexible_enums import FlexibleSourceType class DataSetBrief(BaseModel): @@ -34,8 +35,8 @@ class Source(BaseModel): id: int name: str status: str - source_type: str - connector_type: Optional[str] = None + source_type: FlexibleSourceType + connector_type: Optional[FlexibleSourceType] = None owner: Optional[Owner] = None org: Optional[Organization] = None access_roles: Optional[List[str]] = None diff --git a/nexla_sdk/models/user_settings/__init__.py b/nexla_sdk/models/user_settings/__init__.py new file mode 100644 index 0000000..9df0c87 --- /dev/null +++ b/nexla_sdk/models/user_settings/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.user_settings.requests import ( + UserSettingCreate, + UserSettingUpdate, +) +from nexla_sdk.models.user_settings.responses import UserSetting + +__all__ = [ + "UserSetting", + "UserSettingCreate", + "UserSettingUpdate", +] diff --git a/nexla_sdk/models/user_settings/requests.py b/nexla_sdk/models/user_settings/requests.py new file mode 100644 index 0000000..65a3f63 --- /dev/null +++ b/nexla_sdk/models/user_settings/requests.py @@ -0,0 +1,16 @@ +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class UserSettingCreate(BaseModel): + user_settings_type: str + primary_key_value: str + description: Optional[str] = None + settings: Optional[Dict[str, Any]] = None + copied_from_id: Optional[int] = 
None + + +class UserSettingUpdate(BaseModel): + description: Optional[str] = None + settings: Optional[Dict[str, Any]] = None diff --git a/nexla_sdk/models/user_settings/responses.py b/nexla_sdk/models/user_settings/responses.py new file mode 100644 index 0000000..ade39f3 --- /dev/null +++ b/nexla_sdk/models/user_settings/responses.py @@ -0,0 +1,18 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization, Owner + + +class UserSetting(BaseModel): + id: int + owner: Optional[Owner] = None + org: Optional[Organization] = None + user_settings_type: Optional[str] = None + primary_key_value: Optional[str] = None + description: Optional[str] = None + settings: Optional[Dict[str, Any]] = None + copied_from_id: Optional[int] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/user_tiers/__init__.py b/nexla_sdk/models/user_tiers/__init__.py new file mode 100644 index 0000000..4a769a9 --- /dev/null +++ b/nexla_sdk/models/user_tiers/__init__.py @@ -0,0 +1,3 @@ +from nexla_sdk.models.user_tiers.responses import UserTier + +__all__ = ["UserTier"] diff --git a/nexla_sdk/models/user_tiers/responses.py b/nexla_sdk/models/user_tiers/responses.py new file mode 100644 index 0000000..c91e218 --- /dev/null +++ b/nexla_sdk/models/user_tiers/responses.py @@ -0,0 +1,13 @@ +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class UserTier(BaseModel): + id: int + name: Optional[str] = None + display_name: Optional[str] = None + record_count_limit: Optional[int] = None + record_count_limit_time: Optional[str] = None + data_source_count_limit: Optional[int] = None + trial_period_days: Optional[int] = None diff --git a/nexla_sdk/models/users/__init__.py b/nexla_sdk/models/users/__init__.py index d92b1bf..25f2ad7 100644 --- a/nexla_sdk/models/users/__init__.py +++ 
b/nexla_sdk/models/users/__init__.py @@ -1,3 +1,4 @@ +from nexla_sdk.models.users.credits import UserCredit, UserCreditCreate from nexla_sdk.models.users.requests import UserCreate, UserUpdate from nexla_sdk.models.users.responses import ( AccountSummary, @@ -16,6 +17,8 @@ "DefaultOrg", "OrgMembership", "AccountSummary", + "UserCredit", + "UserCreditCreate", # Requests "UserCreate", "UserUpdate", diff --git a/nexla_sdk/models/users/credits.py b/nexla_sdk/models/users/credits.py new file mode 100644 index 0000000..37237c4 --- /dev/null +++ b/nexla_sdk/models/users/credits.py @@ -0,0 +1,28 @@ +from datetime import datetime +from typing import Optional + +from nexla_sdk.models.base import BaseModel + + +class UserCredit(BaseModel): + id: int + owner_id: Optional[int] = None + org_id: Optional[int] = None + grant_type: Optional[str] = None + credits_available: Optional[int] = None + credits: Optional[int] = None + credits_used: Optional[int] = None + credits_monthly: Optional[int] = None + credits_used_in_month: Optional[int] = None + granted_at: Optional[datetime] = None + refreshed_at: Optional[datetime] = None + expires_at: Optional[datetime] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None + + +class UserCreditCreate(BaseModel): + grant_type: Optional[str] = None + credits: Optional[int] = None + credits_monthly: Optional[int] = None + expires_at: Optional[str] = None diff --git a/nexla_sdk/models/validators/__init__.py b/nexla_sdk/models/validators/__init__.py new file mode 100644 index 0000000..dbaa309 --- /dev/null +++ b/nexla_sdk/models/validators/__init__.py @@ -0,0 +1,15 @@ +"""Validators models.""" + +from nexla_sdk.models.validators.requests import ( + ValidatorCopyOptions, + ValidatorCreate, + ValidatorUpdate, +) +from nexla_sdk.models.validators.responses import Validator + +__all__ = [ + "Validator", + "ValidatorCreate", + "ValidatorUpdate", + "ValidatorCopyOptions", +] diff --git 
a/nexla_sdk/models/validators/requests.py b/nexla_sdk/models/validators/requests.py new file mode 100644 index 0000000..88daf12 --- /dev/null +++ b/nexla_sdk/models/validators/requests.py @@ -0,0 +1,58 @@ +"""Validator request models.""" + +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.base import BaseModel + + +class ValidatorCreate(BaseModel): + """Request model for creating a validator.""" + + name: Optional[str] = None + description: Optional[str] = None + code_type: str # Required: jolt_standard, jolt_custom, python, javascript, etc. + code: Optional[Any] = None + code_config: Optional[Dict[str, Any]] = None + code_encoding: Optional[str] = None # none, base64 + custom_config: Optional[Dict[str, Any]] = None + resource_type: str = "validator" + output_type: Optional[str] = None # record, attribute, custom + reusable: Optional[bool] = None + public: Optional[bool] = None + owner_id: Optional[int] = None + org_id: Optional[int] = None + data_credentials_id: Optional[int] = None + runtime_data_credentials_id: Optional[int] = None + tags: Optional[List[str]] = None + repo_type: Optional[str] = None + repo_config: Optional[Dict[str, Any]] = None + ai_function_type: Optional[str] = None + + +class ValidatorUpdate(BaseModel): + """Request model for updating a validator.""" + + name: Optional[str] = None + description: Optional[str] = None + code_type: Optional[str] = None + code: Optional[Any] = None + code_config: Optional[Dict[str, Any]] = None + code_encoding: Optional[str] = None + custom_config: Optional[Dict[str, Any]] = None + output_type: Optional[str] = None + reusable: Optional[bool] = None + public: Optional[bool] = None + data_credentials_id: Optional[int] = None + runtime_data_credentials_id: Optional[int] = None + tags: Optional[List[str]] = None + repo_type: Optional[str] = None + repo_config: Optional[Dict[str, Any]] = None + + +class ValidatorCopyOptions(BaseModel): + """Options for copying a validator.""" + + owner_id: 
Optional[int] = None + org_id: Optional[int] = None + copy_access_controls: Optional[bool] = None + reuse_data_credentials: Optional[bool] = None diff --git a/nexla_sdk/models/validators/responses.py b/nexla_sdk/models/validators/responses.py new file mode 100644 index 0000000..9633319 --- /dev/null +++ b/nexla_sdk/models/validators/responses.py @@ -0,0 +1,51 @@ +"""Validator response models.""" + +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import Field + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.common import Organization, Owner + + +class ValidatorCredential(BaseModel): + """Credential reference in validator response.""" + + id: int + name: Optional[str] = None + description: Optional[str] = None + updated_at: Optional[datetime] = None + created_at: Optional[datetime] = None + + +class Validator(BaseModel): + """Validator response model.""" + + id: int + name: Optional[str] = None + description: Optional[str] = None + resource_type: str = "validator" + code_type: Optional[str] = None + output_type: Optional[str] = None + code: Optional[Any] = None + code_config: Optional[Dict[str, Any]] = None + code_encoding: Optional[str] = None + code_error: Optional[str] = None + custom_config: Optional[Dict[str, Any]] = None + repo_config: Optional[Dict[str, Any]] = None + reusable: bool = True + public: bool = False + managed: bool = False + ai_function_type: Optional[str] = None + owner: Optional[Owner] = None + org: Optional[Organization] = None + access_roles: Optional[List[str]] = Field(default_factory=list) + data_credentials: Optional[ValidatorCredential] = None + runtime_data_credentials: Optional[ValidatorCredential] = None + data_sets: Optional[List[int]] = Field(default_factory=list) + tags: Optional[List[str]] = Field(default_factory=list) + referenced_resource_ids: Optional[Dict[str, List[int]]] = None + copied_from_id: Optional[int] = None + updated_at: Optional[datetime] = None + 
created_at: Optional[datetime] = None diff --git a/nexla_sdk/models/vendor_endpoints/__init__.py b/nexla_sdk/models/vendor_endpoints/__init__.py new file mode 100644 index 0000000..bba6287 --- /dev/null +++ b/nexla_sdk/models/vendor_endpoints/__init__.py @@ -0,0 +1,11 @@ +from nexla_sdk.models.vendor_endpoints.requests import ( + VendorEndpointCreate, + VendorEndpointUpdate, +) +from nexla_sdk.models.vendor_endpoints.responses import VendorEndpoint + +__all__ = [ + "VendorEndpoint", + "VendorEndpointCreate", + "VendorEndpointUpdate", +] diff --git a/nexla_sdk/models/vendor_endpoints/requests.py b/nexla_sdk/models/vendor_endpoints/requests.py new file mode 100644 index 0000000..6f88bae --- /dev/null +++ b/nexla_sdk/models/vendor_endpoints/requests.py @@ -0,0 +1,16 @@ +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class VendorEndpointCreate(BaseModel): + name: str + vendor_id: int + resource_type: Optional[str] = None + config: Optional[Dict[str, Any]] = None + + +class VendorEndpointUpdate(BaseModel): + name: Optional[str] = None + resource_type: Optional[str] = None + config: Optional[Dict[str, Any]] = None diff --git a/nexla_sdk/models/vendor_endpoints/responses.py b/nexla_sdk/models/vendor_endpoints/responses.py new file mode 100644 index 0000000..8e1698f --- /dev/null +++ b/nexla_sdk/models/vendor_endpoints/responses.py @@ -0,0 +1,16 @@ +from datetime import datetime +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel +from nexla_sdk.models.vendors.responses import VendorRef + + +class VendorEndpoint(BaseModel): + id: int + name: Optional[str] = None + resource_type: Optional[str] = None + config: Optional[Dict[str, Any]] = None + vendor: Optional[VendorRef] = None + vendor_id: Optional[int] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/models/vendors/__init__.py b/nexla_sdk/models/vendors/__init__.py new file mode 
100644 index 0000000..4176122 --- /dev/null +++ b/nexla_sdk/models/vendors/__init__.py @@ -0,0 +1,11 @@ +"""Vendor models.""" + +from nexla_sdk.models.vendors.requests import VendorCreate, VendorUpdate +from nexla_sdk.models.vendors.responses import Vendor, VendorRef + +__all__ = [ + "Vendor", + "VendorCreate", + "VendorUpdate", + "VendorRef", +] diff --git a/nexla_sdk/models/vendors/requests.py b/nexla_sdk/models/vendors/requests.py new file mode 100644 index 0000000..96f8504 --- /dev/null +++ b/nexla_sdk/models/vendors/requests.py @@ -0,0 +1,28 @@ +"""Vendor request models.""" + +from typing import Any, Dict, Optional + +from nexla_sdk.models.base import BaseModel + + +class VendorCreate(BaseModel): + """Request model for creating a vendor (super user only).""" + + name: str + display_name: Optional[str] = None + description: Optional[str] = None + config: Optional[Dict[str, Any]] = None + small_logo: Optional[str] = None + logo: Optional[str] = None + connector_id: Optional[int] = None + + +class VendorUpdate(BaseModel): + """Request model for updating a vendor (super user only).""" + + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + config: Optional[Dict[str, Any]] = None + small_logo: Optional[str] = None + logo: Optional[str] = None diff --git a/nexla_sdk/models/vendors/responses.py b/nexla_sdk/models/vendors/responses.py new file mode 100644 index 0000000..f4379b7 --- /dev/null +++ b/nexla_sdk/models/vendors/responses.py @@ -0,0 +1,37 @@ +"""Vendor response models.""" + +from datetime import datetime +from typing import Any, Dict, List, Optional + +from pydantic import Field + +from nexla_sdk.models.base import BaseModel + + +class VendorRef(BaseModel): + """Minimal vendor reference.""" + + id: int + name: Optional[str] = None + display_name: Optional[str] = None + + +class Vendor(BaseModel): + """Vendor response model. 
+ + Vendors represent third-party service providers that can be + connected via auth templates and endpoints. + """ + + id: int + name: Optional[str] = None + display_name: Optional[str] = None + description: Optional[str] = None + config: Optional[Dict[str, Any]] = None + small_logo: Optional[str] = None + logo: Optional[str] = None + connection_type: Optional[str] = None # From associated connector + auth_templates: Optional[List[Any]] = Field(default_factory=list) # IDs or objects + vendor_endpoints: Optional[List[Any]] = Field(default_factory=list) # IDs or objects + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None diff --git a/nexla_sdk/py.typed b/nexla_sdk/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/nexla_sdk/raw_operations.py b/nexla_sdk/raw_operations.py new file mode 100644 index 0000000..fdacce2 --- /dev/null +++ b/nexla_sdk/raw_operations.py @@ -0,0 +1,248 @@ +"""Typed operation-level access for the full OpenAPI surface.""" + +from __future__ import annotations + +import re +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union + +from nexla_sdk.exceptions import ValidationError +from nexla_sdk.generated.operation_map import OPERATION_MAP, OperationId + +if TYPE_CHECKING: + from nexla_sdk.client import NexlaClient + + +_PATH_PARAM_RE = re.compile(r"\{([^}]+)\}") + + +@dataclass(frozen=True) +class OperationDefinition: + """Normalized operation metadata.""" + + operation_id: str + method: str + path: str + tags: List[str] + summary: str + path_params: List[str] + + +class RawOperationsClient: + """Low-level typed access to any OpenAPI operation.""" + + def __init__(self, client: "NexlaClient"): + self._client = client + + def list_operations(self) -> List[str]: + """Return sorted operation ids available in this SDK build.""" + return sorted(OPERATION_MAP.keys()) + + def get_operation( + self, operation_id: Union[OperationId, str] + ) -> 
OperationDefinition: + """Get metadata for a specific operation id.""" + spec = OPERATION_MAP.get(str(operation_id)) + if spec is None: + raise ValidationError( + f"Unknown operation_id: {operation_id}", + operation="raw_get_operation", + resource_type="operation", + resource_id=str(operation_id), + ) + + return OperationDefinition( + operation_id=str(operation_id), + method=spec["method"], + path=spec["path"], + tags=list(spec["tags"]), + summary=spec["summary"], + path_params=list(spec["path_params"]), + ) + + def call( + self, + operation_id: Union[OperationId, str], + *, + path_params: Optional[Mapping[str, Any]] = None, + query: Optional[Mapping[str, Any]] = None, + body: Any = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + """Call an operation by operation id.""" + operation = self.get_operation(operation_id) + path = self._render_path( + operation.path, operation.path_params, path_params or {} + ) + + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if body is not None: + kwargs["json"] = body + if headers is not None: + kwargs["headers"] = dict(headers) + + return self._client.request(operation.method, path, **kwargs) + + def request( + self, + method: str, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + body: Any = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + """Send an arbitrary request to support non-spec or backend-only endpoints.""" + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if body is not None: + kwargs["json"] = body + if headers is not None: + kwargs["headers"] = dict(headers) + return self._client.request(method.upper(), path, **kwargs) + + def get( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if headers is not None: + kwargs["headers"] 
= dict(headers) + return self._client.request("GET", path, **kwargs) + + def post( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + body: Any = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if body is not None: + kwargs["json"] = body + if headers is not None: + kwargs["headers"] = dict(headers) + return self._client.request("POST", path, **kwargs) + + def put( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + body: Any = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if body is not None: + kwargs["json"] = body + if headers is not None: + kwargs["headers"] = dict(headers) + return self._client.request("PUT", path, **kwargs) + + def delete( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + body: Any = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if body is not None: + kwargs["json"] = body + if headers is not None: + kwargs["headers"] = dict(headers) + return self._client.request("DELETE", path, **kwargs) + + def patch( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + body: Any = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if body is not None: + kwargs["json"] = body + if headers is not None: + kwargs["headers"] = dict(headers) + return self._client.request("PATCH", path, **kwargs) + + def head( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if headers is not None: + 
kwargs["headers"] = dict(headers) + return self._client.request("HEAD", path, **kwargs) + + def options( + self, + path: str, + *, + query: Optional[Mapping[str, Any]] = None, + headers: Optional[Mapping[str, str]] = None, + ) -> Any: + kwargs: Dict[str, Any] = {} + if query is not None: + kwargs["params"] = dict(query) + if headers is not None: + kwargs["headers"] = dict(headers) + return self._client.request("OPTIONS", path, **kwargs) + + def _render_path( + self, + path_template: str, + required_params: List[str], + provided_params: Mapping[str, Any], + ) -> str: + missing = [param for param in required_params if param not in provided_params] + if missing: + missing_str = ", ".join(missing) + raise ValidationError( + f"Missing required path params: {missing_str}", + operation="raw_call", + resource_type="operation_path_params", + context={"path": path_template, "required": required_params}, + ) + + def _replace(match: re.Match[str]) -> str: + key = match.group(1) + value = provided_params.get(key) + if value is None: + raise ValidationError( + f"Path param '{key}' cannot be None", + operation="raw_call", + resource_type="operation_path_params", + context={"path": path_template, "param": key}, + ) + return str(value) + + rendered = _PATH_PARAM_RE.sub(_replace, path_template) + if not rendered.startswith("/"): + rendered = f"/{rendered}" + return rendered diff --git a/nexla_sdk/resources/__init__.py b/nexla_sdk/resources/__init__.py index effd879..a9be318 100644 --- a/nexla_sdk/resources/__init__.py +++ b/nexla_sdk/resources/__init__.py @@ -1,32 +1,66 @@ +from nexla_sdk.resources.api_keys import ApiKeysResource from nexla_sdk.resources.approval_requests import ApprovalRequestsResource from nexla_sdk.resources.async_tasks import AsyncTasksResource from nexla_sdk.resources.attribute_transforms import AttributeTransformsResource +from nexla_sdk.resources.auth_parameters import AuthParametersResource +from nexla_sdk.resources.auth_templates import 
AuthTemplatesResource from nexla_sdk.resources.base_resource import BaseResource +from nexla_sdk.resources.catalog_configs import CatalogConfigsResource +from nexla_sdk.resources.cluster_endpoints import ClusterEndpointsResource +from nexla_sdk.resources.clusters import ClustersResource from nexla_sdk.resources.code_containers import CodeContainersResource +from nexla_sdk.resources.connectors import ConnectorsResource from nexla_sdk.resources.credentials import CredentialsResource +from nexla_sdk.resources.custom_data_flows import CustomDataFlowsResource +from nexla_sdk.resources.data_credentials_groups import DataCredentialsGroupsResource +from nexla_sdk.resources.data_flows import DataFlowsResource from nexla_sdk.resources.data_schemas import DataSchemasResource +from nexla_sdk.resources.dashboard_transforms import DashboardTransformsResource from nexla_sdk.resources.destinations import DestinationsResource from nexla_sdk.resources.doc_containers import DocContainersResource +from nexla_sdk.resources.flow_nodes import FlowNodesResource +from nexla_sdk.resources.flow_triggers import FlowTriggersResource from nexla_sdk.resources.flows import FlowsResource +from nexla_sdk.resources.cubejs import CubeJsResource from nexla_sdk.resources.genai import GenAIResource from nexla_sdk.resources.lookups import LookupsResource from nexla_sdk.resources.marketplace import MarketplaceResource from nexla_sdk.resources.metrics import MetricsResource from nexla_sdk.resources.nexsets import NexsetsResource from nexla_sdk.resources.notifications import NotificationsResource +from nexla_sdk.resources.notification_channel_settings import ( + NotificationChannelSettingsResource, +) +from nexla_sdk.resources.notification_settings import NotificationSettingsResource +from nexla_sdk.resources.notification_types import NotificationTypesResource from nexla_sdk.resources.org_auth_configs import OrgAuthConfigsResource +from nexla_sdk.resources.org_tiers import OrgTiersResource from 
nexla_sdk.resources.organizations import OrganizationsResource from nexla_sdk.resources.projects import ProjectsResource +from nexla_sdk.resources.quarantine_settings import QuarantineSettingsResource +from nexla_sdk.resources.resource_parameters import ResourceParametersResource from nexla_sdk.resources.runtimes import RuntimesResource +from nexla_sdk.resources.search_health import SearchHealthResource from nexla_sdk.resources.self_signup import SelfSignupResource +from nexla_sdk.resources.self_signup_blocked_domains import ( + SelfSignupBlockedDomainsResource, +) +from nexla_sdk.resources.service_keys import ServiceKeysResource from nexla_sdk.resources.sources import SourcesResource from nexla_sdk.resources.teams import TeamsResource +from nexla_sdk.resources.tokens import TokensResource from nexla_sdk.resources.transforms import TransformsResource from nexla_sdk.resources.users import UsersResource +from nexla_sdk.resources.user_settings import UserSettingsResource +from nexla_sdk.resources.user_tiers import UserTiersResource +from nexla_sdk.resources.validators import ValidatorsResource +from nexla_sdk.resources.vendor_endpoints import VendorEndpointsResource +from nexla_sdk.resources.vendors import VendorsResource __all__ = [ "BaseResource", "CredentialsResource", + "CustomDataFlowsResource", "FlowsResource", "SourcesResource", "DestinationsResource", @@ -37,6 +71,7 @@ "TeamsResource", "ProjectsResource", "NotificationsResource", + "NotificationSettingsResource", "MetricsResource", "CodeContainersResource", "TransformsResource", @@ -46,8 +81,38 @@ "RuntimesResource", "MarketplaceResource", "OrgAuthConfigsResource", + "OrgTiersResource", + "AuthParametersResource", + "ResourceParametersResource", + "CatalogConfigsResource", + "VendorEndpointsResource", "GenAIResource", "SelfSignupResource", + "SelfSignupBlockedDomainsResource", "DocContainersResource", "DataSchemasResource", + "DataCredentialsGroupsResource", + "DataFlowsResource", + "FlowNodesResource", + 
"DashboardTransformsResource", + "NotificationChannelSettingsResource", + "NotificationTypesResource", + "QuarantineSettingsResource", + "UserSettingsResource", + "UserTiersResource", + "TokensResource", + "SearchHealthResource", + "CubeJsResource", + # Phase 1 resources + "ValidatorsResource", + "ServiceKeysResource", + "FlowTriggersResource", + # Phase 3 resources + "ClustersResource", + "ClusterEndpointsResource", + # Phase 4 resources + "ApiKeysResource", + "ConnectorsResource", + "VendorsResource", + "AuthTemplatesResource", ] diff --git a/nexla_sdk/resources/api_keys.py b/nexla_sdk/resources/api_keys.py new file mode 100644 index 0000000..406ed83 --- /dev/null +++ b/nexla_sdk/resources/api_keys.py @@ -0,0 +1,153 @@ +"""Resource for managing API keys (read-only).""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.api_keys.responses import ApiKey, ApiKeysIndex +from nexla_sdk.resources.base_resource import BaseResource + + +class ApiKeysResource(BaseResource): + """Resource for API keys (read-only access). + + API keys provide programmatic access to specific resources like + datasets, data sources, data sinks, and users. + + Note: + This resource only supports ``list()``, ``list_grouped()``, + ``get()``, and ``search()``. Write operations inherited from + ``BaseResource`` (e.g. ``create``, ``update``, ``delete``) are + not supported by the backend and will raise ``NotImplementedError``. + + Examples: + # List all API keys (grouped by type) + all_keys = client.api_keys.list_grouped() + + # Get a specific API key + key = client.api_keys.get(123) + + # Search API keys + keys = client.api_keys.search({"scope": "read"}) + """ + + _NOT_SUPPORTED_MSG = ( + "API keys are read-only. Create, update, and delete operations " + "are not supported. Manage API keys through their parent resources." + ) + + def __init__(self, client): + """Initialize the API keys resource. 
+ + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/api_keys" + self._model_class = ApiKey + + def list( + self, + access_role: Optional[str] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, + **kwargs, + ) -> List[ApiKey]: + """List API keys. + + Args: + access_role: Filter by access role + page: Page number (1-based) + per_page: Items per page + + Returns: + List of API keys + """ + params = kwargs.copy() + if access_role is not None: + params["access_role"] = access_role + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("GET", self._path, params=params) + return self._parse_response(response) + + def list_grouped( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + ) -> ApiKeysIndex: + """List API keys grouped by resource type. + + Returns API keys organized by their resource type: + data_sets, data_sinks, data_sources, and users. + + Args: + page: Page number (1-based) + per_page: Items per page + + Returns: + ApiKeysIndex with grouped API keys + """ + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("GET", self._path, params=params) + return ApiKeysIndex.model_validate(response) + + def get(self, api_key_id: Union[int, str]) -> ApiKey: + """Get API key by ID or key value. + + Args: + api_key_id: API key ID (int) or api_key value (string) + + Returns: + ApiKey instance + """ + path = f"{self._path}/{api_key_id}" + response = self._make_request("GET", path) + return self._parse_response(response) + + def search( + self, + filters: Dict[str, Any], + page: Optional[int] = None, + per_page: Optional[int] = None, + ) -> List[ApiKey]: + """Search API keys with filters. 
+ + Args: + filters: Search filters + page: Page number (1-based) + per_page: Items per page + + Returns: + List of matching API keys + """ + path = f"{self._path}/search" + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("POST", path, json=filters, params=params) + return [ApiKey.model_validate(k) for k in response] + + def create(self, data=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def update(self, resource_id=None, data=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def delete(self, resource_id=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def copy(self, resource_id=None, options=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def activate(self, resource_id=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def pause(self, resource_id=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) diff --git a/nexla_sdk/resources/approval_requests.py b/nexla_sdk/resources/approval_requests.py index 92623f1..1993d3b 100644 --- a/nexla_sdk/resources/approval_requests.py +++ b/nexla_sdk/resources/approval_requests.py @@ -17,11 +17,46 @@ def list_pending(self) -> List[ApprovalRequest]: response = self._make_request("GET", path) return self._parse_response(response) + def list_assigned(self) -> List[ApprovalRequest]: + path = f"{self._path}/assigned" + response = self._make_request("GET", path) + return self._parse_response(response) + def list_requested(self) -> List[ApprovalRequest]: path = f"{self._path}/requested" response = self._make_request("GET", path) return self._parse_response(response) + def list_access_requested(self) -> List[ApprovalRequest]: + path = f"{self._path}/access/requested" + response = self._make_request("GET", path) + return self._parse_response(response) + + def list_access_pending(self) -> List[ApprovalRequest]: + path = f"{self._path}/access/pending" + 
response = self._make_request("GET", path) + return self._parse_response(response) + + def list_listings_requested(self) -> List[ApprovalRequest]: + path = f"{self._path}/listings/requested" + response = self._make_request("GET", path) + return self._parse_response(response) + + def list_listings_pending(self) -> List[ApprovalRequest]: + path = f"{self._path}/listings/pending" + response = self._make_request("GET", path) + return self._parse_response(response) + + def get(self, request_id: int) -> ApprovalRequest: + path = f"{self._path}/{request_id}" + response = self._make_request("GET", path) + return self._parse_response(response) + + def show_data_set(self, data_set_id: int) -> List[ApprovalRequest]: + path = f"{self._path}/data_sets/{data_set_id}" + response = self._make_request("GET", path) + return self._parse_response(response) + def approve(self, request_id: int) -> ApprovalRequest: path = f"{self._path}/{request_id}/approve" response = self._make_request("PUT", path) @@ -30,5 +65,10 @@ def approve(self, request_id: int) -> ApprovalRequest: def reject(self, request_id: int, reason: str = "") -> ApprovalRequest: path = f"{self._path}/{request_id}/reject" body = {"reason": reason} if reason else {} - response = self._make_request("DELETE", path, json=body) + response = self._make_request("PUT", path, json=body) + return self._parse_response(response) + + def cancel(self, request_id: int) -> ApprovalRequest: + path = f"{self._path}/{request_id}/cancel" + response = self._make_request("PUT", path) return self._parse_response(response) diff --git a/nexla_sdk/resources/async_tasks.py b/nexla_sdk/resources/async_tasks.py index 0b171a7..10de2e2 100644 --- a/nexla_sdk/resources/async_tasks.py +++ b/nexla_sdk/resources/async_tasks.py @@ -13,9 +13,9 @@ def __init__(self, client): self._path = "/async_tasks" self._model_class = AsyncTask - def list(self) -> List[AsyncTask]: + def list(self, **params) -> List[AsyncTask]: """List asynchronous tasks.""" - response = 
self._make_request("GET", self._path) + response = self._make_request("GET", self._path, params=params) return self._parse_response(response) def create(self, payload: AsyncTaskCreate) -> AsyncTask: @@ -24,9 +24,9 @@ def create(self, payload: AsyncTaskCreate) -> AsyncTask: response = self._make_request("POST", self._path, json=serialized) return self._parse_response(response) - def list_of_type(self, task_type: str) -> List[AsyncTask]: + def list_of_type(self, task_type: str, **params) -> List[AsyncTask]: path = f"{self._path}/of_type/{task_type}" - response = self._make_request("GET", path) + response = self._make_request("GET", path, params=params) return self._parse_response(response) def list_by_status(self, status: str) -> List[AsyncTask]: diff --git a/nexla_sdk/resources/attribute_transforms.py b/nexla_sdk/resources/attribute_transforms.py index dc2f0c5..4ded835 100644 --- a/nexla_sdk/resources/attribute_transforms.py +++ b/nexla_sdk/resources/attribute_transforms.py @@ -59,3 +59,13 @@ def list_public(self) -> List[AttributeTransform]: path = f"{self._path}/public" response = self._make_request("GET", path) return self._parse_response(response) + + def copy(self, attribute_transform_id: int) -> AttributeTransform: + """Copy an attribute transform.""" + return super().copy(attribute_transform_id) + + def search(self, filters: Dict[str, Any], **params) -> List[AttributeTransform]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[AttributeTransform]: + return super().search_tags(tags, **params) diff --git a/nexla_sdk/resources/auth_parameters.py b/nexla_sdk/resources/auth_parameters.py new file mode 100644 index 0000000..1f1fefd --- /dev/null +++ b/nexla_sdk/resources/auth_parameters.py @@ -0,0 +1,38 @@ +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.auth_parameters.requests import ( + AuthParameterCreate, + AuthParameterUpdate, +) +from 
nexla_sdk.models.auth_parameters.responses import AuthParameter +from nexla_sdk.resources.base_resource import BaseResource + + +class AuthParametersResource(BaseResource): + """Resource for managing auth parameters.""" + + def __init__(self, client): + super().__init__(client) + self._path = "/auth_parameters" + self._model_class = AuthParameter + + def list(self, **kwargs) -> List[AuthParameter]: + return super().list(**kwargs) + + def get(self, auth_parameter_id: int) -> AuthParameter: + return super().get(auth_parameter_id) + + def create( + self, data: Union[AuthParameterCreate, Dict[str, Any]] + ) -> AuthParameter: + return super().create(data) + + def update( + self, + auth_parameter_id: int, + data: Union[AuthParameterUpdate, Dict[str, Any]], + ) -> AuthParameter: + return super().update(auth_parameter_id, data) + + def delete(self, auth_parameter_id: int) -> Dict[str, Any]: + return super().delete(auth_parameter_id) diff --git a/nexla_sdk/resources/auth_templates.py b/nexla_sdk/resources/auth_templates.py new file mode 100644 index 0000000..02653cf --- /dev/null +++ b/nexla_sdk/resources/auth_templates.py @@ -0,0 +1,170 @@ +"""Resource for managing auth templates.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.auth_templates.requests import ( + AuthTemplateCreate, + AuthTemplateUpdate, +) +from nexla_sdk.models.auth_templates.responses import AuthTemplate +from nexla_sdk.resources.base_resource import BaseResource + + +class AuthTemplatesResource(BaseResource): + """Resource for managing auth templates. + + Auth templates define authentication configurations for vendors. + Write operations (create, update, delete) require super user access. 
+ + Examples: + # List all auth templates + templates = client.auth_templates.list() + + # Get an auth template by ID + template = client.auth_templates.get(123) + + # Get an auth template by name + template = client.auth_templates.get_by_name("oauth2_standard") + + # Create an auth template (super user only) + template = client.auth_templates.create(AuthTemplateCreate( + name="new_template", + vendor_id=456, + display_name="New Auth Template" + )) + + # Update an auth template (super user only) + template = client.auth_templates.update(123, AuthTemplateUpdate( + description="Updated description" + )) + + # Delete an auth template (super user only) + client.auth_templates.delete(123) + """ + + def __init__(self, client): + """Initialize the auth templates resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/auth_templates" + self._model_class = AuthTemplate + + def list( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + access_role: Optional[str] = None, + **kwargs, + ) -> List[AuthTemplate]: + """List auth templates. + + Args: + page: Page number (1-based) + per_page: Items per page + access_role: Filter by access role (owner, collaborator, operator, admin) + + Returns: + List of auth templates + """ + return super().list(page=page, per_page=per_page, access_role=access_role, **kwargs) + + def get(self, auth_template_id: int, expand: bool = False) -> AuthTemplate: + """Get auth template by ID. + + Args: + auth_template_id: Auth template ID + expand: Include expanded references (where supported) + + Returns: + AuthTemplate instance + """ + return super().get(auth_template_id, expand=expand) + + def get_by_name(self, auth_template_name: str) -> AuthTemplate: + """Get auth template by name. 
+ + Args: + auth_template_name: Auth template name + + Returns: + AuthTemplate instance + """ + params = {"auth_template_name": auth_template_name} + response = self._make_request("GET", self._path, params=params) + return self._parse_response(response) + + def create(self, data: Union[AuthTemplateCreate, Dict[str, Any]]) -> AuthTemplate: + """Create a new auth template (super user only). + + Args: + data: Auth template creation data + + Returns: + Created auth template + """ + return super().create(data) + + def update( + self, auth_template_id: int, data: Union[AuthTemplateUpdate, Dict[str, Any]] + ) -> AuthTemplate: + """Update an auth template (super user only). + + Args: + auth_template_id: Auth template ID + data: Updated auth template data + + Returns: + Updated auth template + """ + return super().update(auth_template_id, data) + + def update_by_name( + self, auth_template_name: str, data: Union[AuthTemplateUpdate, Dict[str, Any]] + ) -> AuthTemplate: + """Update an auth template by name (super user only). + + Args: + auth_template_name: Auth template name + data: Updated auth template data + + Returns: + Updated auth template + """ + params = {"auth_template_name": auth_template_name} + serialized_data = self._serialize_data(data) + response = self._make_request("PUT", self._path, json=serialized_data, params=params) + return self._parse_response(response) + + def delete(self, auth_template_id: int) -> Dict[str, Any]: + """Delete an auth template (super user only). + + Args: + auth_template_id: Auth template ID + + Returns: + Response with status + """ + return super().delete(auth_template_id) + + def delete_by_name(self, auth_template_name: str) -> Dict[str, Any]: + """Delete an auth template by name (super user only). 
+ + Args: + auth_template_name: Auth template name + + Returns: + Response with status + """ + params = {"auth_template_name": auth_template_name} + return self._make_request("DELETE", self._path, params=params) + + def update_all(self, payload: Dict[str, Any]) -> Dict[str, Any]: + """Update auth templates via collection endpoint.""" + return self._make_request("PUT", self._path, json=payload) + + def delete_all(self, payload: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + """Delete auth templates via collection endpoint.""" + return self._make_request("DELETE", self._path, json=payload or {}) diff --git a/nexla_sdk/resources/base_resource.py b/nexla_sdk/resources/base_resource.py index a97cd48..6ce7760 100644 --- a/nexla_sdk/resources/base_resource.py +++ b/nexla_sdk/resources/base_resource.py @@ -324,6 +324,244 @@ def copy( response = self._make_request("POST", path, json=serialized_options) return self._parse_response(response) + def list_public( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + **params, + ) -> List[T]: + """ + List publicly available resources for this type. + + Args: + page: Page number (1-based) + per_page: Items per page + **params: Additional query parameters + + Returns: + List of public resources + """ + path = f"{self._path}/public" + query_params: Dict[str, Any] = {} + if page is not None: + query_params["page"] = page + if per_page is not None: + query_params["per_page"] = per_page + query_params.update(params) + response = self._make_request("GET", path, params=query_params) + return self._parse_response(response) + + def list_accessible(self, **params) -> List[T]: + """ + List resources accessible to the current user (access_insights). 
+ + Args: + **params: Optional query parameters + + Returns: + List of accessible resources + """ + path = f"{self._path}/accessible" + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def get_access_insights(self, resource_id: int, **params) -> Dict[str, Any]: + """ + Explain why the current user can access a resource. + + Args: + resource_id: Resource ID + **params: Optional query parameters + + Returns: + Access insights payload + """ + path = f"{self._path}/{resource_id}/access" + return self._make_request("GET", path, params=params) + + def get_users_access_insights(self, resource_id: int, **params) -> Dict[str, Any]: + """ + Get access insights for all users with access to a resource. + + Args: + resource_id: Resource ID + **params: Optional query parameters + + Returns: + Users access insights payload + """ + path = f"{self._path}/{resource_id}/users_access_insights" + return self._make_request("GET", path, params=params) + + def search(self, filters: Dict[str, Any], **params) -> List[T]: + """ + Search resources using filter criteria. + + Args: + filters: Search filters payload + **params: Optional query parameters + + Returns: + List of matching resources + """ + path = f"{self._path}/search" + response = self._make_request("POST", path, json=filters, params=params) + return self._parse_response(response) + + def search_tags(self, tags: List[str], **params) -> List[T]: + """ + Search resources by tags. + + Args: + tags: List of tags to search for + **params: Optional query parameters + + Returns: + List of matching resources + """ + path = f"{self._path}/search_tags" + response = self._make_request("POST", path, json=tags, params=params) + return self._parse_response(response) + + def get_docs(self, resource_id: int) -> Dict[str, Any]: + """ + Get documentation entries for a resource. 
+ + Args: + resource_id: Resource ID + + Returns: + Docs payload + """ + path = f"{self._path}/{resource_id}/docs" + return self._make_request("GET", path) + + def set_docs(self, resource_id: int, docs: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Replace documentation entries for a resource. + + Args: + resource_id: Resource ID + docs: Docs payload + + Returns: + Updated docs payload + """ + path = f"{self._path}/{resource_id}/docs" + return self._make_request("POST", path, json=docs) + + def add_docs(self, resource_id: int, docs: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Add documentation entries for a resource. + + Args: + resource_id: Resource ID + docs: Docs payload to add + + Returns: + Updated docs payload + """ + path = f"{self._path}/{resource_id}/docs" + return self._make_request("PUT", path, json=docs) + + def remove_docs( + self, resource_id: int, docs: Optional[List[Dict[str, Any]]] = None + ) -> Dict[str, Any]: + """ + Remove documentation entries for a resource. + + Args: + resource_id: Resource ID + docs: Docs payload to remove (optional) + + Returns: + Updated docs payload + """ + path = f"{self._path}/{resource_id}/docs" + return self._make_request("DELETE", path, json=docs or []) + + def get_referenced_by(self, resource_id: int) -> Dict[str, Any]: + """ + Get resources that reference this resource. + + Args: + resource_id: Resource ID + + Returns: + Referenced-by payload + """ + path = f"{self._path}/{resource_id}/referenced_by" + return self._make_request("GET", path) + + def get_control_event(self, resource_id: int, event: str) -> Dict[str, Any]: + """ + Get control event info for a resource. + + Args: + resource_id: Resource ID + event: Control event name + + Returns: + Control event payload + """ + path = f"{self._path}/{resource_id}/control/{event}" + return self._make_request("GET", path) + + def get_tags(self, resource_id: int) -> List[str]: + """ + Get tags for a resource. 
+ + Args: + resource_id: Resource ID + + Returns: + List of tags + """ + path = f"{self._path}/{resource_id}/tags" + return self._make_request("GET", path) + + def set_tags(self, resource_id: int, tags: List[str]) -> List[str]: + """ + Replace all tags for a resource. + + Args: + resource_id: Resource ID + tags: Tags to set + + Returns: + Updated list of tags + """ + path = f"{self._path}/{resource_id}/tags" + return self._make_request("POST", path, json=tags) + + def add_tags(self, resource_id: int, tags: List[str]) -> List[str]: + """ + Add tags to a resource. + + Args: + resource_id: Resource ID + tags: Tags to add + + Returns: + Updated list of tags + """ + path = f"{self._path}/{resource_id}/tags" + return self._make_request("PUT", path, json=tags) + + def remove_tags(self, resource_id: int, tags: List[str]) -> List[str]: + """ + Remove tags from a resource. + + Args: + resource_id: Resource ID + tags: Tags to remove + + Returns: + Updated list of tags + """ + path = f"{self._path}/{resource_id}/tags" + return self._make_request("DELETE", path, json=tags) + def get_audit_log(self, resource_id: int) -> List[Dict[str, Any]]: """ Get audit log for resource. 
diff --git a/nexla_sdk/resources/catalog_configs.py b/nexla_sdk/resources/catalog_configs.py new file mode 100644 index 0000000..d1448de --- /dev/null +++ b/nexla_sdk/resources/catalog_configs.py @@ -0,0 +1,51 @@ +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.catalog_configs.requests import ( + CatalogConfigCreate, + CatalogConfigUpdate, +) +from nexla_sdk.models.catalog_configs.responses import CatalogConfig +from nexla_sdk.resources.base_resource import BaseResource + + +class CatalogConfigsResource(BaseResource): + """Resource for managing catalog configs.""" + + def __init__(self, client): + super().__init__(client) + self._path = "/catalog_configs" + self._model_class = CatalogConfig + + def list(self, **kwargs) -> List[CatalogConfig]: + return super().list(**kwargs) + + def list_all(self, **params) -> List[CatalogConfig]: + response = self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + + def get(self, catalog_config_id: int) -> CatalogConfig: + return super().get(catalog_config_id) + + def create( + self, data: Union[CatalogConfigCreate, Dict[str, Any]] + ) -> CatalogConfig: + return super().create(data) + + def update( + self, catalog_config_id: int, data: Union[CatalogConfigUpdate, Dict[str, Any]] + ) -> CatalogConfig: + return super().update(catalog_config_id, data) + + def delete(self, catalog_config_id: int) -> Dict[str, Any]: + return super().delete(catalog_config_id) + + def mock_catalog_add(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/mock_catalog_add", json=payload) + + def check_job_status(self, catalog_config_id: int) -> Dict[str, Any]: + path = f"{self._path}/{catalog_config_id}/check_job_status" + return self._make_request("GET", path) + + def start_bulk_create_update(self, catalog_config_id: int) -> Dict[str, Any]: + path = f"{self._path}/{catalog_config_id}/bulk_create_update_refs" + return self._make_request("GET", 
path) diff --git a/nexla_sdk/resources/cluster_endpoints.py b/nexla_sdk/resources/cluster_endpoints.py new file mode 100644 index 0000000..8374647 --- /dev/null +++ b/nexla_sdk/resources/cluster_endpoints.py @@ -0,0 +1,138 @@ +"""Resource for managing cluster endpoints.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.clusters.requests import ( + ClusterEndpointCreate, + ClusterEndpointUpdate, +) +from nexla_sdk.models.clusters.responses import ClusterEndpoint +from nexla_sdk.resources.base_resource import BaseResource + + +class ClusterEndpointsResource(BaseResource): + """Resource for managing cluster endpoints. + + Cluster endpoints define individual service connections within a cluster. + Most operations require super user access. + + Examples: + # List cluster endpoints + endpoints = client.cluster_endpoints.list() + + # Get a specific endpoint + endpoint = client.cluster_endpoints.get(123) + + # Create an endpoint + endpoint = client.cluster_endpoints.create(ClusterEndpointCreate( + cluster_id=456, + service="data_ingestion", + protocol="https", + host="ingestion.example.com", + port=443 + )) + + # Update an endpoint + endpoint = client.cluster_endpoints.update(123, ClusterEndpointUpdate( + host="new-ingestion.example.com" + )) + + # Get audit log + audit_log = client.cluster_endpoints.get_audit_log(123) + """ + + def __init__(self, client): + """Initialize the cluster endpoints resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/cluster_endpoints" + self._model_class = ClusterEndpoint + + def list( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + **kwargs, + ) -> List[ClusterEndpoint]: + """List cluster endpoints. 
+ + Args: + page: Page number (1-based) + per_page: Items per page + + Returns: + List of cluster endpoints + """ + return super().list(page=page, per_page=per_page, **kwargs) + + def get(self, endpoint_id: int) -> ClusterEndpoint: + """Get cluster endpoint by ID. + + Args: + endpoint_id: Endpoint ID + + Returns: + ClusterEndpoint instance + """ + path = f"{self._path}/{endpoint_id}" + response = self._make_request("GET", path) + return self._parse_response(response) + + def create( + self, data: Union[ClusterEndpointCreate, Dict[str, Any]] + ) -> ClusterEndpoint: + """Create a new cluster endpoint. + + Args: + data: Endpoint creation data (requires cluster_id and service) + + Returns: + Created cluster endpoint + """ + serialized_data = self._serialize_data(data) + response = self._make_request("POST", self._path, json=serialized_data) + return self._parse_response(response) + + def update( + self, endpoint_id: int, data: Union[ClusterEndpointUpdate, Dict[str, Any]] + ) -> ClusterEndpoint: + """Update a cluster endpoint. + + Args: + endpoint_id: Endpoint ID + data: Updated endpoint data + + Returns: + Updated cluster endpoint + """ + path = f"{self._path}/{endpoint_id}" + serialized_data = self._serialize_data(data) + response = self._make_request("PUT", path, json=serialized_data) + return self._parse_response(response) + + def get_audit_log( + self, + endpoint_id: int, + page: Optional[int] = None, + per_page: Optional[int] = None, + ) -> List[Dict[str, Any]]: + """Get audit log for a cluster endpoint. 
+ + Args: + endpoint_id: Endpoint ID + page: Page number (1-based) + per_page: Items per page + + Returns: + List of audit log entries + """ + path = f"{self._path}/{endpoint_id}/audit_log" + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + return self._make_request("GET", path, params=params) diff --git a/nexla_sdk/resources/clusters.py b/nexla_sdk/resources/clusters.py new file mode 100644 index 0000000..9c343f7 --- /dev/null +++ b/nexla_sdk/resources/clusters.py @@ -0,0 +1,156 @@ +"""Resource for managing clusters.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.clusters.requests import ( + ClusterCreate, + ClusterUpdate, +) +from nexla_sdk.models.clusters.responses import Cluster +from nexla_sdk.resources.base_resource import BaseResource + + +class ClustersResource(BaseResource): + """Resource for managing clusters. + + Clusters define infrastructure endpoints for processing data flows. + This resource requires super user access for most operations. + + Examples: + # List clusters + clusters = client.clusters.list() + + # Get a specific cluster + cluster = client.clusters.get(123) + + # Create a cluster + cluster = client.clusters.create(ClusterCreate( + org_id=1, + name="Production Cluster", + region="us-west-2", + provider="aws" + )) + + # Activate a cluster + client.clusters.activate(cluster.id) + + # Set as default cluster + client.clusters.set_default(cluster.id) + """ + + def __init__(self, client): + """Initialize the clusters resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/clusters" + self._model_class = Cluster + + def list( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + access_role: Optional[str] = None, + **kwargs, + ) -> List[Cluster]: + """List clusters. 
+ + Args: + page: Page number (1-based) + per_page: Items per page + access_role: Filter by access role (owner, collaborator, operator, admin) + + Returns: + List of clusters + """ + return super().list(page=page, per_page=per_page, access_role=access_role, **kwargs) + + def get(self, cluster_id: int, expand: bool = False) -> Cluster: + """Get cluster by ID. + + Args: + cluster_id: Cluster ID + expand: Include expanded references + + Returns: + Cluster instance + """ + return super().get(cluster_id, expand=expand) + + def create(self, data: Union[ClusterCreate, Dict[str, Any]]) -> Cluster: + """Create a new cluster. + + Args: + data: Cluster creation data (requires org_id, name, and region) + + Returns: + Created cluster + """ + return super().create(data) + + def update( + self, cluster_id: int, data: Union[ClusterUpdate, Dict[str, Any]] + ) -> Cluster: + """Update a cluster. + + Args: + cluster_id: Cluster ID + data: Updated cluster data + + Returns: + Updated cluster + """ + return super().update(cluster_id, data) + + def delete(self, cluster_id: int) -> Dict[str, Any]: + """Delete a cluster. + + Args: + cluster_id: Cluster ID + + Returns: + Response with status + """ + return super().delete(cluster_id) + + def activate(self, cluster_id: int) -> Cluster: + """Activate a cluster. + + Args: + cluster_id: Cluster ID + + Returns: + Activated cluster + """ + return super().activate(cluster_id) + + def set_default(self, cluster_id: int) -> Cluster: + """Set a cluster as the default. + + The cluster must be available (active, belongs to Nexla org, + and is not private). + + Args: + cluster_id: Cluster ID + + Returns: + Updated cluster + """ + path = f"{self._path}/default/{cluster_id}" + response = self._make_request("PUT", path) + return self._parse_response(response) + + def delete_endpoint(self, cluster_id: int, endpoint_id: int) -> Dict[str, Any]: + """Delete an endpoint from a cluster. 
+ + Args: + cluster_id: Cluster ID + endpoint_id: Endpoint ID + + Returns: + Response with status + """ + path = f"{self._path}/{cluster_id}/endpoints/{endpoint_id}" + return self._make_request("DELETE", path) diff --git a/nexla_sdk/resources/code_containers.py b/nexla_sdk/resources/code_containers.py index 2fa67af..980a299 100644 --- a/nexla_sdk/resources/code_containers.py +++ b/nexla_sdk/resources/code_containers.py @@ -73,3 +73,19 @@ def list_public(self) -> List[CodeContainer]: path = f"{self._path}/public" response = self._make_request("GET", path) return self._parse_response(response) + + def list_accessible(self, **params) -> List[CodeContainer]: + return super().list_accessible(**params) + + def repo(self, code_container_id: int) -> Dict[str, Any]: + path = f"{self._path}/{code_container_id}/repo" + return self._make_request("GET", path) + + def error_functions(self) -> Dict[str, Any]: + return self._make_request("GET", "/error_functions") + + def search(self, filters: Dict[str, Any], **params) -> List[CodeContainer]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[CodeContainer]: + return super().search_tags(tags, **params) diff --git a/nexla_sdk/resources/connectors.py b/nexla_sdk/resources/connectors.py new file mode 100644 index 0000000..b6eb08a --- /dev/null +++ b/nexla_sdk/resources/connectors.py @@ -0,0 +1,137 @@ +"""Resource for managing connectors.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.connectors.requests import ConnectorUpdate +from nexla_sdk.models.connectors.responses import Connector +from nexla_sdk.resources.base_resource import BaseResource + + +class ConnectorsResource(BaseResource): + """Resource for managing connectors. + + Connectors define connection types for data sources and destinations. + The backend only supports list, get, and update operations. + + Read access requires Nexla admin org membership. 
+ Update operations require super user access. + + Note: + This resource only supports ``list()``, ``get()``, and ``update()``. + Other write operations inherited from ``BaseResource`` (e.g. + ``create``, ``delete``, ``copy``) are not supported by the backend + and will raise ``NotImplementedError``. + + Examples: + # List all connectors + connectors = client.connectors.list() + + # Filter by API compatibility + nexset_connectors = client.connectors.list(nexset_api_compatible=True) + + # Get a specific connector by ID + connector = client.connectors.get(123) + + # Get a connector by type + connector = client.connectors.get("s3") + + # Update a connector (super user only) + connector = client.connectors.update(123, ConnectorUpdate( + description="Updated description" + )) + """ + + _NOT_SUPPORTED_MSG = ( + "Connectors only support list, get, and update operations. " + "This method is not supported by the backend." + ) + + def __init__(self, client): + """Initialize the connectors resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/connectors" + self._model_class = Connector + + def list( + self, + access_role: Optional[str] = None, + nexset_api_compatible: Optional[bool] = None, + sync_api_compatible: Optional[bool] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, + **kwargs, + ) -> List[Connector]: + """List connectors. 
+ + Args: + access_role: Filter by access role + nexset_api_compatible: Filter by Nexset API compatibility + sync_api_compatible: Filter by Sync API compatibility + page: Page number (1-based) + per_page: Items per page + + Returns: + List of connectors + """ + params = kwargs.copy() + if access_role is not None: + params["access_role"] = access_role + if nexset_api_compatible is not None: + params["nexset_api_compatible"] = str(nexset_api_compatible).lower() + if sync_api_compatible is not None: + params["sync_api_compatible"] = str(sync_api_compatible).lower() + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("GET", self._path, params=params) + return self._parse_response(response) + + def get(self, connector_id: Union[int, str]) -> Connector: + """Get connector by ID or type. + + Args: + connector_id: Connector ID (int) or type (string) + + Returns: + Connector instance + """ + path = f"{self._path}/{connector_id}" + response = self._make_request("GET", path) + return self._parse_response(response) + + def update( + self, connector_id: Union[int, str], data: Union[ConnectorUpdate, Dict[str, Any]] + ) -> Connector: + """Update a connector (super user only). 
+ + Args: + connector_id: Connector ID or type + data: Updated connector data + + Returns: + Updated connector + """ + path = f"{self._path}/{connector_id}" + serialized_data = self._serialize_data(data) + response = self._make_request("PUT", path, json=serialized_data) + return self._parse_response(response) + + def create(self, data=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def delete(self, resource_id=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def copy(self, resource_id=None, options=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def activate(self, resource_id=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) + + def pause(self, resource_id=None): + raise NotImplementedError(self._NOT_SUPPORTED_MSG) diff --git a/nexla_sdk/resources/credentials.py b/nexla_sdk/resources/credentials.py index 83bb436..e1e4356 100644 --- a/nexla_sdk/resources/credentials.py +++ b/nexla_sdk/resources/credentials.py @@ -56,6 +56,34 @@ def list( return super().list(**params) + def list_all(self, **params) -> List[Credential]: + """List all credentials (admin only).""" + response = self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + + def list_public(self, **params) -> List[Credential]: + response = self._make_request("GET", f"{self._path}/public", params=params) + return self._parse_response(response) + + def list_accessible(self, **params) -> List[Credential]: + return super().list_accessible(**params) + + def credentials_schema(self, source_type: Optional[str] = None) -> Dict[str, Any]: + path = f"{self._path}/format" + if source_type: + path = f"{path}/{source_type}" + return self._make_request("GET", path) + + def db_data_types(self, source_type: str) -> Dict[str, Any]: + path = f"{self._path}/db_data_types/{source_type}" + return self._make_request("GET", path) + + def search(self, filters: Dict[str, Any], **params) -> List[Credential]: + return 
super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[Credential]: + return super().search_tags(tags, **params) + def get(self, credential_id: int, expand: bool = False) -> Credential: """ Get single credential by ID. @@ -152,6 +180,56 @@ def probe( else: return response + def probe_list_buckets(self, credential_id: int) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/list" + return self._make_request("GET", path) + + def probe_summary(self, credential_id: int) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/summary" + return self._make_request("GET", path) + + def probe_list_files( + self, credential_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/buckets" + return self._make_request("POST", path, json=payload) + + def probe_tree_request( + self, credential_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/tree" + return self._make_request("POST", path, json=payload) + + def probe_read_file( + self, credential_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/files" + return self._make_request("POST", path, json=payload) + + def probe_detect_schemas( + self, credential_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/schemas" + return self._make_request("POST", path, json=payload) + + def probe_read_sample( + self, credential_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/sample" + return self._make_request("POST", path, json=payload) + + def probe_file_download( + self, credential_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{credential_id}/probe/file/download" + return self._make_request("POST", path, json=payload) + + def probe_search_path( + self, credential_id: int, payload: Dict[str, Any] 
class CubeJsResource(BaseResource):
    """Thin wrapper around the Cube.js query endpoint (``/cubejs/query``).

    Responses are returned as raw dictionaries; no model class is attached.
    """

    def __init__(self, client):
        super().__init__(client)
        self._path = "/cubejs"
        # Raw pass-through endpoint: responses are not parsed into models.
        self._model_class = None

    def query(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """POST *payload* as a Cube.js query and return the raw response."""
        endpoint = f"{self._path}/query"
        return self._make_request("POST", endpoint, json=payload)
class CustomDataFlowsResource(BaseResource):
    """Resource for managing custom data flows (``/custom_data_flows``).

    Provides CRUD, copy, activation, metrics, search, and editing of the
    flow's associated code containers and data credentials.
    """

    # HTTP verb for each supported ``mode`` of the edit_* helpers.
    _EDIT_METHODS = {"list": "GET", "reset": "POST", "add": "PUT", "remove": "DELETE"}

    def __init__(self, client):
        super().__init__(client)
        self._path = "/custom_data_flows"
        self._model_class = CustomDataFlow

    def list(self, **kwargs) -> List[CustomDataFlow]:
        """List custom data flows visible to the caller."""
        return super().list(**kwargs)

    def list_accessible(self, **params) -> List[CustomDataFlow]:
        """List custom data flows the caller can access."""
        return super().list_accessible(**params)

    def get(self, custom_data_flow_id: int, expand: bool = False) -> CustomDataFlow:
        """Get a single custom data flow by ID."""
        return super().get(custom_data_flow_id, expand)

    def create(
        self, data: Union[CustomDataFlowCreate, Dict[str, Any]]
    ) -> CustomDataFlow:
        """Create a custom data flow."""
        return super().create(data)

    def update(
        self,
        custom_data_flow_id: int,
        data: Union[CustomDataFlowUpdate, Dict[str, Any]],
    ) -> CustomDataFlow:
        """Update a custom data flow."""
        return super().update(custom_data_flow_id, data)

    def delete(self, custom_data_flow_id: int) -> Dict[str, Any]:
        """Delete a custom data flow."""
        return super().delete(custom_data_flow_id)

    def copy(
        self, custom_data_flow_id: int, payload: Optional[Dict[str, Any]] = None
    ) -> CustomDataFlow:
        """Copy a custom data flow, optionally customizing the copy via *payload*."""
        return super().copy(custom_data_flow_id, payload)

    def activate(
        self, custom_data_flow_id: int, activate: bool = True
    ) -> CustomDataFlow:
        """Activate the flow, or pause it when ``activate=False``."""
        action = "activate" if activate else "pause"
        path = f"{self._path}/{custom_data_flow_id}/{action}"
        response = self._make_request("PUT", path)
        return self._parse_response(response)

    def get_metrics(self, custom_data_flow_id: int, **params) -> Dict[str, Any]:
        """Fetch flow metrics; keyword args are forwarded as query parameters."""
        path = f"{self._path}/{custom_data_flow_id}/metrics"
        return self._make_request("GET", path, params=params)

    def _edit_association(
        self,
        custom_data_flow_id: int,
        association: str,
        payload: Dict[str, Any],
        mode: str,
    ) -> Dict[str, Any]:
        """Shared driver for the ``code_containers``/``data_credentials`` endpoints.

        Args:
            association: Sub-resource name appended to the flow path.
            mode: One of 'list', 'reset', 'add', 'remove' (maps to the HTTP verb).

        Raises:
            ValueError: If *mode* is not a supported mode. (Previously an
                unknown mode silently fell back to a GET list call.)
        """
        try:
            method = self._EDIT_METHODS[mode]
        except KeyError:
            raise ValueError(
                f"Unknown mode {mode!r}; expected one of "
                f"{sorted(self._EDIT_METHODS)}"
            ) from None
        path = f"{self._path}/{custom_data_flow_id}/{association}"
        return self._make_request(method, path, json=payload)

    def edit_code_containers(
        self, custom_data_flow_id: int, payload: Dict[str, Any], mode: str
    ) -> Dict[str, Any]:
        """Edit the flow's code-container associations (see ``_edit_association``)."""
        return self._edit_association(
            custom_data_flow_id, "code_containers", payload, mode
        )

    def edit_data_credentials(
        self, custom_data_flow_id: int, payload: Dict[str, Any], mode: str
    ) -> Dict[str, Any]:
        """Edit the flow's data-credential associations (see ``_edit_association``)."""
        return self._edit_association(
            custom_data_flow_id, "data_credentials", payload, mode
        )

    def search(self, filters: Dict[str, Any], **params) -> List[CustomDataFlow]:
        """Search custom data flows by filter payload."""
        return super().search(filters, **params)

    def search_tags(self, tags: List[str], **params) -> List[CustomDataFlow]:
        """Search custom data flows by tags."""
        return super().search_tags(tags, **params)
class DataCredentialsGroupsResource(BaseResource):
    """Resource for the ``/data_credentials_groups`` endpoints."""

    def __init__(self, client):
        super().__init__(client)
        self._path = "/data_credentials_groups"
        self._model_class = DataCredentialsGroup

    def list(self, **kwargs) -> List[DataCredentialsGroup]:
        """List credential groups visible to the caller."""
        return super().list(**kwargs)

    def get(self, group_id: int, expand: bool = False) -> DataCredentialsGroup:
        """Fetch one credential group by ID."""
        return super().get(group_id, expand)

    def create(
        self, data: Union[DataCredentialsGroupCreate, Dict[str, Any]]
    ) -> DataCredentialsGroup:
        """Create a credential group."""
        return super().create(data)

    def update(
        self,
        group_id: int,
        data: Union[DataCredentialsGroupUpdate, Dict[str, Any]],
    ) -> DataCredentialsGroup:
        """Update a credential group."""
        return super().update(group_id, data)

    def delete(self, group_id: int) -> Dict[str, Any]:
        """Delete a credential group."""
        return super().delete(group_id)

    def list_credentials(self, group_id: int, **params) -> List[Credential]:
        """List the credentials that belong to the group."""
        endpoint = f"{self._path}/{group_id}/data_credentials"
        raw_items = self._make_request("GET", endpoint, params=params) or []
        return [Credential.model_validate(raw) for raw in raw_items]

    def remove_credentials(
        self,
        group_id: int,
        payload: Union[DataCredentialsGroupRemoveCredentials, Dict[str, Any]],
    ) -> Dict[str, Any]:
        """Detach the credentials named in *payload* from the group."""
        endpoint = f"{self._path}/{group_id}/data_credentials"
        body = self._serialize_data(payload)
        return self._make_request("DELETE", endpoint, json=body)
def create_data_source_flow(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/data_source", json=payload) + + def list_data_sink_flows(self, **params) -> List[Dict[str, Any]]: + return self._make_request("GET", f"{self._path}/data_sink", params=params) or [] + + def get_data_sink_flow(self, data_sink_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/data_sink/{data_sink_id}" + return self._make_request("GET", path, params=params) + + def create_data_sink_flow(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/data_sink", json=payload) + + def get_flow_audit_log(self, flow_node_id: int) -> Dict[str, Any]: + return self._make_request("GET", f"/flows/{flow_node_id}/audit_log") + + def get_data_source_audit_log(self, data_source_id: int) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/data_source/{data_source_id}/audit_log" + ) + + def get_data_sink_audit_log(self, data_sink_id: int) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/data_sink/{data_sink_id}/audit_log" + ) diff --git a/nexla_sdk/resources/data_schemas.py b/nexla_sdk/resources/data_schemas.py index efdf54f..81506c4 100644 --- a/nexla_sdk/resources/data_schemas.py +++ b/nexla_sdk/resources/data_schemas.py @@ -1,16 +1,60 @@ -from typing import List +from typing import Any, Dict, List, Optional from nexla_sdk.models.common import LogEntry +from nexla_sdk.models.data_schemas.responses import DataSchema from nexla_sdk.resources.base_resource import BaseResource class DataSchemasResource(BaseResource): - """Resource for data schemas (accessors + audit log only).""" + """Resource for data schemas.""" def __init__(self, client): super().__init__(client) self._path = "/data_schemas" - self._model_class = None + self._model_class = DataSchema + + def list(self, **kwargs) -> List[DataSchema]: + return super().list(**kwargs) + + def list_all(self, **params) -> 
List[DataSchema]: + response = self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + + def list_public(self, **params) -> List[DataSchema]: + response = self._make_request("GET", f"{self._path}/public", params=params) + return self._parse_response(response) + + def list_accessible(self, **params) -> List[DataSchema]: + return super().list_accessible(**params) + + def get(self, schema_id: int, expand: bool = False) -> DataSchema: + return super().get(schema_id, expand) + + def create(self, data: Dict[str, Any]) -> DataSchema: + return super().create(data) + + def update(self, schema_id: int, data: Dict[str, Any]) -> DataSchema: + return super().update(schema_id, data) + + def delete(self, schema_id: int) -> Dict[str, Any]: + return super().delete(schema_id) + + def get_metrics( + self, schema_id: int, metrics_name: Optional[str] = None, **params + ) -> Dict[str, Any]: + path = f"{self._path}/{schema_id}/metrics" + if metrics_name: + path = f"{path}/{metrics_name}" + return self._make_request("GET", path, params=params) + + def search(self, filters: Dict[str, Any], **params) -> List[DataSchema]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[DataSchema]: + return super().search_tags(tags, **params) + + def copy(self, schema_id: int, payload: Optional[Dict[str, Any]] = None) -> DataSchema: + return super().copy(schema_id, payload) def get_audit_log(self, schema_id: int, **params) -> List[LogEntry]: path = f"{self._path}/{schema_id}/audit_log" diff --git a/nexla_sdk/resources/destinations.py b/nexla_sdk/resources/destinations.py index 35b8da9..50d8c80 100644 --- a/nexla_sdk/resources/destinations.py +++ b/nexla_sdk/resources/destinations.py @@ -35,6 +35,36 @@ def list(self, **kwargs) -> List[Destination]: """ return super().list(**kwargs) + def search(self, filters: Dict[str, Any], **params) -> List[Destination]: + return super().search(filters, **params) + + def 
search_tags(self, tags: List[str], **params) -> List[Destination]: + return super().search_tags(tags, **params) + + def list_all(self, **params) -> List[Destination]: + response = self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + + def list_all_condensed(self, **params) -> List[Dict[str, Any]]: + return self._make_request("GET", f"{self._path}/all/condensed", params=params) + + def list_all_ids(self, **params) -> List[int]: + return self._make_request("GET", f"{self._path}/all/ids", params=params) + + def list_all_by_data_set(self, **params) -> List[Destination]: + response = self._make_request("GET", f"{self._path}/all/data_set", params=params) + return self._parse_response(response) + + def list_accessible(self, **params) -> List[Destination]: + return super().list_accessible(**params) + + def script_sink_config(self) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/script_sink_config") + + def update_runtime_status(self, sink_id: int, status: str) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/runtime_status/{status}" + return self._make_request("PUT", path) + def get(self, sink_id: int, expand: bool = False) -> Destination: """ Get single destination by ID. 
@@ -91,6 +121,53 @@ def delete(self, sink_id: int) -> Dict[str, Any]: """ return super().delete(sink_id) + def get_flow(self, sink_id: int) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/{sink_id}/flow") + + def get_flow_dashboard(self, sink_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{sink_id}/flow/dashboard", params=params + ) + + def get_flow_status_metrics(self, sink_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{sink_id}/flow/status_metrics", params=params + ) + + def get_flow_metrics(self, sink_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{sink_id}/flow/metrics", params=params + ) + + def get_flow_logs(self, sink_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{sink_id}/flow/logs", params=params + ) + + def get_metrics( + self, sink_id: int, metrics_name: Optional[str] = None, **params + ) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/metrics" + if metrics_name: + path = f"{path}/{metrics_name}" + return self._make_request("GET", path, params=params) + + def get_quarantine_offset( + self, sink_id: int, data_set_id: Optional[int] = None + ) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/quarantine/offset" + if data_set_id: + path = f"{path}/{data_set_id}" + return self._make_request("GET", path) + + def get_offset( + self, sink_id: int, data_set_id: Optional[int] = None + ) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/offset" + if data_set_id: + path = f"{path}/{data_set_id}" + return self._make_request("GET", path) + def activate(self, sink_id: int) -> Destination: """ Activate destination. 
@@ -130,3 +207,150 @@ def copy( """ data = options.to_dict() if options else {} return super().copy(sink_id, data) + + def probe_list_buckets(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe" + return self._make_request("GET", path) + + def probe_summary(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/summary" + return self._make_request("GET", path) + + def probe_authenticate(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/authenticate" + return self._make_request("GET", path) + + def probe_list_files(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/buckets" + return self._make_request("POST", path, json=payload) + + def probe_tree(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/tree" + return self._make_request("POST", path, json=payload) + + def probe_read_file(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/files" + return self._make_request("POST", path, json=payload) + + def probe_detect_schemas(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/schemas" + return self._make_request("POST", path, json=payload) + + def probe_quarantine_sample(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/probe/quarantine/sample" + return self._make_request("POST", path, json=payload) + + def get_quarantine_settings(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/quarantine_settings" + return self._make_request("GET", path) + + def create_quarantine_settings(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/quarantine_settings" + return self._make_request("POST", path, json=payload) + + def update_quarantine_settings(self, sink_id: int, payload: 
Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/quarantine_settings" + return self._make_request("PUT", path, json=payload) + + def delete_quarantine_settings(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/quarantine_settings" + return self._make_request("DELETE", path) + + def get_dashboard_transforms(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/dashboard_transforms" + return self._make_request("GET", path) + + def create_dashboard_transforms(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/dashboard_transforms" + return self._make_request("POST", path, json=payload) + + def update_dashboard_transforms(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/dashboard_transforms" + return self._make_request("PUT", path, json=payload) + + def delete_dashboard_transforms(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/dashboard_transforms" + return self._make_request("DELETE", path) + + def validate_config(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/config/validate", json=payload) + + def test_config(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/test_config", json=payload) + + def run_status(self, sink_id: int, run_id: Optional[int] = None) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/run_status" + if run_id is not None: + path = f"{path}/{run_id}" + return self._make_request("GET", path) + + def run_analysis(self, sink_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/run_analysis" + return self._make_request("GET", path, params=params) + + def list_flow_triggers(self, sink_id: int) -> List[Dict[str, Any]]: + path = f"{self._path}/{sink_id}/flow_triggers" + return self._make_request("GET", path) or [] + + def edit_flow_triggers( + self, 
sink_id: int, payload: Dict[str, Any], mode: str, all_triggers: bool = False + ) -> List[Dict[str, Any]]: + path = f"{self._path}/{sink_id}/flow_triggers" + if mode in {"pause", "activate"}: + path = f"{path}/{mode}" + if all_triggers: + path = f"{path}/all" + method = "PUT" if mode in {"add", "pause", "activate"} else "POST" + if mode == "remove": + method = "DELETE" + return self._make_request(method, path, json=payload) or [] + + def list_api_keys(self, sink_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys" + return self._make_request("GET", path, params=params) + + def search_api_keys(self, sink_id: int, filters: Dict[str, Any], **params) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/search" + return self._make_request("POST", path, json=filters, params=params) + + def get_api_key(self, sink_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/{api_key_id}" + return self._make_request("GET", path) + + def create_api_key(self, sink_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys" + return self._make_request("POST", path, json=payload) + + def update_api_key(self, sink_id: int, api_key_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/{api_key_id}" + return self._make_request("PUT", path, json=payload) + + def rotate_api_key(self, sink_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/{api_key_id}/rotate" + return self._make_request("PUT", path) + + def activate_api_key(self, sink_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/{api_key_id}/activate" + return self._make_request("PUT", path) + + def pause_api_key(self, sink_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/{api_key_id}/pause" + return self._make_request("PUT", path) + + def pause_all_api_keys(self, sink_id: 
int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/pause" + return self._make_request("PUT", path) + + def delete_api_key(self, sink_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/api_keys/{api_key_id}" + return self._make_request("DELETE", path) + + def trigger_quarantine_aggregation( + self, sink_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/trigger_quarantine_aggregation" + return self._make_request("POST", path, json=payload) + + def get_quarantine_aggregation(self, sink_id: int) -> Dict[str, Any]: + path = f"{self._path}/{sink_id}/quarantine_aggregation" + return self._make_request("GET", path) diff --git a/nexla_sdk/resources/doc_containers.py b/nexla_sdk/resources/doc_containers.py index ff2df5d..343dd0f 100644 --- a/nexla_sdk/resources/doc_containers.py +++ b/nexla_sdk/resources/doc_containers.py @@ -1,16 +1,41 @@ -from typing import List +from typing import Any, Dict, List, Optional from nexla_sdk.models.common import LogEntry +from nexla_sdk.models.doc_containers.responses import DocContainer from nexla_sdk.resources.base_resource import BaseResource class DocContainersResource(BaseResource): - """Resource for document containers accessors and audit logs.""" + """Resource for document containers.""" def __init__(self, client): super().__init__(client) self._path = "/doc_containers" - self._model_class = None + self._model_class = DocContainer + + def list(self, **kwargs) -> List[DocContainer]: + return super().list(**kwargs) + + def get(self, doc_container_id: int, expand: bool = False) -> DocContainer: + return super().get(doc_container_id, expand) + + def create(self, data: Dict[str, Any]) -> DocContainer: + return super().create(data) + + def update(self, doc_container_id: int, data: Dict[str, Any]) -> DocContainer: + return super().update(doc_container_id, data) + + def delete(self, doc_container_id: int) -> Dict[str, Any]: + return 
super().delete(doc_container_id) + + def copy(self, doc_container_id: int, payload: Optional[Dict[str, Any]] = None) -> DocContainer: + return super().copy(doc_container_id, payload) + + def search(self, filters: Dict[str, Any], **params) -> List[DocContainer]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[DocContainer]: + return super().search_tags(tags, **params) def get_audit_log(self, doc_container_id: int, **params) -> List[LogEntry]: path = f"{self._path}/{doc_container_id}/audit_log" diff --git a/nexla_sdk/resources/flow_nodes.py b/nexla_sdk/resources/flow_nodes.py new file mode 100644 index 0000000..ed15c6d --- /dev/null +++ b/nexla_sdk/resources/flow_nodes.py @@ -0,0 +1,30 @@ +from typing import Any, Dict, List, Optional + +from nexla_sdk.models.common import FlowNode +from nexla_sdk.resources.base_resource import BaseResource + + +class FlowNodesResource(BaseResource): + """Resource for flow nodes.""" + + def __init__(self, client): + super().__init__(client) + self._path = "/flow_nodes" + self._model_class = FlowNode + + def get(self, flow_node_id: int) -> FlowNode: + return super().get(flow_node_id) + + def update(self, flow_node_id: int, data: Dict[str, Any]) -> FlowNode: + return super().update(flow_node_id, data) + + def list_origin_nodes_condensed(self, **params) -> Dict[str, Any]: + return self._make_request("GET", "/flows/all/condensed", params=params) + + def list_flows_minimal(self, **params) -> Dict[str, Any]: + return self._make_request("GET", "/flows/all/minimal", params=params) + + def get_access_insights(self, flow_node_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"/flows/{flow_node_id}/access", params=params + ) diff --git a/nexla_sdk/resources/flow_triggers.py b/nexla_sdk/resources/flow_triggers.py new file mode 100644 index 0000000..0b4e61e --- /dev/null +++ b/nexla_sdk/resources/flow_triggers.py @@ -0,0 +1,164 @@ +"""Resource for managing flow 
triggers.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.flow_triggers.requests import FlowTriggerCreate +from nexla_sdk.models.flow_triggers.responses import FlowTrigger +from nexla_sdk.resources.base_resource import BaseResource + + +class FlowTriggersResource(BaseResource): + """Resource for managing flow triggers (orchestration events). + + Flow triggers define when one flow should trigger based on events + from another flow. They are immutable once created - to change a + trigger, delete and recreate it. + + Examples: + # List flow triggers + triggers = client.flow_triggers.list() + + # List all triggers (super user only) + all_triggers = client.flow_triggers.list_all() + + # Create a trigger: start data source when sink completes + trigger = client.flow_triggers.create(FlowTriggerCreate( + triggering_event_type="DATA_SINK_WRITE_DONE", + triggered_event_type="DATA_SOURCE_READ_START", + data_sink_id=123, # Triggering sink + data_source_id=456 # Triggered source + )) + + # Pause a trigger + client.flow_triggers.pause(trigger.id) + + # Activate a trigger + client.flow_triggers.activate(trigger.id) + + # Delete a trigger + client.flow_triggers.delete(trigger.id) + """ + + def __init__(self, client): + """Initialize the flow triggers resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/flow_triggers" + self._model_class = FlowTrigger + + def list( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + access_role: Optional[str] = None, + **kwargs, + ) -> List[FlowTrigger]: + """List flow triggers accessible by current user. 
+ + Args: + page: Page number (1-based) + per_page: Items per page + access_role: Filter by access role (owner, collaborator, operator, admin) + + Returns: + List of flow triggers + """ + return super().list(page=page, per_page=per_page, access_role=access_role, **kwargs) + + def list_all( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + ) -> List[FlowTrigger]: + """List all flow triggers (super user only). + + Args: + page: Page number (1-based) + per_page: Items per page (max: 100) + + Returns: + List of all flow triggers + + Raises: + AuthorizationError: If user is not a super user + """ + path = f"{self._path}/all" + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def get(self, trigger_id: int, expand: bool = False) -> FlowTrigger: + """Get flow trigger by ID. + + Args: + trigger_id: Flow trigger ID + expand: Include expanded references (where supported) + + Returns: + FlowTrigger instance + """ + return super().get(trigger_id, expand=expand) + + def create(self, data: Union[FlowTriggerCreate, Dict[str, Any]]) -> FlowTrigger: + """Create a new flow trigger. + + Note: Flow triggers are immutable - update is not supported. + To change a trigger, delete and recreate it. + + Args: + data: Flow trigger creation data + + Returns: + Created flow trigger + + Raises: + ValidationError: If trigger would create a cycle or duplicate + """ + return super().create(data) + + def delete(self, trigger_id: int) -> Dict[str, Any]: + """Delete a flow trigger. + + Args: + trigger_id: Flow trigger ID + + Returns: + Response with status + """ + return super().delete(trigger_id) + + def activate(self, trigger_id: int) -> FlowTrigger: + """Activate a flow trigger. 
+ + Args: + trigger_id: Flow trigger ID + + Returns: + Activated flow trigger + """ + return super().activate(trigger_id) + + def pause(self, trigger_id: int) -> FlowTrigger: + """Pause a flow trigger. + + Args: + trigger_id: Flow trigger ID + + Returns: + Paused flow trigger + """ + return super().pause(trigger_id) + + def update(self, resource_id=None, data=None): + """Flow triggers are immutable. To change a trigger, delete and recreate it.""" + raise NotImplementedError( + "Flow triggers are immutable once created. " + "To change a trigger, delete it and create a new one." + ) diff --git a/nexla_sdk/resources/flows.py b/nexla_sdk/resources/flows.py index 4c6291d..c5cbc7e 100644 --- a/nexla_sdk/resources/flows.py +++ b/nexla_sdk/resources/flows.py @@ -252,6 +252,132 @@ def pause_by_resource( response = self._make_request("PUT", path, params=params) return self._parse_response(response) + def update_by_resource(self, resource_type: str, resource_id: int, payload: Dict[str, Any]) -> FlowResponse: + path = f"/{resource_type}/{resource_id}/flow" + response = self._make_request("PUT", path, json=payload) + return self._parse_response(response) + + def copy_by_resource(self, resource_type: str, resource_id: int, payload: Optional[Dict[str, Any]] = None) -> FlowResponse: + path = f"/{resource_type}/{resource_id}/flow/copy" + response = self._make_request("POST", path, json=payload or {}) + return self._parse_response(response) + + def accessors_by_resource( + self, resource_type: str, resource_id: int, mode: str = "list", payload: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + path = f"/{resource_type}/{resource_id}/flow/accessors" + method_map = {"list": "GET", "reset": "POST", "add": "PUT", "remove": "DELETE"} + method = method_map.get(mode, "GET") + return self._make_request(method, path, json=payload or {}) + + def docs_by_resource( + self, resource_type: str, resource_id: int, mode: str = "list", payload: Optional[List[Dict[str, Any]]] = None + ) -> 
Dict[str, Any]: + path = f"/{resource_type}/{resource_id}/flow/docs" + method_map = {"list": "GET", "reset": "POST", "add": "PUT", "remove": "DELETE"} + method = method_map.get(mode, "GET") + return self._make_request(method, path, json=payload or []) + + def run_status_by_resource(self, resource_type: str, resource_id: int) -> Dict[str, Any]: + path = f"/{resource_type}/{resource_id}/flow/run_status" + return self._make_request("GET", path) + + def run_profiles_activate(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/run_profiles/activate" + return self._make_request("POST", path, json=payload) + + def run_now(self, flow_id: int, method: str = "POST") -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/run_now" + return self._make_request(method.upper(), path) + + def flow_logs(self, flow_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/logs" + return self._make_request("GET", path, params=params) + + def flow_logs_v2(self, flow_id: int, payload: Dict[str, Any], **params) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/logs_v2" + return self._make_request("POST", path, json=payload, params=params) + + def flow_metrics(self, flow_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/metrics" + return self._make_request("GET", path, params=params) + + def list_linked_flows(self, flow_id: int) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/links" + return self._make_request("GET", path) + + def create_linked_flows(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/links" + return self._make_request("POST", path, json=payload) + + def update_linked_flows(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/links" + return self._make_request("PUT", path, json=payload) + + def delete_linked_flows(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = 
f"{self._path}/{flow_id}/links" + return self._make_request("DELETE", path, json=payload) + + def delete_all_linked_flows(self, flow_id: int) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/links/all" + return self._make_request("DELETE", path) + + def insert_flow_node(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/insert_flow_node" + return self._make_request("POST", path, json=payload) + + def remove_flow_node(self, flow_id: int, payload: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/remove_flow_node" + return self._make_request("POST", path, json=payload or {}) + + def update_samples(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/samples" + return self._make_request("PUT", path, json=payload) + + def publish_rag(self, flow_id: int) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/rag/publish" + return self._make_request("PUT", path) + + def update_archival_status(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/archival/status" + return self._make_request("POST", path, json=payload) + + def restore_archival(self, flow_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/archival/restore" + return self._make_request("POST", path, json=payload) + + def run_status(self, flow_id: int, run_id: Optional[int] = None) -> Dict[str, Any]: + path = f"{self._path}/{flow_id}/run_status" + if run_id is not None: + path = f"{path}/{run_id}" + return self._make_request("GET", path) + + def search(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/search", json=payload) + + def bulk_assign_project(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("PUT", f"{self._path}/project", json=payload) + + def import_flow(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return 
self._make_request("POST", f"{self._path}/import", json=payload) + + def publish_raw(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/raw", json=payload) + + def daily_metrics(self, **params) -> Dict[str, Any]: + return self._make_request("GET", "/data_flows/metrics/daily", params=params) + + def total_metrics(self, **params) -> Dict[str, Any]: + return self._make_request("GET", "/data_flows/metrics/total", params=params) + + def active_flows_metrics(self, **params) -> Dict[str, Any]: + return self._make_request( + "GET", "/data_flows/metrics/active_flows_metrics", params=params + ) + + def get_resources_access(self, flow_id: int) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/{flow_id}/resources_access") + def docs_recommendation( self, flow_id: int ) -> Union[DocsRecommendation, Dict[str, Any]]: @@ -277,9 +403,9 @@ def get_logs( resource_id: int, run_id: int, from_ts: int, - to_ts: int = None, - page: int = None, - per_page: int = None, + to_ts: Optional[int] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, ) -> Union[FlowLogsResponse, Dict[str, Any]]: """Get flow execution logs for a specific run id of a flow. @@ -318,11 +444,11 @@ def get_metrics( resource_type: str, resource_id: int, from_date: str, - to_date: str = None, - groupby: str = None, - orderby: str = None, - page: int = None, - per_page: int = None, + to_date: Optional[str] = None, + groupby: Optional[str] = None, + orderby: Optional[str] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, ) -> Union[FlowMetricsApiResponse, Dict[str, Any]]: """Get flow metrics for a flow node keyed by resource id. 
diff --git a/nexla_sdk/resources/genai.py b/nexla_sdk/resources/genai.py index 1fd519b..52fd830 100644 --- a/nexla_sdk/resources/genai.py +++ b/nexla_sdk/resources/genai.py @@ -74,11 +74,23 @@ def get_org_setting(self, gen_ai_org_setting_id: int) -> GenAiOrgSetting: ) return GenAiOrgSetting.model_validate(response) + def update_org_setting( + self, gen_ai_org_setting_id: int, payload: GenAiOrgSettingPayload + ) -> GenAiOrgSetting: + data = self._serialize_data(payload) + response = self._make_request( + "PUT", f"/gen_ai_org_settings/{gen_ai_org_setting_id}", json=data + ) + return GenAiOrgSetting.model_validate(response) + def delete_org_setting(self, gen_ai_org_setting_id: int) -> Dict[str, Any]: return self._make_request( "DELETE", f"/gen_ai_org_settings/{gen_ai_org_setting_id}" ) + def delete_org_settings(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("DELETE", "/gen_ai_org_settings", json=payload) + def show_active_config(self, gen_ai_usage: str) -> ActiveConfigView: response = self._make_request( "GET", diff --git a/nexla_sdk/resources/lookups.py b/nexla_sdk/resources/lookups.py index 5c8dad0..7a20303 100644 --- a/nexla_sdk/resources/lookups.py +++ b/nexla_sdk/resources/lookups.py @@ -37,6 +37,13 @@ def list(self, **kwargs) -> List[Lookup]: """ return super().list(**kwargs) + def list_public(self, **params) -> List[Lookup]: + response = self._make_request("GET", f"{self._path}/public", params=params) + return self._parse_response(response) + + def list_accessible(self, **params) -> List[Lookup]: + return super().list_accessible(**params) + def get(self, data_map_id: int, expand: bool = False) -> Lookup: """ Get single lookup by ID. 
@@ -93,6 +100,21 @@ def delete(self, data_map_id: int) -> Dict[str, Any]: """ return super().delete(data_map_id) + def search(self, filters: Dict[str, Any], **params) -> List[Lookup]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[Lookup]: + return super().search_tags(tags, **params) + + def validate(self, data_map_id: int) -> Dict[str, Any]: + path = f"{self._path}/{data_map_id}/validate" + return self._make_request("GET", path) + + def download_map(self, data_map_id: int) -> str: + path = f"{self._path}/{data_map_id}/download_map" + response = self._make_request("GET", path) + return response # plain text response + def upsert_entries( self, data_map_id: int, entries: List[Dict[str, Any]] ) -> List[Dict[str, Any]]: @@ -134,6 +156,12 @@ def get_entries( path = f"/data_maps/{data_map_id}/entries/{keys_str}" return self._make_request("GET", path) + def get_entries_by_body( + self, data_map_id: int, payload: Dict[str, Any] + ) -> List[Dict[str, Any]]: + path = f"{self._path}/{data_map_id}/get_entries" + return self._make_request("POST", path, json=payload) + def delete_entries( self, data_map_id: int, entry_keys: Union[str, List[str]] ) -> Dict[str, Any]: @@ -154,3 +182,13 @@ def delete_entries( path = f"/data_maps/{data_map_id}/entries/{keys_str}" return self._make_request("DELETE", path) + + def delete_entries_by_body( + self, data_map_id: int, entry_keys: List[str] + ) -> Dict[str, Any]: + path = f"{self._path}/{data_map_id}/entries" + return self._make_request("DELETE", path, json=entry_keys) + + def probe_sample(self, data_map_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{data_map_id}/probe/sample" + return self._make_request("POST", path, json=payload) diff --git a/nexla_sdk/resources/marketplace.py b/nexla_sdk/resources/marketplace.py index e1e9465..8985231 100644 --- a/nexla_sdk/resources/marketplace.py +++ b/nexla_sdk/resources/marketplace.py @@ -58,6 +58,11 @@ def 
create_domain(self, data: MarketplaceDomainCreate) -> MarketplaceDomain: def delete_domain(self, domain_id: int) -> Dict[str, Any]: return self._make_request("DELETE", f"{self._path}/domains/{domain_id}") + def get_domain_audit_log(self, domain_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/domains/{domain_id}/audit_log", params=params + ) + # Items def list_domain_items(self, domain_id: int) -> List[MarketplaceDomainsItem]: response = self._make_request("GET", f"{self._path}/domains/{domain_id}/items") @@ -72,6 +77,44 @@ def create_domain_item( ) return self._parse_response(response, MarketplaceDomainsItem) # type: ignore[arg-type] + def get_domain_item(self, domain_id: int, item_id: int) -> MarketplaceDomainsItem: + response = self._make_request( + "GET", f"{self._path}/domains/{domain_id}/items/{item_id}" + ) + return self._parse_response(response, MarketplaceDomainsItem) # type: ignore[arg-type] + + def search_domain_items( + self, domain_id: int, payload: Dict[str, Any] + ) -> List[MarketplaceDomainsItem]: + response = self._make_request( + "POST", f"{self._path}/domains/{domain_id}/items/search", json=payload + ) + return self._parse_response(response, MarketplaceDomainsItem) # type: ignore[arg-type] + + def delist_domain_item(self, domain_id: int, item_id: int) -> Dict[str, Any]: + return self._make_request( + "DELETE", f"{self._path}/domains/{domain_id}/items/{item_id}" + ) + + def request_item_access(self, domain_id: int, item_id: int) -> Dict[str, Any]: + return self._make_request( + "POST", + f"{self._path}/domains/{domain_id}/items/{item_id}/request_access", + ) + + # Global items + def list_items(self) -> List[MarketplaceDomainsItem]: + response = self._make_request("GET", f"{self._path}/items") + return self._parse_response(response, MarketplaceDomainsItem) # type: ignore[arg-type] + + def get_item(self, item_id: int) -> MarketplaceDomainsItem: + response = self._make_request("GET", 
f"{self._path}/items/{item_id}") + return self._parse_response(response, MarketplaceDomainsItem) # type: ignore[arg-type] + + def search_items(self, payload: Dict[str, Any]) -> List[MarketplaceDomainsItem]: + response = self._make_request("POST", f"{self._path}/items/search", json=payload) + return self._parse_response(response, MarketplaceDomainsItem) # type: ignore[arg-type] + # Custodians def list_domain_custodians(self, domain_id: int) -> List[CustodianUser]: response = self._make_request( diff --git a/nexla_sdk/resources/metrics.py b/nexla_sdk/resources/metrics.py index 1a43b55..dcd1482 100644 --- a/nexla_sdk/resources/metrics.py +++ b/nexla_sdk/resources/metrics.py @@ -92,17 +92,21 @@ def get_rate_limits(self) -> Dict[str, Any]: path = "/limits" return self._make_request("GET", path) + def publish_raw(self, payload: Dict[str, Any]) -> Dict[str, Any]: + """Publish raw metrics (super user only).""" + return self._make_request("POST", "/metrics/raw", json=payload) + # Convenience wrappers for flow-level logs/metrics def get_flow_metrics( self, resource_type: str, resource_id: int, from_date: str, - to_date: str = None, - groupby: str = None, - orderby: str = None, - page: int = None, - per_page: int = None, + to_date: Optional[str] = None, + groupby: Optional[str] = None, + orderby: Optional[str] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, ) -> Dict[str, Any]: path = f"/data_flows/{resource_type}/{resource_id}/metrics" params = {"from": from_date} @@ -124,9 +128,9 @@ def get_flow_logs( resource_id: int, run_id: int, from_ts: int, - to_ts: int = None, - page: int = None, - per_page: int = None, + to_ts: Optional[int] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, ) -> Dict[str, Any]: path = f"/data_flows/{resource_type}/{resource_id}/logs" params = {"run_id": run_id, "from": from_ts} diff --git a/nexla_sdk/resources/nexsets.py b/nexla_sdk/resources/nexsets.py index 5fcfd31..87fbfa5 100644 --- 
a/nexla_sdk/resources/nexsets.py +++ b/nexla_sdk/resources/nexsets.py @@ -5,6 +5,7 @@ NexsetCreate, NexsetUpdate, ) +from nexla_sdk.models.catalog_refs.responses import CatalogRef from nexla_sdk.models.nexsets.responses import Nexset, NexsetSample from nexla_sdk.resources.base_resource import BaseResource @@ -35,6 +36,56 @@ def list(self, **kwargs) -> List[Nexset]: """ return super().list(**kwargs) + def list_all(self, **params) -> List[Nexset]: + response = self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + + def list_all_condensed(self, **params) -> List[Dict[str, Any]]: + return self._make_request("GET", f"{self._path}/all/condensed", params=params) + + def list_all_ids(self, **params) -> List[int]: + return self._make_request("GET", f"{self._path}/all/ids", params=params) + + def list_available(self, **params) -> List[Nexset]: + response = self._make_request("GET", f"{self._path}/available", params=params) + return self._parse_response(response) + + def search_available(self, filters: Dict[str, Any], **params) -> List[Nexset]: + path = f"{self._path}/available/search" + response = self._make_request("POST", path, json=filters, params=params) + return self._parse_response(response) + + def list_shared(self, **params) -> List[Nexset]: + response = self._make_request("GET", f"{self._path}/shared", params=params) + return self._parse_response(response) + + def list_public(self, **params) -> List[Nexset]: + response = self._make_request("GET", f"{self._path}/public", params=params) + return self._parse_response(response) + + def search_public_tags(self, tags: List[str], **params) -> List[Nexset]: + path = f"{self._path}/public/search_tags" + response = self._make_request("POST", path, json=tags, params=params) + return self._parse_response(response) + + def list_characteristics_search(self, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/characteristics/search", params=params + ) 
+ + def list_summary(self, **params) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/summary", params=params) + + def list_nexset_api_compatible(self, **params) -> List[Nexset]: + response = self._make_request( + "GET", f"{self._path}/nexset_api_compatible", params=params + ) + return self._parse_response(response) + + def update_runtime_status(self, set_id: int, status: str) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/runtime_status/{status}" + return self._make_request("PUT", path) + def get(self, set_id: int, expand: bool = False) -> Nexset: """ Get single nexset by ID. @@ -144,6 +195,23 @@ def get_samples( return [NexsetSample(**item) for item in response] return response + def update_samples( + self, set_id: int, samples: Any, replace: bool = False + ) -> List[NexsetSample]: + path = f"{self._path}/{set_id}/samples" + params = {"replace": replace} + response = self._make_request("PUT", path, json=samples, params=params) + if isinstance(response, list): + return [NexsetSample(**item) for item in response] + return response + + def add_samples(self, set_id: int, samples: Any) -> List[NexsetSample]: + path = f"{self._path}/{set_id}/samples" + response = self._make_request("POST", path, json=samples) + if isinstance(response, list): + return [NexsetSample(**item) for item in response] + return response + def copy(self, set_id: int, options: Optional[NexsetCopyOptions] = None) -> Nexset: """ Copy a nexset. 
@@ -158,6 +226,216 @@ def copy(self, set_id: int, options: Optional[NexsetCopyOptions] = None) -> Nexs data = options.to_dict() if options else {} return super().copy(set_id, data) + def sync_with_catalog(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/sync_with_catalog" + return self._make_request("POST", path) + + def get_flow(self, set_id: int) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/{set_id}/flow") + + def get_flow_dashboard(self, set_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/flow/dashboard" + return self._make_request("GET", path, params=params) + + def get_flow_status_metrics(self, set_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/flow/status_metrics" + return self._make_request("GET", path, params=params) + + def get_flow_metrics(self, set_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/flow/metrics" + return self._make_request("GET", path, params=params) + + def get_flow_logs(self, set_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/flow/logs" + return self._make_request("GET", path, params=params) + + def get_metrics( + self, set_id: int, metrics_name: Optional[str] = None, **params + ) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/metrics" + if metrics_name: + path = f"{path}/{metrics_name}" + return self._make_request("GET", path, params=params) + + def catalog_add(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/catalog" + return self._make_request("POST", path, json=payload) + + def semantic_schemas(self, set_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/semantic_schemas" + return self._make_request("GET", path, params=params) + + def transform(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/transform" + return self._make_request("POST", path, json=payload) + + def 
get_quarantine_settings(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/quarantine_settings" + return self._make_request("GET", path) + + def create_quarantine_settings(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/quarantine_settings" + return self._make_request("POST", path, json=payload) + + def update_quarantine_settings(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/quarantine_settings" + return self._make_request("PUT", path, json=payload) + + def delete_quarantine_settings(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/quarantine_settings" + return self._make_request("DELETE", path) + + def get_dashboard_transforms(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/dashboard_transforms" + return self._make_request("GET", path) + + def create_dashboard_transforms(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/dashboard_transforms" + return self._make_request("POST", path, json=payload) + + def update_dashboard_transforms(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/dashboard_transforms" + return self._make_request("PUT", path, json=payload) + + def delete_dashboard_transforms(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/dashboard_transforms" + return self._make_request("DELETE", path) + + def list_sharers(self, set_id: int) -> List[Dict[str, Any]]: + path = f"{self._path}/{set_id}/sharers" + return self._make_request("GET", path) or [] + + def set_sharers(self, set_id: int, payload: Dict[str, Any]) -> List[Dict[str, Any]]: + path = f"{self._path}/{set_id}/sharers" + return self._make_request("POST", path, json=payload) or [] + + def add_sharers(self, set_id: int, payload: Dict[str, Any]) -> List[Dict[str, Any]]: + path = f"{self._path}/{set_id}/sharers" + return 
self._make_request("PUT", path, json=payload) or [] + + def remove_sharers(self, set_id: int, payload: Dict[str, Any]) -> List[Dict[str, Any]]: + path = f"{self._path}/{set_id}/sharers" + return self._make_request("DELETE", path, json=payload) or [] + + def mark_shared(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/sharers/shared" + return self._make_request("PUT", path) + + def probe_quarantine_sample(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/probe/quarantine/sample" + return self._make_request("POST", path, json=payload) + + def get_quarantine_offset(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/quarantine/offset" + return self._make_request("GET", path) + + def get_offset(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/offset" + return self._make_request("GET", path) + + def get_data_update_time(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/data_update_time" + return self._make_request("GET", path) + + def get_characteristics(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/characteristics" + return self._make_request("GET", path) + + def summary(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/summary" + return self._make_request("GET", path) + + def search(self, filters: Dict[str, Any], **params) -> List[Nexset]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[Nexset]: + return super().search_tags(tags, **params) + + def vote(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/vote" + return self._make_request("POST", path, json=payload) + + def unvote(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/unvote" + return self._make_request("DELETE", path) + + def list_catalog_refs(self, **params) -> List[CatalogRef]: + path = 
f"{self._path}/catalog_refs" + response = self._make_request("GET", path, params=params) + return [CatalogRef.model_validate(item) for item in (response or [])] + + def get_catalog_ref(self, ref_id: int) -> CatalogRef: + path = f"{self._path}/catalog_refs/{ref_id}" + response = self._make_request("GET", path) + return CatalogRef.model_validate(response) + + def create_catalog_ref(self, payload: Dict[str, Any]) -> CatalogRef: + path = f"{self._path}/catalog_refs" + response = self._make_request("POST", path, json=payload) + return CatalogRef.model_validate(response) + + def update_catalog_ref(self, ref_id: int, payload: Dict[str, Any]) -> CatalogRef: + path = f"{self._path}/catalog_refs/{ref_id}" + response = self._make_request("PUT", path, json=payload) + return CatalogRef.model_validate(response) + + def delete_catalog_ref(self, ref_id: int) -> Dict[str, Any]: + path = f"{self._path}/catalog_refs/{ref_id}" + return self._make_request("DELETE", path) + + def bulk_update_catalog_refs(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("PUT", "/catalog_refs/bulk_update_refs", json=payload) + + def list_api_keys(self, set_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys" + return self._make_request("GET", path, params=params) + + def search_api_keys(self, set_id: int, filters: Dict[str, Any], **params) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/search" + return self._make_request("POST", path, json=filters, params=params) + + def get_api_key(self, set_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/{api_key_id}" + return self._make_request("GET", path) + + def create_api_key(self, set_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys" + return self._make_request("POST", path, json=payload) + + def update_api_key(self, set_id: int, api_key_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = 
f"{self._path}/{set_id}/api_keys/{api_key_id}" + return self._make_request("PUT", path, json=payload) + + def rotate_api_key(self, set_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/{api_key_id}/rotate" + return self._make_request("PUT", path) + + def activate_api_key(self, set_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/{api_key_id}/activate" + return self._make_request("PUT", path) + + def pause_api_key(self, set_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/{api_key_id}/pause" + return self._make_request("PUT", path) + + def pause_all_api_keys(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/pause" + return self._make_request("PUT", path) + + def delete_api_key(self, set_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/api_keys/{api_key_id}" + return self._make_request("DELETE", path) + + def trigger_quarantine_aggregation( + self, set_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/trigger_quarantine_aggregation" + return self._make_request("POST", path, json=payload) + + def get_quarantine_aggregation(self, set_id: int) -> Dict[str, Any]: + path = f"{self._path}/{set_id}/quarantine_aggregation" + return self._make_request("GET", path) + def docs_recommendation(self, set_id: int) -> Dict[str, Any]: """Generate AI suggestion for Nexset documentation.""" path = f"{self._path}/{set_id}/docs/recommendation" diff --git a/nexla_sdk/resources/notification_channel_settings.py b/nexla_sdk/resources/notification_channel_settings.py new file mode 100644 index 0000000..aa772b8 --- /dev/null +++ b/nexla_sdk/resources/notification_channel_settings.py @@ -0,0 +1,40 @@ +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.notification_channel_settings.requests import ( + NotificationChannelSettingCreate, + 
NotificationChannelSettingUpdate, +) +from nexla_sdk.models.notification_channel_settings.responses import ( + NotificationChannelSetting, +) +from nexla_sdk.resources.base_resource import BaseResource + + +class NotificationChannelSettingsResource(BaseResource): + """Resource for managing notification channel settings.""" + + def __init__(self, client): + super().__init__(client) + self._path = "/notification_channel_settings" + self._model_class = NotificationChannelSetting + + def list(self, **kwargs) -> List[NotificationChannelSetting]: + return super().list(**kwargs) + + def get(self, setting_id: int) -> NotificationChannelSetting: + return super().get(setting_id) + + def create( + self, data: Union[NotificationChannelSettingCreate, Dict[str, Any]] + ) -> NotificationChannelSetting: + return super().create(data) + + def update( + self, + setting_id: int, + data: Union[NotificationChannelSettingUpdate, Dict[str, Any]], + ) -> NotificationChannelSetting: + return super().update(setting_id, data) + + def delete(self, setting_id: int) -> Dict[str, Any]: + return super().delete(setting_id) diff --git a/nexla_sdk/resources/notification_settings.py b/nexla_sdk/resources/notification_settings.py new file mode 100644 index 0000000..14a99ee --- /dev/null +++ b/nexla_sdk/resources/notification_settings.py @@ -0,0 +1,339 @@ +"""Resource for managing notification settings.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.notification_settings.requests import ( + NotificationSettingCreate, + NotificationSettingUpdate, +) +from nexla_sdk.models.notification_settings.responses import ( + NotificationSetting, +) +from nexla_sdk.resources.base_resource import BaseResource + + +class NotificationSettingsResource(BaseResource): + """Resource for managing notification settings. + + Notification settings control how and when users receive notifications + for different events and resources. 
+ + Examples: + # List notification settings + settings = client.notification_settings.list() + + # Get a specific setting + setting = client.notification_settings.get(123) + + # Create a new notification setting + setting = client.notification_settings.create(NotificationSettingCreate( + notification_type_id=1, + channel="email", + priority=5 + )) + + # Update a setting + setting = client.notification_settings.update(123, NotificationSettingUpdate( + priority=10 + )) + """ + + def __init__(self, client): + """Initialize the notification settings resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/notification_settings" + self._model_class = NotificationSetting + + def list( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + notification_resource_type: Optional[str] = None, + resource_id: Optional[int] = None, + sort_by: Optional[str] = None, + sort_order: Optional[str] = None, + **kwargs, + ) -> List[NotificationSetting]: + """List notification settings for the current user. 
+ + Args: + page: Page number (1-based) + per_page: Items per page + notification_resource_type: Filter by resource type + resource_id: Filter by resource ID + sort_by: Sort field (default: priority) + sort_order: Sort order (ASC or DESC) + **kwargs: Additional query parameters + + Returns: + List of notification settings + """ + params = kwargs.copy() + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + if notification_resource_type is not None: + params["notification_resource_type"] = notification_resource_type + if resource_id is not None: + params["resource_id"] = resource_id + if sort_by is not None: + params["sort_by"] = sort_by + if sort_order is not None: + params["sort_order"] = sort_order + + response = self._make_request("GET", self._path, params=params) + return self._parse_response(response) + + def list_all( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + resource_type: Optional[str] = None, + event_type: Optional[str] = None, + status: Optional[str] = None, + ) -> List[NotificationSetting]: + """List all notification settings (super user only). + + Args: + page: Page number (1-based) + per_page: Items per page + resource_type: Filter by resource type + event_type: Filter by event type + status: Filter by status + + Returns: + List of all notification settings + """ + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + if resource_type is not None: + params["resource_type"] = resource_type + if event_type is not None: + params["event_type"] = event_type + if status is not None: + params["status"] = status + + response = self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + + def get(self, setting_id: int, expand: bool = False) -> NotificationSetting: + """Get notification setting by ID. 
+ + Args: + setting_id: Notification setting ID + expand: Include expanded details + + Returns: + Notification setting instance + """ + return super().get(setting_id, expand=expand) + + def create( + self, data: Union[NotificationSettingCreate, Dict[str, Any]] + ) -> NotificationSetting: + """Create a new notification setting. + + Args: + data: Notification setting creation data + + Returns: + Created notification setting + """ + return super().create(data) + + def update( + self, + setting_id: int, + data: Union[NotificationSettingUpdate, Dict[str, Any]], + ) -> NotificationSetting: + """Update a notification setting. + + Args: + setting_id: Notification setting ID + data: Updated notification setting data + + Returns: + Updated notification setting + """ + return super().update(setting_id, data) + + def delete(self, setting_id: int) -> Dict[str, Any]: + """Delete a notification setting. + + Args: + setting_id: Notification setting ID + + Returns: + Response with status + """ + return super().delete(setting_id) + + def show_resource_settings( + self, + resource_type: str, + resource_id: int, + notification_type_id: Optional[int] = None, + page: Optional[int] = None, + per_page: Optional[int] = None, + filter_overridden_settings: bool = False, + ) -> List[NotificationSetting]: + """Get notification settings for a specific resource. 
+ + Args: + resource_type: Resource type (data_sources, data_sets, data_sinks) + resource_id: Resource ID + notification_type_id: Optional notification type ID to filter + page: Page number (1-based) + per_page: Items per page + filter_overridden_settings: Filter overridden settings + + Returns: + List of notification settings for the resource + """ + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + if notification_type_id is not None: + params["notification_type_id"] = notification_type_id + if filter_overridden_settings: + params["filter_overridden_settings"] = "true" + + path = f"/notification_settings/{resource_type}/{resource_id}" + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def show_type_settings( + self, + notification_type_id: int, + page: Optional[int] = None, + per_page: Optional[int] = None, + sort_by: Optional[str] = None, + sort_order: Optional[str] = None, + ) -> List[NotificationSetting]: + """Get notification settings for a specific notification type. 
+ + Args: + notification_type_id: Notification type ID + page: Page number (1-based) + per_page: Items per page + sort_by: Sort field (default: priority) + sort_order: Sort order (ASC or DESC) + + Returns: + List of notification settings for the type + """ + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + if sort_by is not None: + params["sort_by"] = sort_by + if sort_order is not None: + params["sort_order"] = sort_order + + path = f"/notification_settings/notification_types/{notification_type_id}" + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def list_by( + self, payload: Dict[str, Any], method: str = "POST" + ) -> List[NotificationSetting]: + path = "/notification_setting/list" + response = self._make_request(method.upper(), path, json=payload) + return self._parse_response(response) + + def org_index( + self, + org_id: int, + page: Optional[int] = None, + per_page: Optional[int] = None, + sort_by: Optional[str] = None, + sort_order: Optional[str] = None, + ) -> List[NotificationSetting]: + """List notification settings for an organization. + + Args: + org_id: Organization ID + page: Page number (1-based) + per_page: Items per page + sort_by: Sort field (default: priority) + sort_order: Sort order (ASC or DESC) + + Returns: + List of organization notification settings + """ + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + if sort_by is not None: + params["sort_by"] = sort_by + if sort_order is not None: + params["sort_order"] = sort_order + + path = f"/orgs/{org_id}/notification_settings" + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def org_create( + self, org_id: int, data: Union[NotificationSettingCreate, Dict[str, Any]] + ) -> NotificationSetting: + """Create a notification setting for an organization. 
+ + Args: + org_id: Organization ID + data: Notification setting creation data + + Returns: + Created notification setting + """ + serialized_data = self._serialize_data(data) + path = f"/orgs/{org_id}/notification_settings" + response = self._make_request("POST", path, json=serialized_data) + return self._parse_response(response) + + def org_update( + self, + org_id: int, + notification_settings_id: int, + data: Union[NotificationSettingUpdate, Dict[str, Any]], + ) -> NotificationSetting: + """Update an organization notification setting. + + Args: + org_id: Organization ID + notification_settings_id: Notification setting ID + data: Updated notification setting data + + Returns: + Updated notification setting + """ + serialized_data = self._serialize_data(data) + path = f"/orgs/{org_id}/notification_settings/{notification_settings_id}" + response = self._make_request("PUT", path, json=serialized_data) + return self._parse_response(response) + + def org_delete(self, org_id: int, notification_settings_id: int) -> Dict[str, Any]: + """Delete an organization notification setting. 
class NotificationTypesResource(BaseResource):
    """Read-only resource for notification types."""

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/notification_types"
        self._model_class = NotificationType

    def list(self) -> List[NotificationType]:
        """List notification types available to the caller."""
        return self._parse_response(self._make_request("GET", self._path))

    def list_all(self) -> List[NotificationType]:
        """List every notification type via the '/list' endpoint."""
        return self._parse_response(self._make_request("GET", f"{self._path}/list"))
Dict[str, Any]: + def delete_all( + self, payload: Optional[Dict[str, Any]] = None, async_mode: bool = False + ) -> Dict[str, Any]: """ Delete all notifications. @@ -95,7 +103,10 @@ def delete_all(self) -> Dict[str, Any]: Response status """ path = f"{self._path}/all" - return self._make_request("DELETE", path) + params = {"async": 1} if async_mode else None + return self._make_request( + "DELETE", path, json=payload or {}, params=params or {} + ) def get_count(self, read: Optional[int] = None) -> NotificationCount: """ @@ -148,6 +159,19 @@ def mark_unread(self, notification_ids: Union[List[int], str]) -> Dict[str, Any] else: return self._make_request("PUT", path, json=notification_ids) + def mark_read_for(self, notification_id: Union[int, str], payload: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + path = f"{self._path}/{notification_id}/mark_read" + return self._make_request("PUT", path, json=payload or {}) + + def mark_unread_for( + self, notification_id: Union[int, str], payload: Optional[Dict[str, Any]] = None + ) -> Dict[str, Any]: + path = f"{self._path}/{notification_id}/mark_unread" + return self._make_request("PUT", path, json=payload or {}) + + def publish_raw(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/raw", json=payload) + # Notification Types def get_types(self, status: Optional[str] = None) -> List[NotificationType]: """ diff --git a/nexla_sdk/resources/org_auth_configs.py b/nexla_sdk/resources/org_auth_configs.py index 31fa650..dbd25b6 100644 --- a/nexla_sdk/resources/org_auth_configs.py +++ b/nexla_sdk/resources/org_auth_configs.py @@ -23,6 +23,18 @@ def list_all(self) -> List[AuthConfig]: response = self._make_request("GET", f"{self._path}/all") return self._parse_response(response) + def list_sign_on_options(self) -> Dict[str, Any]: + """List sign-on options (public).""" + return self._make_request("GET", "/sign_on_options") + + def client_config(self) -> Dict[str, Any]: + """Get 
class OrgTiersResource(BaseResource):
    """CRUD resource for organization tiers."""

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/org_tiers"
        self._model_class = OrgTier

    def list(self, **kwargs) -> List[OrgTier]:
        """List organization tiers."""
        return super().list(**kwargs)

    def get(self, org_tier_id: int) -> OrgTier:
        """Fetch one organization tier by ID."""
        return super().get(org_tier_id)

    def create(self, data: Dict[str, Any]) -> OrgTier:
        """Create an organization tier from a raw dict payload."""
        return super().create(data)

    def update(self, org_tier_id: int, data: Dict[str, Any]) -> OrgTier:
        """Update an organization tier."""
        return super().update(org_tier_id, data)

    def delete(self, org_tier_id: int) -> Dict[str, Any]:
        """Delete an organization tier and return the API status payload."""
        return super().delete(org_tier_id)
self._make_request("GET", f"{self._path}/all", params=params) + return self._parse_response(response) + def get(self, org_id: int, expand: bool = False) -> Organization: """ Get single organization by ID. @@ -94,6 +98,19 @@ def delete(self, org_id: int) -> Dict[str, Any]: """ return super().delete(org_id) + def get_login_history(self, org_id: int, **params) -> List[Dict[str, Any]]: + path = f"{self._path}/{org_id}/login_history" + response = self._make_request("GET", path, params=params) + return response or [] + + def get_metrics( + self, org_id: int, metrics_name: Optional[str] = None, **params + ) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/metrics" + if metrics_name: + path = f"{path}/{metrics_name}" + return self._make_request("GET", path, params=params) + def get_members(self, org_id: int) -> List[OrgMember]: """ Get all members in organization. @@ -211,6 +228,89 @@ def get_current_account_summary(self) -> AccountSummary: response = self._make_request("GET", path) return AccountSummary.model_validate(response) + def get_account_rate_limited(self, org_id: int) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/account_rate_limited" + return self._make_request("GET", path) + + def activate_rate_limited_sources( + self, org_id: int, status: Optional[str] = None, activate: bool = True + ) -> Dict[str, Any]: + action = "source_activate" if activate else "source_pause" + path = f"{self._path}/{org_id}/{action}" + if status: + path = f"{path}/{status}" + return self._make_request("PUT", path) + + def activate_org(self, org_id: int, activate: bool = True) -> Dict[str, Any]: + path = ( + f"{self._path}/{org_id}/activate" + if activate + else f"{self._path}/{org_id}/deactivate" + ) + return self._make_request("PUT", path) + + def set_rate_limits(self, org_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/rate_limits" + return self._make_request("PUT", path, json=payload) + + def throttle(self, org_id: int, payload: Dict[str, 
Any]) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/throttle" + return self._make_request("PUT", path, json=payload) + + def get_flows_report(self, org_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/flows_report" + return self._make_request("GET", path, params=params) + + def get_clusters(self, org_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/clusters" + return self._make_request("GET", path, params=params) + + def update_cluster(self, org_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/cluster" + return self._make_request("PUT", path, json=payload) + + def activate_cluster(self, org_id: int) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/cluster/activate" + return self._make_request("PUT", path) + + def revert_cluster(self, org_id: int) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/cluster/revert" + return self._make_request("PUT", path) + + def list_notification_settings(self, org_id: int, **params) -> List[Dict[str, Any]]: + path = f"{self._path}/{org_id}/notification_settings" + return self._make_request("GET", path, params=params) or [] + + def create_notification_settings(self, org_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/notification_settings" + return self._make_request("POST", path, json=payload) + + def update_notification_settings( + self, org_id: int, notification_settings_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/notification_settings/{notification_settings_id}" + return self._make_request("PUT", path, json=payload) + + def delete_notification_settings(self, org_id: int, notification_settings_id: int) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/notification_settings/{notification_settings_id}" + return self._make_request("DELETE", path) + + def list_data_sets_catalog_configs(self, org_id: int, **params) -> Dict[str, Any]: + path = 
f"{self._path}/{org_id}/data_sets_catalog_configs" + return self._make_request("GET", path, params=params) + + def get_data_sets_catalog_config(self, config_id: int) -> Dict[str, Any]: + path = f"{self._path}/data_sets_catalog_configs/{config_id}" + return self._make_request("GET", path) + + def enable_feature(self, org_id: int, feature_name: str) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/features/{feature_name}/enable" + return self._make_request("PUT", path) + + def disable_feature(self, org_id: int, feature_name: str) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/features/{feature_name}/disable" + return self._make_request("PUT", path) + def get_org_flow_account_metrics( self, org_id: int, from_date: str, to_date: str = None ) -> Dict[str, Any]: @@ -221,6 +321,34 @@ def get_org_flow_account_metrics( params["to"] = to_date return self._make_request("GET", path, params=params) + def get_dashboard_transforms(self, org_id: int) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/dashboard_transforms" + return self._make_request("GET", path) + + def create_dashboard_transforms(self, org_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/dashboard_transforms" + return self._make_request("POST", path, json=payload) + + def update_dashboard_transforms(self, org_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/dashboard_transforms" + return self._make_request("PUT", path, json=payload) + + def delete_dashboard_transforms(self, org_id: int) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/dashboard_transforms" + return self._make_request("DELETE", path) + + def get_flows_dashboard(self, org_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/flows/dashboard" + return self._make_request("GET", path, params=params) + + def get_flows_status_metrics(self, org_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/flows/status_metrics" + return 
self._make_request("GET", path, params=params) + + def get_flows_account_metrics(self, org_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{org_id}/flows/account_metrics" + return self._make_request("GET", path, params=params) + def get_audit_log(self, org_id: int, **params) -> List[LogEntry]: """ Get audit log for an organization. @@ -305,6 +433,16 @@ def update_custodians( def add_custodians( self, org_id: int, payload: OrgCustodiansPayload + ) -> List[CustodianUser]: + path = f"{self._path}/{org_id}/custodians" + data = self._serialize_data(payload) + response = self._make_request("PUT", path, json=data) + if isinstance(response, list): + return [CustodianUser.model_validate(item) for item in response] + return [] + + def reset_custodians( + self, org_id: int, payload: OrgCustodiansPayload ) -> List[CustodianUser]: path = f"{self._path}/{org_id}/custodians" data = self._serialize_data(payload) diff --git a/nexla_sdk/resources/projects.py b/nexla_sdk/resources/projects.py index 1961f66..85ed2ae 100644 --- a/nexla_sdk/resources/projects.py +++ b/nexla_sdk/resources/projects.py @@ -96,6 +96,20 @@ def delete(self, project_id: int) -> Dict[str, Any]: """ return super().delete(project_id) + def copy( + self, project_id: int, payload: Optional[Dict[str, Any]] = None + ) -> Project: + return super().copy(project_id, payload) + + def search(self, filters: Dict[str, Any], **params) -> List[Project]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[Project]: + return super().search_tags(tags, **params) + + def get_resources_access(self, project_id: int) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/{project_id}/resources_access") + def get_flows(self, project_id: int) -> FlowResponse: """ Get flows in project. 
@@ -108,7 +122,7 @@ def get_flows(self, project_id: int) -> FlowResponse: """ path = f"{self._path}/{project_id}/flows" response = self._make_request("GET", path) - return FlowResponse(**response) + return FlowResponse.model_validate(response) def add_flows( self, project_id: int, flows: ProjectFlowList @@ -177,6 +191,17 @@ def add_data_flows( """ return self.add_flows(project_id, flows) + def get_data_flows_legacy(self, project_id: int) -> List[ProjectDataFlow]: + """ + Legacy project flow listing endpoint. + + This calls '/projects/{id}/data_flows', which the backend still supports + for backward compatibility. + """ + path = f"{self._path}/{project_id}/data_flows" + response = self._make_request("GET", path) + return [ProjectDataFlow.model_validate(item) for item in response] + def replace_data_flows( self, project_id: int, flows: ProjectFlowList ) -> List[ProjectDataFlow]: @@ -187,6 +212,24 @@ def replace_data_flows( """ return self.replace_flows(project_id, flows) + def add_data_flows_legacy( + self, project_id: int, flows: ProjectFlowList + ) -> List[ProjectDataFlow]: + """Legacy add endpoint: '/projects/{id}/data_flows'.""" + path = f"{self._path}/{project_id}/data_flows" + payload = self._serialize_data(flows) + response = self._make_request("PUT", path, json=payload) + return [ProjectDataFlow.model_validate(item) for item in response] + + def replace_data_flows_legacy( + self, project_id: int, flows: ProjectFlowList + ) -> List[ProjectDataFlow]: + """Legacy replace endpoint: '/projects/{id}/data_flows'.""" + path = f"{self._path}/{project_id}/data_flows" + payload = self._serialize_data(flows) + response = self._make_request("POST", path, json=payload) + return [ProjectDataFlow.model_validate(item) for item in response] + def remove_data_flows( self, project_id: int, flows: Optional[ProjectFlowList] = None ) -> List[ProjectDataFlow]: @@ -197,6 +240,15 @@ def remove_data_flows( """ return self.remove_flows(project_id, flows) + def remove_data_flows_legacy( 
class QuarantineSettingsResource(BaseResource):
    """Read resource for the global quarantine-settings endpoints."""

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/quarantine_settings"
        self._model_class = QuarantineSetting

    def list(self, **kwargs) -> List[QuarantineSetting]:
        """List quarantine settings visible to the caller."""
        return super().list(**kwargs)

    def get(self, quarantine_setting_id: int) -> QuarantineSetting:
        """Fetch a single quarantine setting by ID."""
        return super().get(quarantine_setting_id)

    def list_all(self, **params) -> List[QuarantineSetting]:
        """List every quarantine setting via the '/all' endpoint."""
        raw = self._make_request("GET", f"{self._path}/all", params=params)
        return self._parse_response(raw)
class ResourceParametersResource(BaseResource):
    """CRUD resource for resource parameters."""

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/resource_parameters"
        self._model_class = ResourceParameter

    def list(self, **kwargs) -> List[ResourceParameter]:
        """List resource parameters."""
        return super().list(**kwargs)

    def get(self, resource_parameter_id: int) -> ResourceParameter:
        """Fetch one resource parameter by ID."""
        return super().get(resource_parameter_id)

    def create(
        self, data: Union[ResourceParameterCreate, Dict[str, Any]]
    ) -> ResourceParameter:
        """Create a resource parameter from a model or raw dict."""
        return super().create(data)

    def update(
        self,
        resource_parameter_id: int,
        data: Union[ResourceParameterUpdate, Dict[str, Any]],
    ) -> ResourceParameter:
        """Update a resource parameter."""
        return super().update(resource_parameter_id, data)

    def delete(self, resource_parameter_id: int) -> Dict[str, Any]:
        """Delete a resource parameter and return the API status payload."""
        return super().delete(resource_parameter_id)


class SearchHealthResource(BaseResource):
    """Read-only resource for the search-health endpoints."""

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/search_health"
        # No model class: responses are returned as raw dicts.
        self._model_class = None

    def get(self) -> Dict[str, Any]:
        """Fetch the current search-health report."""
        return self._make_request("GET", self._path)

    def test(self) -> Dict[str, Any]:
        """Run the search-health test endpoint."""
        return self._make_request("GET", f"{self._path}/test")
class SelfSignupBlockedDomainsResource(BaseResource):
    """CRUD resource for self-signup blocked domains (raw dict payloads)."""

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/self_signup_blocked_domains"
        # No model class: responses are returned as raw dicts.
        self._model_class = None

    def list(self, **kwargs) -> List[Dict[str, Any]]:
        """List blocked domains (empty list when the API returns nothing)."""
        raw = self._make_request("GET", self._path, params=kwargs)
        return raw or []

    def create(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Add a blocked domain."""
        return self._make_request("POST", self._path, json=payload)

    def update(self, domain_id: int, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Update a blocked-domain entry."""
        return self._make_request("PUT", f"{self._path}/{domain_id}", json=payload)

    def delete(self, domain_id: int) -> Dict[str, Any]:
        """Remove a blocked-domain entry."""
        return self._make_request("DELETE", f"{self._path}/{domain_id}")
class ServiceKeysResource(BaseResource):
    """Resource for managing service keys.

    Service keys are long-lived credentials for programmatic API access.
    Unlike session tokens they do not expire, and they support rotation
    plus lifecycle management (activate/pause).

    Examples:
        # List service keys
        keys = client.service_keys.list()

        # List all keys in org (admin only)
        all_keys = client.service_keys.list(all_keys=True)

        # Create a service key
        key = client.service_keys.create(ServiceKeyCreate(
            name="My Service Key",
            description="For automated pipelines"
        ))

        # Rotate a key
        rotated_key = client.service_keys.rotate(key.id)

        # Pause a key
        client.service_keys.pause(key.id)
    """

    def __init__(self, client):
        """Bind the resource to a Nexla client."""
        super().__init__(client)
        self._path = "/service_keys"
        self._model_class = ServiceKey

    def list(
        self,
        access_role: Optional[str] = None,
        all_keys: bool = False,
        page: Optional[int] = None,
        per_page: Optional[int] = None,
        **kwargs,
    ) -> List[ServiceKey]:
        """List service keys.

        Args:
            access_role: Optional access-role filter.
            all_keys: With admin access, list every key in the org;
                super users see keys across orgs.
            page: 1-based page number.
            per_page: Page size.
            **kwargs: Extra query parameters passed through unchanged.

        Returns:
            Service keys visible to the caller.
        """
        query = dict(kwargs)
        if access_role is not None:
            query["access_role"] = access_role
        if all_keys:
            query["all"] = True
        if page is not None:
            query["page"] = page
        if per_page is not None:
            query["per_page"] = per_page
        raw = self._make_request("GET", self._path, params=query)
        return self._parse_response(raw)

    def get(self, key_id: Union[int, str]) -> ServiceKey:
        """Get a service key by numeric ID or by the api_key string itself."""
        raw = self._make_request("GET", f"{self._path}/{key_id}")
        return self._parse_response(raw)

    def create(self, data: Union[ServiceKeyCreate, Dict[str, Any]]) -> ServiceKey:
        """Create a new service key.

        Args:
            data: Creation payload (name and description required).

        Returns:
            The created service key, including its api_key value.
        """
        return super().create(data)

    def update(
        self, key_id: Union[int, str], data: Union[ServiceKeyUpdate, Dict[str, Any]]
    ) -> ServiceKey:
        """Update a service key identified by ID or api_key string."""
        body = self._serialize_data(data)
        raw = self._make_request("PUT", f"{self._path}/{key_id}", json=body)
        return self._parse_response(raw)

    def delete(self, key_id: Union[int, str]) -> Dict[str, Any]:
        """Delete a service key.

        Note: a key attached to an active data source/flow cannot be
        deleted; the API answers 405 Method Not Allowed in that case.
        """
        return self._make_request("DELETE", f"{self._path}/{key_id}")

    def rotate(self, key_id: Union[int, str]) -> ServiceKey:
        """Rotate a service key, invalidating the old value immediately.

        The previous key value is kept in `last_rotated_key` for reference.

        Returns:
            The service key carrying its new api_key value.
        """
        raw = self._make_request("PUT", f"{self._path}/{key_id}/rotate")
        return self._parse_response(raw)

    def activate(self, key_id: Union[int, str]) -> ServiceKey:
        """Re-enable a paused service key."""
        raw = self._make_request("PUT", f"{self._path}/{key_id}/activate")
        return self._parse_response(raw)

    def pause(self, key_id: Union[int, str]) -> ServiceKey:
        """Temporarily disable a service key.

        A paused key cannot authenticate until reactivated.
        """
        raw = self._make_request("PUT", f"{self._path}/{key_id}/pause")
        return self._parse_response(raw)
@@ -95,6 +121,53 @@ def delete(self, source_id: int) -> Dict[str, Any]: """ return super().delete(source_id) + def get_flow(self, source_id: int) -> Dict[str, Any]: + return self._make_request("GET", f"{self._path}/{source_id}/flow") + + def get_flow_dashboard(self, source_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{source_id}/flow/dashboard", params=params + ) + + def get_flow_status_metrics(self, source_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{source_id}/flow/status_metrics", params=params + ) + + def get_flow_metrics(self, source_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{source_id}/flow/metrics", params=params + ) + + def get_flow_logs(self, source_id: int, **params) -> Dict[str, Any]: + return self._make_request( + "GET", f"{self._path}/{source_id}/flow/logs", params=params + ) + + def get_metrics( + self, source_id: int, metrics_name: Optional[str] = None, **params + ) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/metrics" + if metrics_name: + path = f"{path}/{metrics_name}" + return self._make_request("GET", path, params=params) + + def get_quarantine_offset( + self, source_id: int, data_set_id: Optional[int] = None + ) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/quarantine/offset" + if data_set_id: + path = f"{path}/{data_set_id}" + return self._make_request("GET", path) + + def get_offset( + self, source_id: int, data_set_id: Optional[int] = None + ) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/offset" + if data_set_id: + path = f"{path}/{data_set_id}" + return self._make_request("GET", path) + def activate(self, source_id: int) -> Source: """ Activate source. 
@@ -134,3 +207,168 @@ def copy( """ data = options.to_dict() if options else {} return super().copy(source_id, data) + + def probe_list_buckets(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe" + return self._make_request("GET", path) + + def probe_summary(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/summary" + return self._make_request("GET", path) + + def probe_authenticate(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/authenticate" + return self._make_request("GET", path) + + def probe_list_files(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/buckets" + return self._make_request("POST", path, json=payload) + + def probe_tree(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/tree" + return self._make_request("POST", path, json=payload) + + def probe_read_file(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/files" + return self._make_request("POST", path, json=payload) + + def probe_detect_schemas(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/schemas" + return self._make_request("POST", path, json=payload) + + def probe_quarantine_sample(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/quarantine/sample" + return self._make_request("POST", path, json=payload) + + def probe_sample(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/probe/sample" + return self._make_request("POST", path, json=payload) + + def get_reingested_files(self, source_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/file/ingest" + return self._make_request("GET", path, params=params) + + def reingest_files(self, 
source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/file/ingest" + return self._make_request("POST", path, json=payload) + + def get_quarantine_settings(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/quarantine_settings" + return self._make_request("GET", path) + + def create_quarantine_settings(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/quarantine_settings" + return self._make_request("POST", path, json=payload) + + def update_quarantine_settings(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/quarantine_settings" + return self._make_request("PUT", path, json=payload) + + def delete_quarantine_settings(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/quarantine_settings" + return self._make_request("DELETE", path) + + def get_dashboard_transforms(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/dashboard_transforms" + return self._make_request("GET", path) + + def create_dashboard_transforms(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/dashboard_transforms" + return self._make_request("POST", path, json=payload) + + def update_dashboard_transforms(self, source_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/dashboard_transforms" + return self._make_request("PUT", path, json=payload) + + def delete_dashboard_transforms(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/dashboard_transforms" + return self._make_request("DELETE", path) + + def validate_config(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", f"{self._path}/config/validate", json=payload) + + def test_config(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", 
f"{self._path}/test_config", json=payload) + + def list_data_sinks(self, source_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/data_sinks" + return self._make_request("GET", path, params=params) + + def run_now(self, source_id: int, method: str = "POST") -> Dict[str, Any]: + path = f"{self._path}/{source_id}/run_now" + return self._make_request(method.upper(), path) + + def ready(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/ready" + return self._make_request("POST", path) + + def list_runs(self, source_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/runs" + return self._make_request("GET", path, params=params) + + def list_flow_triggers(self, source_id: int) -> List[Dict[str, Any]]: + path = f"{self._path}/{source_id}/flow_triggers" + return self._make_request("GET", path) or [] + + def edit_flow_triggers( + self, source_id: int, payload: Dict[str, Any], mode: str, all_triggers: bool = False + ) -> List[Dict[str, Any]]: + path = f"{self._path}/{source_id}/flow_triggers" + if mode in {"pause", "activate"}: + path = f"{path}/{mode}" + if all_triggers: + path = f"{path}/all" + method = "PUT" if mode in {"add", "pause", "activate"} else "POST" + if mode == "remove": + method = "DELETE" + return self._make_request(method, path, json=payload) or [] + + def list_api_keys(self, source_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys" + return self._make_request("GET", path, params=params) + + def search_api_keys(self, source_id: int, filters: Dict[str, Any], **params) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/search" + return self._make_request("POST", path, json=filters, params=params) + + def get_api_key(self, source_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/{api_key_id}" + return self._make_request("GET", path) + + def create_api_key(self, source_id: int, payload: Dict[str, Any]) -> 
Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys" + return self._make_request("POST", path, json=payload) + + def update_api_key(self, source_id: int, api_key_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/{api_key_id}" + return self._make_request("PUT", path, json=payload) + + def rotate_api_key(self, source_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/{api_key_id}/rotate" + return self._make_request("PUT", path) + + def activate_api_key(self, source_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/{api_key_id}/activate" + return self._make_request("PUT", path) + + def pause_api_key(self, source_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/{api_key_id}/pause" + return self._make_request("PUT", path) + + def pause_all_api_keys(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/pause" + return self._make_request("PUT", path) + + def delete_api_key(self, source_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/api_keys/{api_key_id}" + return self._make_request("DELETE", path) + + def trigger_quarantine_aggregation( + self, source_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/trigger_quarantine_aggregation" + return self._make_request("POST", path, json=payload) + + def get_quarantine_aggregation(self, source_id: int) -> Dict[str, Any]: + path = f"{self._path}/{source_id}/quarantine_aggregation" + return self._make_request("GET", path) diff --git a/nexla_sdk/resources/teams.py b/nexla_sdk/resources/teams.py index 162828b..8eb0170 100644 --- a/nexla_sdk/resources/teams.py +++ b/nexla_sdk/resources/teams.py @@ -1,6 +1,7 @@ from typing import Any, Dict, List, Optional from nexla_sdk.models.teams.requests import TeamCreate, TeamMemberList, TeamUpdate +from 
nexla_sdk.models.common import LogEntry from nexla_sdk.models.teams.responses import Team, TeamMember from nexla_sdk.resources.base_resource import BaseResource @@ -150,3 +151,8 @@ def remove_members( data = members.to_dict() if members else None response = self._make_request("DELETE", path, json=data) return [TeamMember(**member) for member in response] + + def get_audit_log(self, team_id: int, **params) -> List[LogEntry]: + path = f"{self._path}/{team_id}/audit_log" + response = self._make_request("GET", path, params=params) + return [LogEntry.model_validate(item) for item in (response or [])] diff --git a/nexla_sdk/resources/tokens.py b/nexla_sdk/resources/tokens.py new file mode 100644 index 0000000..136f384 --- /dev/null +++ b/nexla_sdk/resources/tokens.py @@ -0,0 +1,43 @@ +from typing import Any, Dict, Optional + +from nexla_sdk.resources.base_resource import BaseResource + + +class TokensResource(BaseResource): + """Resource for auth/token endpoints.""" + + def __init__(self, client): + super().__init__(client) + self._path = "" + self._model_class = None + + def create_token(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/token", json=payload) + + def update_token(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("PUT", "/token", json=payload) + + def create_google_token(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/gtoken", json=payload) + + def refresh_token(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/token/refresh", json=payload) + + def logout(self, payload: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + return self._make_request("POST", "/token/logout", json=payload or {}) + + def create_idp_token( + self, uid: Optional[str], payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = "/token" if uid is None else f"/token/{uid}" + return self._make_request("POST", path, json=payload) + + def 
metadata(self, uid: Optional[str] = None) -> Dict[str, Any]: + path = "/metadata" if uid is None else f"/metadata/{uid}" + return self._make_request("GET", path) + + def aws_marketplace_token(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/aws_marketplace_token", json=payload) + + def resource_authorize(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/resource_authorize", json=payload) diff --git a/nexla_sdk/resources/transforms.py b/nexla_sdk/resources/transforms.py index c9b086b..a071dfd 100644 --- a/nexla_sdk/resources/transforms.py +++ b/nexla_sdk/resources/transforms.py @@ -56,3 +56,19 @@ def list_public(self) -> List[Transform]: path = f"{self._path}/public" response = self._make_request("GET", path) return self._parse_response(response) + + def search(self, filters: Dict[str, Any], **params) -> List[Transform]: + return super().search(filters, **params) + + def search_tags(self, tags: List[str], **params) -> List[Transform]: + return super().search_tags(tags, **params) + + def transform(self, transform_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{transform_id}/transform" + return self._make_request("POST", path, json=payload) + + def transform_features(self) -> Dict[str, Any]: + return self._make_request("GET", "/transform/features") + + def transform_data(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("POST", "/transform", json=payload) diff --git a/nexla_sdk/resources/user_settings.py b/nexla_sdk/resources/user_settings.py new file mode 100644 index 0000000..0dd2cf8 --- /dev/null +++ b/nexla_sdk/resources/user_settings.py @@ -0,0 +1,43 @@ +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.user_settings.requests import ( + UserSettingCreate, + UserSettingUpdate, +) +from nexla_sdk.models.user_settings.responses import UserSetting +from nexla_sdk.resources.base_resource import BaseResource + 
# --- nexla_sdk/resources/user_settings.py (new file) :: class body ---

class UserSettingsResource(BaseResource):
    """Resource for managing user settings."""

    def __init__(self, client):
        super().__init__(client)
        self._path = "/user_settings"
        self._model_class = UserSetting

    def list(self, **kwargs) -> List[UserSetting]:
        """List user settings (delegates to the base resource)."""
        return super().list(**kwargs)

    def search(self, filters: Dict[str, Any], **params) -> List[UserSetting]:
        """Search user settings by filter criteria."""
        raw = self._make_request(
            "POST", f"{self._path}/search", json=filters, params=params
        )
        return self._parse_response(raw)

    def get(self, user_setting_id: int) -> UserSetting:
        """Get one user setting by ID."""
        return super().get(user_setting_id)

    def create(self, data: Union[UserSettingCreate, Dict[str, Any]]) -> UserSetting:
        """Create a user setting."""
        return super().create(data)

    def update(
        self,
        user_setting_id: int,
        data: Union[UserSettingUpdate, Dict[str, Any]],
    ) -> UserSetting:
        """Update a user setting."""
        return super().update(user_setting_id, data)

    def delete(self, user_setting_id: int) -> Dict[str, Any]:
        """Delete a user setting."""
        return super().delete(user_setting_id)


# --- nexla_sdk/resources/user_tiers.py (new file) ---

from typing import Any, Dict, List

from nexla_sdk.models.user_tiers.responses import UserTier
from nexla_sdk.resources.base_resource import BaseResource


class UserTiersResource(BaseResource):
    """Resource for user tiers."""

    def __init__(self, client):
        super().__init__(client)
        self._path = "/user_tiers"
        self._model_class = UserTier

    def list(self, **kwargs) -> List[UserTier]:
        """List user tiers (delegates to the base resource)."""
        return super().list(**kwargs)

    def get(self, user_tier_id: int) -> UserTier:
        """Get one user tier by ID."""
        return super().get(user_tier_id)

    def create(self, data: Dict[str, Any]) -> UserTier:
        """Create a user tier."""
        return super().create(data)

    def update(self, user_tier_id: int, data: Dict[str, Any]) -> UserTier:
        """Update a user tier."""
        return super().update(user_tier_id, data)

    def delete(self, user_tier_id: int) -> Dict[str, Any]:
        """Delete a user tier."""
        return super().delete(user_tier_id)


# --- nexla_sdk/resources/users.py :: UsersResource (additions) ---
# (the patch also adds
#  `from nexla_sdk.models.users.credits import UserCredit, UserCreditCreate`)

    def list_sso_options(self) -> Dict[str, Any]:
        """List SSO options for the current context.

        FIX: previously issued its own GET to ``/users/sso_options``,
        duplicating :meth:`get_sso_options`; now delegates so there is a
        single code path for this endpoint.
        """
        return self.get_sso_options()

    # Account summary methods
    def get_account_summary(self) -> Dict[str, Any]:
        """Get the current user's account summary."""
        return self._make_request("GET", "/users/account_summary")

    def get_user_account_summary(self, user_id: int) -> Dict[str, Any]:
        """Get the account summary for a specific user."""
        return self._make_request("GET", f"{self._path}/{user_id}/account_summary")

    # Password and authentication methods
    def reset_password(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Request a password reset."""
        return self._make_request("POST", "/reset_password", json=payload)

    def set_password(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Set a new password."""
        return self._make_request("POST", "/set_password", json=payload)

    def password_entropy(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Score the entropy of a candidate password."""
        return self._make_request("POST", "/password_entropy", json=payload)

    def send_invite(self, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Send a user invitation."""
        return self._make_request("POST", "/users/send_invite", json=payload)

    def change_password(self, user_id: int, payload: Dict[str, Any]) -> Dict[str, Any]:
        """Change a user's password."""
        return self._make_request(
            "PUT", f"{self._path}/{user_id}/change_password", json=payload
        )

    def get_sso_options(self, email: Optional[str] = None) -> Dict[str, Any]:
        """Get SSO options, optionally scoped to an email address."""
        params = {}
        if email:
            params["email"] = email
        return self._make_request("GET", "/users/sso_options", params=params)

    # Audit and history methods
    def get_audit_history(self, user_id: int, **params) -> List[Dict[str, Any]]:
        """Get a user's audit history; empty list when none."""
        path = f"{self._path}/{user_id}/audit_history"
        return self._make_request("GET", path, params=params) or []

    def get_login_history(self, user_id: int, **params) -> List[Dict[str, Any]]:
        """Get a user's login history; empty list when none."""
        path = f"{self._path}/{user_id}/login_history"
        return self._make_request("GET", path, params=params) or []

    def get_api_key_events(self, user_id: int, **params) -> List[Dict[str, Any]]:
        """Get API-key events for a user; empty list when none."""
        path = f"{self._path}/{user_id}/api_keys/events"
        return self._make_request("GET", path, params=params) or []

    def get_audit_log(self, user_id: int, **params) -> List[Dict[str, Any]]:
        """Get the audit log for a user; empty list for non-list responses."""
        path = f"{self._path}/{user_id}/audit_log"
        response = self._make_request("GET", path, params=params)
        if isinstance(response, list):
            return response
        return []

    def get_resource_audit_log(
        self, user_id: int, resource_type: str, **params
    ) -> List[Dict[str, Any]]:
        """Get the audit log for one of the user's resource types."""
        path = f"{self._path}/{user_id}/{resource_type}/audit_log"
        response = self._make_request("GET", path, params=params)
        return response or []

    # Metrics methods
    def get_metrics(
        self, user_id: int, metrics_name: Optional[str] = None, **params
    ) -> Dict[str, Any]:
        """Get metrics for a user; *metrics_name* selects a specific metric."""
        path = f"{self._path}/{user_id}/metrics"
        if metrics_name:
            path = f"{path}/{metrics_name}"
        return self._make_request("GET", path, params=params)

    def get_orgs(self, user_id: int, **params) -> List[Dict[str, Any]]:
        """List organizations the user belongs to; empty list when none."""
        path = f"{self._path}/{user_id}/orgs"
        return self._make_request("GET", path, params=params) or []

    # Dashboard transforms
    def get_dashboard_transforms(self, user_id: int) -> Dict[str, Any]:
        """Get dashboard transforms configured for the user."""
        return self._make_request(
            "GET", f"{self._path}/{user_id}/dashboard_transforms"
        )

    def create_dashboard_transforms(
        self, user_id: int, payload: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Create dashboard transforms for the user."""
        return self._make_request(
            "POST", f"{self._path}/{user_id}/dashboard_transforms", json=payload
        )

    def update_dashboard_transforms(
        self, user_id: int, payload: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Update dashboard transforms for the user."""
        return self._make_request(
            "PUT", f"{self._path}/{user_id}/dashboard_transforms", json=payload
        )

    def delete_dashboard_transforms(self, user_id: int) -> Dict[str, Any]:
        """Delete dashboard transforms for the user."""
        return self._make_request(
            "DELETE", f"{self._path}/{user_id}/dashboard_transforms"
        )

    # Flows dashboard and metrics
    def get_flows_dashboard(self, user_id: int, **params) -> Dict[str, Any]:
        """Get the flows dashboard for a user."""
        return self._make_request(
            "GET", f"{self._path}/{user_id}/flows/dashboard", params=params
        )

    def get_flows_status_metrics(self, user_id: int, **params) -> Dict[str, Any]:
        """Get flow status metrics for a user."""
        return self._make_request(
            "GET", f"{self._path}/{user_id}/flows/status_metrics", params=params
        )

    def get_flows_account_metrics(self, user_id: int, **params) -> Dict[str, Any]:
        """Get flow account metrics for a user."""
        return self._make_request(
            "GET", f"{self._path}/{user_id}/flows/account_metrics", params=params
        )

    # Account activation and locking
    def activate(self, user_id: int, activate: bool = True) -> Dict[str, Any]:
        """Activate (or, with ``activate=False``, deactivate) a user account."""
        action = "activate" if activate else "deactivate"
        return self._make_request("PUT", f"{self._path}/{user_id}/{action}")

    def lock_account(self, user_id: int) -> Dict[str, Any]:
        """Lock a user account."""
        return self._make_request("PUT", f"{self._path}/{user_id}/lock_account")

    def unlock_account(self, user_id: int) -> Dict[str, Any]:
        """Unlock a user account."""
        return self._make_request("PUT", f"{self._path}/{user_id}/unlock_account")

    def activate_rate_limited_sources(
        self, user_id: int, status: Optional[str] = None, activate: bool = True
    ) -> Dict[str, Any]:
        """Activate or pause the user's rate-limited sources.

        Args:
            user_id: User ID.
            status: Optional status filter appended to the path.
            activate: ``True`` activates sources, ``False`` pauses them.
        """
        action = "source_activate" if activate else "source_pause"
        path = f"{self._path}/{user_id}/{action}"
        if status:
            path = f"{path}/{status}"
        return self._make_request("PUT", path)

    def get_account_rate_limited(self, user_id: int) -> Dict[str, Any]:
        """Check whether the user's account is rate limited."""
        return self._make_request(
            "GET", f"{self._path}/{user_id}/account_rate_limited"
        )
@@ -219,6 +353,93 @@ def transfer_resources( data = {"org_id": org_id, "delegate_owner_id": delegate_owner_id} return self._make_request("PUT", path, json=data) + # User credits + def list_credits(self, user_id: int, **params) -> List[UserCredit]: + path = f"{self._path}/{user_id}/credits" + response = self._make_request("GET", path, params=params) + return [UserCredit.model_validate(item) for item in (response or [])] + + def create_credit(self, user_id: int, payload: UserCreditCreate) -> UserCredit: + path = f"{self._path}/{user_id}/credits" + data = self._serialize_data(payload) + response = self._make_request("POST", path, json=data) + return UserCredit.model_validate(response) + + def get_credit(self, user_id: int, credit_id: int) -> UserCredit: + path = f"{self._path}/{user_id}/credits/{credit_id}" + response = self._make_request("GET", path) + return UserCredit.model_validate(response) + + def use_credits(self, user_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/credits/use" + return self._make_request("PUT", path, json=payload) + + def use_credit(self, user_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + """Alias for use_credits.""" + return self.use_credits(user_id, payload) + + def refresh_credits(self, user_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/credits/refresh" + return self._make_request("PUT", path, json=payload) + + def refresh_credit(self, user_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + """Alias for refresh_credits.""" + return self.refresh_credits(user_id, payload) + + def expire_credit(self, user_id: int, credit_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/credits/{credit_id}/expire" + return self._make_request("PUT", path) + + def delete_credit(self, user_id: int, credit_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/credits/{credit_id}" + return self._make_request("DELETE", path) + + # API keys + def 
list_api_keys(self, user_id: int, **params) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys" + return self._make_request("GET", path, params=params) + + def search_api_keys( + self, user_id: int, filters: Dict[str, Any], **params + ) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/search" + return self._make_request("POST", path, json=filters, params=params) + + def get_api_key(self, user_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/{api_key_id}" + return self._make_request("GET", path) + + def create_api_key(self, user_id: int, payload: Dict[str, Any]) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys" + return self._make_request("POST", path, json=payload) + + def update_api_key( + self, user_id: int, api_key_id: int, payload: Dict[str, Any] + ) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/{api_key_id}" + return self._make_request("PUT", path, json=payload) + + def rotate_api_key(self, user_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/{api_key_id}/rotate" + return self._make_request("PUT", path) + + def activate_api_key(self, user_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/{api_key_id}/activate" + return self._make_request("PUT", path) + + def pause_api_key(self, user_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/{api_key_id}/pause" + return self._make_request("PUT", path) + + def pause_all_api_keys(self, user_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/pause" + return self._make_request("PUT", path) + + def delete_api_key(self, user_id: int, api_key_id: int) -> Dict[str, Any]: + path = f"{self._path}/{user_id}/api_keys/{api_key_id}" + return self._make_request("DELETE", path) + + # Extended metrics def get_account_metrics( self, user_id: int, diff --git a/nexla_sdk/resources/validators.py 
b/nexla_sdk/resources/validators.py new file mode 100644 index 0000000..4e34511 --- /dev/null +++ b/nexla_sdk/resources/validators.py @@ -0,0 +1,178 @@ +"""Resource for managing validators (data validation rules).""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.validators.requests import ( + ValidatorCopyOptions, + ValidatorCreate, + ValidatorUpdate, +) +from nexla_sdk.models.validators.responses import Validator +from nexla_sdk.resources.base_resource import BaseResource + + +class ValidatorsResource(BaseResource): + """Resource for managing validators (data validation rules). + + Validators are code containers used to validate data within flows. + They support various code types including Python, JavaScript, and Jolt. + + Examples: + # List validators + validators = client.validators.list() + + # Get a specific validator + validator = client.validators.get(123) + + # Create a Python validator + validator = client.validators.create(ValidatorCreate( + name="My Validator", + code_type="python", + code="def validate(record): return record['value'] > 0" + )) + + # List public validators + public_validators = client.validators.list_public() + """ + + def __init__(self, client): + """Initialize the validators resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/validators" + self._model_class = Validator + + def list( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + access_role: Optional[str] = None, + expand: bool = False, + **kwargs, + ) -> List[Validator]: + """List validators with optional filters. 
+ + Args: + page: Page number (1-based) + per_page: Items per page + access_role: Filter by access role (owner, collaborator, operator, admin) + expand: Include full details for each validator + **kwargs: Additional query parameters + + Returns: + List of validators + """ + if expand: + kwargs["expand"] = 1 + return super().list( + page=page, per_page=per_page, access_role=access_role, **kwargs + ) + + def list_public( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + ) -> List[Validator]: + """List publicly available validators. + + Args: + page: Page number (1-based) + per_page: Items per page + + Returns: + List of public validators + """ + path = f"{self._path}/public" + params = {} + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def get(self, validator_id: int, expand: bool = False) -> Validator: + """Get validator by ID. + + Args: + validator_id: Validator ID + expand: Include expanded references + + Returns: + Validator instance + """ + return super().get(validator_id, expand=expand) + + def create(self, data: Union[ValidatorCreate, Dict[str, Any]]) -> Validator: + """Create a new validator. + + Args: + data: Validator creation data + + Returns: + Created validator + """ + return super().create(data) + + def update( + self, validator_id: int, data: Union[ValidatorUpdate, Dict[str, Any]] + ) -> Validator: + """Update a validator. + + Args: + validator_id: Validator ID + data: Updated validator data + + Returns: + Updated validator + """ + return super().update(validator_id, data) + + def delete(self, validator_id: int) -> Dict[str, Any]: + """Delete a validator. 
+ + Args: + validator_id: Validator ID + + Returns: + Response with status + """ + return super().delete(validator_id) + + def copy( + self, + validator_id: int, + options: Optional[Union[ValidatorCopyOptions, Dict[str, Any]]] = None, + ) -> Validator: + """Copy a validator. + + Args: + validator_id: Validator ID to copy + options: Copy options (owner_id, org_id, etc.) + + Returns: + Copied validator + """ + return super().copy(validator_id, options) + + def search_tags(self, tags: List[str]) -> List[Validator]: + """Search validators by tags. + + Args: + tags: List of tags to search for + + Returns: + List of validators matching the tags + """ + path = f"{self._path}/search_tags" + response = self._make_request("POST", path, json=tags) + return self._parse_response(response) + + def search(self, filters: Dict[str, Any]) -> List[Validator]: + """Search validators (alias to search_tags endpoint).""" + path = f"{self._path}/search" + response = self._make_request("POST", path, json=filters) + return self._parse_response(response) diff --git a/nexla_sdk/resources/vendor_endpoints.py b/nexla_sdk/resources/vendor_endpoints.py new file mode 100644 index 0000000..bef8073 --- /dev/null +++ b/nexla_sdk/resources/vendor_endpoints.py @@ -0,0 +1,44 @@ +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.vendor_endpoints.requests import ( + VendorEndpointCreate, + VendorEndpointUpdate, +) +from nexla_sdk.models.vendor_endpoints.responses import VendorEndpoint +from nexla_sdk.resources.base_resource import BaseResource + + +class VendorEndpointsResource(BaseResource): + """Resource for managing vendor endpoints.""" + + def __init__(self, client): + super().__init__(client) + self._path = "/vendor_endpoints" + self._model_class = VendorEndpoint + + def list(self, **kwargs) -> List[VendorEndpoint]: + return super().list(**kwargs) + + def get(self, vendor_endpoint_id: int) -> VendorEndpoint: + return super().get(vendor_endpoint_id) + + def create( + self, 
data: Union[VendorEndpointCreate, Dict[str, Any]] + ) -> VendorEndpoint: + return super().create(data) + + def update( + self, + vendor_endpoint_id: int, + data: Union[VendorEndpointUpdate, Dict[str, Any]], + ) -> VendorEndpoint: + return super().update(vendor_endpoint_id, data) + + def delete(self, vendor_endpoint_id: int) -> Dict[str, Any]: + return super().delete(vendor_endpoint_id) + + def update_all(self, payload: Dict[str, Any]) -> Dict[str, Any]: + return self._make_request("PUT", self._path, json=payload) + + def delete_all(self, payload: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: + return self._make_request("DELETE", self._path, json=payload) diff --git a/nexla_sdk/resources/vendors.py b/nexla_sdk/resources/vendors.py new file mode 100644 index 0000000..e54a755 --- /dev/null +++ b/nexla_sdk/resources/vendors.py @@ -0,0 +1,197 @@ +"""Resource for managing vendors.""" + +from typing import Any, Dict, List, Optional, Union + +from nexla_sdk.models.vendors.requests import VendorCreate, VendorUpdate +from nexla_sdk.models.vendors.responses import Vendor +from nexla_sdk.resources.base_resource import BaseResource + + +class VendorsResource(BaseResource): + """Resource for managing vendors. + + Vendors represent third-party service providers that can be + connected via auth templates and endpoints. + + Write operations (create, update, delete) require super user access. 
+ + Examples: + # List all vendors + vendors = client.vendors.list() + + # Get a vendor by ID + vendor = client.vendors.get(123) + + # Get a vendor by name + vendor = client.vendors.get_by_name("salesforce") + + # Create a vendor (super user only) + vendor = client.vendors.create(VendorCreate( + name="new_vendor", + display_name="New Vendor" + )) + + # Update a vendor (super user only) + vendor = client.vendors.update(123, VendorUpdate( + description="Updated description" + )) + + # Delete a vendor (super user only) + client.vendors.delete(123) + """ + + def __init__(self, client): + """Initialize the vendors resource. + + Args: + client: Nexla client instance + """ + super().__init__(client) + self._path = "/vendors" + self._model_class = Vendor + + def list( + self, + expand: bool = False, + page: Optional[int] = None, + per_page: Optional[int] = None, + **kwargs, + ) -> List[Vendor]: + """List vendors. + + Args: + expand: Include nested auth_templates and vendor_endpoints + page: Page number (1-based) + per_page: Items per page + + Returns: + List of vendors + """ + params = kwargs.copy() + if expand: + params["expand"] = "true" + if page is not None: + params["page"] = page + if per_page is not None: + params["per_page"] = per_page + response = self._make_request("GET", self._path, params=params) + return self._parse_response(response) + + def get(self, vendor_id: int, expand: bool = False) -> Vendor: + """Get vendor by ID. + + Args: + vendor_id: Vendor ID + expand: Include nested auth_templates and vendor_endpoints + + Returns: + Vendor instance + """ + path = f"{self._path}/{vendor_id}" + params = {"expand": "true"} if expand else {} + response = self._make_request("GET", path, params=params) + return self._parse_response(response) + + def get_by_name(self, vendor_name: str, expand: bool = False) -> Vendor: + """Get vendor by name. 
+ + Args: + vendor_name: Vendor name + expand: Include nested auth_templates and vendor_endpoints + + Returns: + Vendor instance + """ + params = {"vendor_name": vendor_name} + if expand: + params["expand"] = "true" + response = self._make_request("GET", self._path, params=params) + return self._parse_response(response) + + def create(self, data: Union[VendorCreate, Dict[str, Any]]) -> Vendor: + """Create a new vendor (super user only). + + Args: + data: Vendor creation data + + Returns: + Created vendor + """ + return super().create(data) + + def update( + self, vendor_id: int, data: Union[VendorUpdate, Dict[str, Any]] + ) -> Vendor: + """Update a vendor (super user only). + + Args: + vendor_id: Vendor ID + data: Updated vendor data + + Returns: + Updated vendor + """ + return super().update(vendor_id, data) + + def update_by_name( + self, vendor_name: str, data: Union[VendorUpdate, Dict[str, Any]] + ) -> Vendor: + """Update a vendor by name (super user only). + + Args: + vendor_name: Vendor name + data: Updated vendor data + + Returns: + Updated vendor + """ + params = {"vendor_name": vendor_name} + serialized_data = self._serialize_data(data) + response = self._make_request("PUT", self._path, json=serialized_data, params=params) + return self._parse_response(response) + + def delete(self, vendor_id: int) -> Dict[str, Any]: + """Delete a vendor (super user only). + + Args: + vendor_id: Vendor ID + + Returns: + Response with status + """ + return super().delete(vendor_id) + + def delete_by_name(self, vendor_name: str) -> Dict[str, Any]: + """Delete a vendor by name (super user only). 
+ + Args: + vendor_name: Vendor name + + Returns: + Response with status + """ + params = {"vendor_name": vendor_name} + return self._make_request("DELETE", self._path, params=params) + + def update_all(self, payload: Dict[str, Any]) -> Dict[str, Any]: + """Update vendors via collection endpoint.""" + return self._make_request("PUT", self._path, json=payload) + + def delete_all(self, payload: Dict[str, Any] = None) -> Dict[str, Any]: + """Delete vendors via collection endpoint.""" + return self._make_request("DELETE", self._path, json=payload or {}) + + def delete_auth_template( + self, vendor_id: int, auth_template_id: int + ) -> Dict[str, Any]: + """Delete an auth template from a vendor (super user only). + + Args: + vendor_id: Vendor ID + auth_template_id: Auth template ID + + Returns: + Response with status + """ + path = f"{self._path}/{vendor_id}/auth_templates/{auth_template_id}" + return self._make_request("DELETE", path) diff --git a/package.json b/package.json new file mode 100644 index 0000000..ac47710 --- /dev/null +++ b/package.json @@ -0,0 +1,22 @@ +{ + "name": "nexla-sdk-monorepo", + "private": true, + "packageManager": "pnpm@9.15.4", + "workspaces": [ + "packages/*", + "docs-site" + ], + "scripts": { + "lint": "turbo run lint", + "test": "turbo run test", + "build": "turbo run build", + "typecheck": "turbo run typecheck", + "changeset": "changeset", + "version": "changeset version", + "release": "changeset publish" + }, + "devDependencies": { + "@changesets/cli": "^2.27.9", + "turbo": "^2.3.3" + } +} diff --git a/packages/ts-sdk/README.md b/packages/ts-sdk/README.md new file mode 100644 index 0000000..3a21fd0 --- /dev/null +++ b/packages/ts-sdk/README.md @@ -0,0 +1,100 @@ +# Nexla TypeScript SDK + +TypeScript SDK for interacting with the Nexla API. 
+ +## Installation + +```bash +npm install @nexla/sdk +``` + +## Authentication + +### Service key (recommended) + +```ts +import { NexlaClient } from "@nexla/sdk"; + +const client = new NexlaClient({ serviceKey: process.env.NEXLA_SERVICE_KEY }); +``` + +### Access token + +```ts +import { NexlaClient } from "@nexla/sdk"; + +const client = new NexlaClient({ accessToken: process.env.NEXLA_ACCESS_TOKEN }); +``` + +Supported env vars: + +- `NEXLA_SERVICE_KEY` +- `NEXLA_ACCESS_TOKEN` +- `NEXLA_API_URL` + +## Quick Start + +```ts +import { NexlaClient } from "@nexla/sdk"; + +const client = new NexlaClient({ serviceKey: process.env.NEXLA_SERVICE_KEY }); + +const flows = await client.flows.list(); +const activated = await client.flows.flow_activate_with_flow_id({ + params: { path: { flow_id: 123 } } +}); +``` + +## Raw OpenAPI Access + +Use the raw client when you need direct path-level access: + +```ts +const { data } = await client.raw.GET("/flows", { + params: { query: { page: 1 } } +}); +``` + +## Webhooks + +```ts +import { WebhooksClient } from "@nexla/sdk"; + +const webhooks = new WebhooksClient({ apiKey: process.env.NEXLA_WEBHOOK_API_KEY }); +await webhooks.sendOneRecord("https://api.nexla.com/webhook/abc", { id: 1 }); +``` + +## SDK Configuration + +```ts +const client = new NexlaClient({ + serviceKey: process.env.NEXLA_SERVICE_KEY, + baseUrl: "https://dataops.nexla.io/nexla-api", + apiVersion: "v1", + retry: { maxRetries: 3 }, + userAgent: "my-app/1.2.3" +}); +``` + +## Documentation Map + +- [Architecture](../../docs/ts-sdk/architecture.md) +- [API coverage process](../../docs/ts-sdk/api-coverage.md) +- [Python to TS migration guide](../../docs/ts-sdk/migration-guide.md) +- [Generated parity matrix](../../docs/ts-sdk/parity-matrix.md) +- [Integration test policy (non-blocking)](../../docs/ts-sdk/integration-tests.md) + +## Refreshing Generated Coverage Docs + +From repository root: + +```bash +pnpm -C packages/ts-sdk gen +node 
packages/ts-sdk/scripts/generate-parity-matrix.mjs +``` + +This regenerates TS OpenAPI artifacts and refreshes `docs/ts-sdk/parity-matrix.md`. + +## License + +MIT diff --git a/packages/ts-sdk/coverage/lcov-report/base.css b/packages/ts-sdk/coverage/lcov-report/base.css new file mode 100644 index 0000000..f418035 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/base.css @@ -0,0 +1,224 @@ +body, html { + margin:0; padding: 0; + height: 100%; +} +body { + font-family: Helvetica Neue, Helvetica, Arial; + font-size: 14px; + color:#333; +} +.small { font-size: 12px; } +*, *:after, *:before { + -webkit-box-sizing:border-box; + -moz-box-sizing:border-box; + box-sizing:border-box; + } +h1 { font-size: 20px; margin: 0;} +h2 { font-size: 14px; } +pre { + font: 12px/1.4 Consolas, "Liberation Mono", Menlo, Courier, monospace; + margin: 0; + padding: 0; + -moz-tab-size: 2; + -o-tab-size: 2; + tab-size: 2; +} +a { color:#0074D9; text-decoration:none; } +a:hover { text-decoration:underline; } +.strong { font-weight: bold; } +.space-top1 { padding: 10px 0 0 0; } +.pad2y { padding: 20px 0; } +.pad1y { padding: 10px 0; } +.pad2x { padding: 0 20px; } +.pad2 { padding: 20px; } +.pad1 { padding: 10px; } +.space-left2 { padding-left:55px; } +.space-right2 { padding-right:20px; } +.center { text-align:center; } +.clearfix { display:block; } +.clearfix:after { + content:''; + display:block; + height:0; + clear:both; + visibility:hidden; + } +.fl { float: left; } +@media only screen and (max-width:640px) { + .col3 { width:100%; max-width:100%; } + .hide-mobile { display:none!important; } +} + +.quiet { + color: #7f7f7f; + color: rgba(0,0,0,0.5); +} +.quiet a { opacity: 0.7; } + +.fraction { + font-family: Consolas, 'Liberation Mono', Menlo, Courier, monospace; + font-size: 10px; + color: #555; + background: #E8E8E8; + padding: 4px 5px; + border-radius: 3px; + vertical-align: middle; +} + +div.path a:link, div.path a:visited { color: #333; } +table.coverage { + border-collapse: 
collapse; + margin: 10px 0 0 0; + padding: 0; +} + +table.coverage td { + margin: 0; + padding: 0; + vertical-align: top; +} +table.coverage td.line-count { + text-align: right; + padding: 0 5px 0 20px; +} +table.coverage td.line-coverage { + text-align: right; + padding-right: 10px; + min-width:20px; +} + +table.coverage td span.cline-any { + display: inline-block; + padding: 0 5px; + width: 100%; +} +.missing-if-branch { + display: inline-block; + margin-right: 5px; + border-radius: 3px; + position: relative; + padding: 0 4px; + background: #333; + color: yellow; +} + +.skip-if-branch { + display: none; + margin-right: 10px; + position: relative; + padding: 0 4px; + background: #ccc; + color: white; +} +.missing-if-branch .typ, .skip-if-branch .typ { + color: inherit !important; +} +.coverage-summary { + border-collapse: collapse; + width: 100%; +} +.coverage-summary tr { border-bottom: 1px solid #bbb; } +.keyline-all { border: 1px solid #ddd; } +.coverage-summary td, .coverage-summary th { padding: 10px; } +.coverage-summary tbody { border: 1px solid #bbb; } +.coverage-summary td { border-right: 1px solid #bbb; } +.coverage-summary td:last-child { border-right: none; } +.coverage-summary th { + text-align: left; + font-weight: normal; + white-space: nowrap; +} +.coverage-summary th.file { border-right: none !important; } +.coverage-summary th.pct { } +.coverage-summary th.pic, +.coverage-summary th.abs, +.coverage-summary td.pct, +.coverage-summary td.abs { text-align: right; } +.coverage-summary td.file { white-space: nowrap; } +.coverage-summary td.pic { min-width: 120px !important; } +.coverage-summary tfoot td { } + +.coverage-summary .sorter { + height: 10px; + width: 7px; + display: inline-block; + margin-left: 0.5em; + background: url(sort-arrow-sprite.png) no-repeat scroll 0 0 transparent; +} +.coverage-summary .sorted .sorter { + background-position: 0 -20px; +} +.coverage-summary .sorted-desc .sorter { + background-position: 0 -10px; +} +.status-line { 
height: 10px; } +/* yellow */ +.cbranch-no { background: yellow !important; color: #111; } +/* dark red */ +.red.solid, .status-line.low, .low .cover-fill { background:#C21F39 } +.low .chart { border:1px solid #C21F39 } +.highlighted, +.highlighted .cstat-no, .highlighted .fstat-no, .highlighted .cbranch-no{ + background: #C21F39 !important; +} +/* medium red */ +.cstat-no, .fstat-no, .cbranch-no, .cbranch-no { background:#F6C6CE } +/* light red */ +.low, .cline-no { background:#FCE1E5 } +/* light green */ +.high, .cline-yes { background:rgb(230,245,208) } +/* medium green */ +.cstat-yes { background:rgb(161,215,106) } +/* dark green */ +.status-line.high, .high .cover-fill { background:rgb(77,146,33) } +.high .chart { border:1px solid rgb(77,146,33) } +/* dark yellow (gold) */ +.status-line.medium, .medium .cover-fill { background: #f9cd0b; } +.medium .chart { border:1px solid #f9cd0b; } +/* light yellow */ +.medium { background: #fff4c2; } + +.cstat-skip { background: #ddd; color: #111; } +.fstat-skip { background: #ddd; color: #111 !important; } +.cbranch-skip { background: #ddd !important; color: #111; } + +span.cline-neutral { background: #eaeaea; } + +.coverage-summary td.empty { + opacity: .5; + padding-top: 4px; + padding-bottom: 4px; + line-height: 1; + color: #888; +} + +.cover-fill, .cover-empty { + display:inline-block; + height: 12px; +} +.chart { + line-height: 0; +} +.cover-empty { + background: white; +} +.cover-full { + border-right: none !important; +} +pre.prettyprint { + border: none !important; + padding: 0 !important; + margin: 0 !important; +} +.com { color: #999 !important; } +.ignore-none { color: #999; font-weight: normal; } + +.wrapper { + min-height: 100%; + height: auto !important; + height: 100%; + margin: 0 auto -48px; +} +.footer, .push { + height: 48px; +} diff --git a/packages/ts-sdk/coverage/lcov-report/block-navigation.js b/packages/ts-sdk/coverage/lcov-report/block-navigation.js new file mode 100644 index 0000000..530d1ed --- 
/dev/null +++ b/packages/ts-sdk/coverage/lcov-report/block-navigation.js @@ -0,0 +1,87 @@ +/* eslint-disable */ +var jumpToCode = (function init() { + // Classes of code we would like to highlight in the file view + var missingCoverageClasses = ['.cbranch-no', '.cstat-no', '.fstat-no']; + + // Elements to highlight in the file listing view + var fileListingElements = ['td.pct.low']; + + // We don't want to select elements that are direct descendants of another match + var notSelector = ':not(' + missingCoverageClasses.join('):not(') + ') > '; // becomes `:not(a):not(b) > ` + + // Selector that finds elements on the page to which we can jump + var selector = + fileListingElements.join(', ') + + ', ' + + notSelector + + missingCoverageClasses.join(', ' + notSelector); // becomes `:not(a):not(b) > a, :not(a):not(b) > b` + + // The NodeList of matching elements + var missingCoverageElements = document.querySelectorAll(selector); + + var currentIndex; + + function toggleClass(index) { + missingCoverageElements + .item(currentIndex) + .classList.remove('highlighted'); + missingCoverageElements.item(index).classList.add('highlighted'); + } + + function makeCurrent(index) { + toggleClass(index); + currentIndex = index; + missingCoverageElements.item(index).scrollIntoView({ + behavior: 'smooth', + block: 'center', + inline: 'center' + }); + } + + function goToPrevious() { + var nextIndex = 0; + if (typeof currentIndex !== 'number' || currentIndex === 0) { + nextIndex = missingCoverageElements.length - 1; + } else if (missingCoverageElements.length > 1) { + nextIndex = currentIndex - 1; + } + + makeCurrent(nextIndex); + } + + function goToNext() { + var nextIndex = 0; + + if ( + typeof currentIndex === 'number' && + currentIndex < missingCoverageElements.length - 1 + ) { + nextIndex = currentIndex + 1; + } + + makeCurrent(nextIndex); + } + + return function jump(event) { + if ( + document.getElementById('fileSearch') === document.activeElement && + document.activeElement != 
null + ) { + // if we're currently focused on the search input, we don't want to navigate + return; + } + + switch (event.which) { + case 78: // n + case 74: // j + goToNext(); + break; + case 66: // b + case 75: // k + case 80: // p + goToPrevious(); + break; + } + }; +})(); +window.addEventListener('keydown', jumpToCode); diff --git a/packages/ts-sdk/coverage/lcov-report/favicon.png b/packages/ts-sdk/coverage/lcov-report/favicon.png new file mode 100644 index 0000000000000000000000000000000000000000..c1525b811a167671e9de1fa78aab9f5c0b61cef7 GIT binary patch literal 445 zcmV;u0Yd(XP))rP{nL}Ln%S7`m{0DjX9TLF* zFCb$4Oi7vyLOydb!7n&^ItCzb-%BoB`=x@N2jll2Nj`kauio%aw_@fe&*}LqlFT43 z8doAAe))z_%=P%v^@JHp3Hjhj^6*Kr_h|g_Gr?ZAa&y>wxHE99Gk>A)2MplWz2xdG zy8VD2J|Uf#EAw*bo5O*PO_}X2Tob{%bUoO2G~T`@%S6qPyc}VkhV}UifBuRk>%5v( z)x7B{I~z*k<7dv#5tC+m{km(D087J4O%+<<;K|qwefb6@GSX45wCK}Sn*> + + + + Code coverage report for All files + + + + + + + + + +
+
+

All files

+
+ +
+ 84.78% + Statements + 468/552 +
+ + +
+ 71.02% + Branches + 125/176 +
+ + +
+ 82.45% + Functions + 47/57 +
+ + +
+ 84.78% + Lines + 468/552 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
src +
+
93.47%86/9291.66%11/1283.33%10/1293.47%86/92
src/auth +
+
89.14%115/12968.18%30/4492.85%13/1489.14%115/129
src/client +
+
81.41%184/22673.03%65/8987.5%14/1681.41%184/226
src/resources +
+
69.69%23/3380%4/550%4/869.69%23/33
src/webhooks +
+
83.33%60/7257.69%15/2685.71%6/783.33%60/72
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/prettify.css b/packages/ts-sdk/coverage/lcov-report/prettify.css new file mode 100644 index 0000000..b317a7c --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/prettify.css @@ -0,0 +1 @@ +.pln{color:#000}@media screen{.str{color:#080}.kwd{color:#008}.com{color:#800}.typ{color:#606}.lit{color:#066}.pun,.opn,.clo{color:#660}.tag{color:#008}.atn{color:#606}.atv{color:#080}.dec,.var{color:#606}.fun{color:red}}@media print,projection{.str{color:#060}.kwd{color:#006;font-weight:bold}.com{color:#600;font-style:italic}.typ{color:#404;font-weight:bold}.lit{color:#044}.pun,.opn,.clo{color:#440}.tag{color:#006;font-weight:bold}.atn{color:#404}.atv{color:#060}}pre.prettyprint{padding:2px;border:1px solid #888}ol.linenums{margin-top:0;margin-bottom:0}li.L0,li.L1,li.L2,li.L3,li.L5,li.L6,li.L7,li.L8{list-style-type:none}li.L1,li.L3,li.L5,li.L7,li.L9{background:#eee} diff --git a/packages/ts-sdk/coverage/lcov-report/prettify.js b/packages/ts-sdk/coverage/lcov-report/prettify.js new file mode 100644 index 0000000..b322523 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/prettify.js @@ -0,0 +1,2 @@ +/* eslint-disable */ +window.PR_SHOULD_USE_CONTINUATION=true;(function(){var h=["break,continue,do,else,for,if,return,while"];var u=[h,"auto,case,char,const,default,double,enum,extern,float,goto,int,long,register,short,signed,sizeof,static,struct,switch,typedef,union,unsigned,void,volatile"];var p=[u,"catch,class,delete,false,import,new,operator,private,protected,public,this,throw,true,try,typeof"];var l=[p,"alignof,align_union,asm,axiom,bool,concept,concept_map,const_cast,constexpr,decltype,dynamic_cast,explicit,export,friend,inline,late_check,mutable,namespace,nullptr,reinterpret_cast,static_assert,static_cast,template,typeid,typename,using,virtual,where"];var 
x=[p,"abstract,boolean,byte,extends,final,finally,implements,import,instanceof,null,native,package,strictfp,super,synchronized,throws,transient"];var R=[x,"as,base,by,checked,decimal,delegate,descending,dynamic,event,fixed,foreach,from,group,implicit,in,interface,internal,into,is,lock,object,out,override,orderby,params,partial,readonly,ref,sbyte,sealed,stackalloc,string,select,uint,ulong,unchecked,unsafe,ushort,var"];var r="all,and,by,catch,class,else,extends,false,finally,for,if,in,is,isnt,loop,new,no,not,null,of,off,on,or,return,super,then,true,try,unless,until,when,while,yes";var w=[p,"debugger,eval,export,function,get,null,set,undefined,var,with,Infinity,NaN"];var s="caller,delete,die,do,dump,elsif,eval,exit,foreach,for,goto,if,import,last,local,my,next,no,our,print,package,redo,require,sub,undef,unless,until,use,wantarray,while,BEGIN,END";var I=[h,"and,as,assert,class,def,del,elif,except,exec,finally,from,global,import,in,is,lambda,nonlocal,not,or,pass,print,raise,try,with,yield,False,True,None"];var f=[h,"alias,and,begin,case,class,def,defined,elsif,end,ensure,false,in,module,next,nil,not,or,redo,rescue,retry,self,super,then,true,undef,unless,until,when,yield,BEGIN,END"];var H=[h,"case,done,elif,esac,eval,fi,function,in,local,set,then,until"];var A=[l,R,w,s+I,f,H];var e=/^(DIR|FILE|vector|(de|priority_)?queue|list|stack|(const_)?iterator|(multi)?(set|map)|bitset|u?(int|float)\d*)/;var C="str";var z="kwd";var j="com";var O="typ";var G="lit";var L="pun";var F="pln";var m="tag";var E="dec";var J="src";var P="atn";var n="atv";var N="nocode";var M="(?:^^\\.?|[+-]|\\!|\\!=|\\!==|\\#|\\%|\\%=|&|&&|&&=|&=|\\(|\\*|\\*=|\\+=|\\,|\\-=|\\->|\\/|\\/=|:|::|\\;|<|<<|<<=|<=|=|==|===|>|>=|>>|>>=|>>>|>>>=|\\?|\\@|\\[|\\^|\\^=|\\^\\^|\\^\\^=|\\{|\\||\\|=|\\|\\||\\|\\|=|\\~|break|case|continue|delete|do|else|finally|instanceof|return|throw|try|typeof)\\s*";function k(Z){var ad=0;var S=false;var ac=false;for(var 
V=0,U=Z.length;V122)){if(!(al<65||ag>90)){af.push([Math.max(65,ag)|32,Math.min(al,90)|32])}if(!(al<97||ag>122)){af.push([Math.max(97,ag)&~32,Math.min(al,122)&~32])}}}}af.sort(function(av,au){return(av[0]-au[0])||(au[1]-av[1])});var ai=[];var ap=[NaN,NaN];for(var ar=0;arat[0]){if(at[1]+1>at[0]){an.push("-")}an.push(T(at[1]))}}an.push("]");return an.join("")}function W(al){var aj=al.source.match(new RegExp("(?:\\[(?:[^\\x5C\\x5D]|\\\\[\\s\\S])*\\]|\\\\u[A-Fa-f0-9]{4}|\\\\x[A-Fa-f0-9]{2}|\\\\[0-9]+|\\\\[^ux0-9]|\\(\\?[:!=]|[\\(\\)\\^]|[^\\x5B\\x5C\\(\\)\\^]+)","g"));var ah=aj.length;var an=[];for(var ak=0,am=0;ak=2&&ai==="["){aj[ak]=X(ag)}else{if(ai!=="\\"){aj[ak]=ag.replace(/[a-zA-Z]/g,function(ao){var ap=ao.charCodeAt(0);return"["+String.fromCharCode(ap&~32,ap|32)+"]"})}}}}return aj.join("")}var aa=[];for(var V=0,U=Z.length;V=0;){S[ac.charAt(ae)]=Y}}var af=Y[1];var aa=""+af;if(!ag.hasOwnProperty(aa)){ah.push(af);ag[aa]=null}}ah.push(/[\0-\uffff]/);V=k(ah)})();var X=T.length;var W=function(ah){var Z=ah.sourceCode,Y=ah.basePos;var ad=[Y,F];var af=0;var an=Z.match(V)||[];var aj={};for(var ae=0,aq=an.length;ae=5&&"lang-"===ap.substring(0,5);if(am&&!(ai&&typeof ai[1]==="string")){am=false;ap=J}if(!am){aj[ag]=ap}}var ab=af;af+=ag.length;if(!am){ad.push(Y+ab,ap)}else{var al=ai[1];var ak=ag.indexOf(al);var ac=ak+al.length;if(ai[2]){ac=ag.length-ai[2].length;ak=ac-al.length}var ar=ap.substring(5);B(Y+ab,ag.substring(0,ak),W,ad);B(Y+ab+ak,al,q(ar,al),ad);B(Y+ab+ac,ag.substring(ac),W,ad)}}ah.decorations=ad};return W}function i(T){var 
W=[],S=[];if(T.tripleQuotedStrings){W.push([C,/^(?:\'\'\'(?:[^\'\\]|\\[\s\S]|\'{1,2}(?=[^\']))*(?:\'\'\'|$)|\"\"\"(?:[^\"\\]|\\[\s\S]|\"{1,2}(?=[^\"]))*(?:\"\"\"|$)|\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$))/,null,"'\""])}else{if(T.multiLineStrings){W.push([C,/^(?:\'(?:[^\\\']|\\[\s\S])*(?:\'|$)|\"(?:[^\\\"]|\\[\s\S])*(?:\"|$)|\`(?:[^\\\`]|\\[\s\S])*(?:\`|$))/,null,"'\"`"])}else{W.push([C,/^(?:\'(?:[^\\\'\r\n]|\\.)*(?:\'|$)|\"(?:[^\\\"\r\n]|\\.)*(?:\"|$))/,null,"\"'"])}}if(T.verbatimStrings){S.push([C,/^@\"(?:[^\"]|\"\")*(?:\"|$)/,null])}var Y=T.hashComments;if(Y){if(T.cStyleComments){if(Y>1){W.push([j,/^#(?:##(?:[^#]|#(?!##))*(?:###|$)|.*)/,null,"#"])}else{W.push([j,/^#(?:(?:define|elif|else|endif|error|ifdef|include|ifndef|line|pragma|undef|warning)\b|[^\r\n]*)/,null,"#"])}S.push([C,/^<(?:(?:(?:\.\.\/)*|\/?)(?:[\w-]+(?:\/[\w-]+)+)?[\w-]+\.h|[a-z]\w*)>/,null])}else{W.push([j,/^#[^\r\n]*/,null,"#"])}}if(T.cStyleComments){S.push([j,/^\/\/[^\r\n]*/,null]);S.push([j,/^\/\*[\s\S]*?(?:\*\/|$)/,null])}if(T.regexLiterals){var X=("/(?=[^/*])(?:[^/\\x5B\\x5C]|\\x5C[\\s\\S]|\\x5B(?:[^\\x5C\\x5D]|\\x5C[\\s\\S])*(?:\\x5D|$))+/");S.push(["lang-regex",new RegExp("^"+M+"("+X+")")])}var V=T.types;if(V){S.push([O,V])}var U=(""+T.keywords).replace(/^ | $/g,"");if(U.length){S.push([z,new RegExp("^(?:"+U.replace(/[\s,]+/g,"|")+")\\b"),null])}W.push([F,/^\s+/,null," \r\n\t\xA0"]);S.push([G,/^@[a-z_$][a-z_$@0-9]*/i,null],[O,/^(?:[@_]?[A-Z]+[a-z][A-Za-z_$@0-9]*|\w+_t\b)/,null],[F,/^[a-z_$][a-z_$@0-9]*/i,null],[G,new RegExp("^(?:0x[a-f0-9]+|(?:\\d(?:_\\d+)*\\d*(?:\\.\\d*)?|\\.\\d\\+)(?:e[+\\-]?\\d+)?)[a-z]*","i"),null,"0123456789"],[F,/^\\[\s\S]?/,null],[L,/^.[^\s\w\.$@\'\"\`\/\#\\]*/,null]);return g(W,S)}var K=i({keywords:A,hashComments:true,cStyleComments:true,multiLineStrings:true,regexLiterals:true});function Q(V,ag){var U=/(?:^|\s)nocode(?:\s|$)/;var ab=/\r\n?|\n/;var ac=V.ownerDocument;var 
S;if(V.currentStyle){S=V.currentStyle.whiteSpace}else{if(window.getComputedStyle){S=ac.defaultView.getComputedStyle(V,null).getPropertyValue("white-space")}}var Z=S&&"pre"===S.substring(0,3);var af=ac.createElement("LI");while(V.firstChild){af.appendChild(V.firstChild)}var W=[af];function ae(al){switch(al.nodeType){case 1:if(U.test(al.className)){break}if("BR"===al.nodeName){ad(al);if(al.parentNode){al.parentNode.removeChild(al)}}else{for(var an=al.firstChild;an;an=an.nextSibling){ae(an)}}break;case 3:case 4:if(Z){var am=al.nodeValue;var aj=am.match(ab);if(aj){var ai=am.substring(0,aj.index);al.nodeValue=ai;var ah=am.substring(aj.index+aj[0].length);if(ah){var ak=al.parentNode;ak.insertBefore(ac.createTextNode(ah),al.nextSibling)}ad(al);if(!ai){al.parentNode.removeChild(al)}}}break}}function ad(ak){while(!ak.nextSibling){ak=ak.parentNode;if(!ak){return}}function ai(al,ar){var aq=ar?al.cloneNode(false):al;var ao=al.parentNode;if(ao){var ap=ai(ao,1);var an=al.nextSibling;ap.appendChild(aq);for(var am=an;am;am=an){an=am.nextSibling;ap.appendChild(am)}}return aq}var ah=ai(ak.nextSibling,0);for(var aj;(aj=ah.parentNode)&&aj.nodeType===1;){ah=aj}W.push(ah)}for(var Y=0;Y=S){ah+=2}if(V>=ap){Z+=2}}}var t={};function c(U,V){for(var S=V.length;--S>=0;){var T=V[S];if(!t.hasOwnProperty(T)){t[T]=U}else{if(window.console){console.warn("cannot override language handler %s",T)}}}}function q(T,S){if(!(T&&t.hasOwnProperty(T))){T=/^\s*]*(?:>|$)/],[j,/^<\!--[\s\S]*?(?:-\->|$)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],[L,/^(?:<[%?]|[%?]>)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i]]),["default-markup","htm","html","mxml","xhtml","xml","xsl"]);c(g([[F,/^[\s]+/,null," 
\t\r\n"],[n,/^(?:\"[^\"]*\"?|\'[^\']*\'?)/,null,"\"'"]],[[m,/^^<\/?[a-z](?:[\w.:-]*\w)?|\/?>$/i],[P,/^(?!style[\s=]|on)[a-z](?:[\w:-]*\w)?/i],["lang-uq.val",/^=\s*([^>\'\"\s]*(?:[^>\'\"\s\/]|\/(?=\s)))/],[L,/^[=<>\/]+/],["lang-js",/^on\w+\s*=\s*\"([^\"]+)\"/i],["lang-js",/^on\w+\s*=\s*\'([^\']+)\'/i],["lang-js",/^on\w+\s*=\s*([^\"\'>\s]+)/i],["lang-css",/^style\s*=\s*\"([^\"]+)\"/i],["lang-css",/^style\s*=\s*\'([^\']+)\'/i],["lang-css",/^style\s*=\s*([^\"\'>\s]+)/i]]),["in.tag"]);c(g([],[[n,/^[\s\S]+/]]),["uq.val"]);c(i({keywords:l,hashComments:true,cStyleComments:true,types:e}),["c","cc","cpp","cxx","cyc","m"]);c(i({keywords:"null,true,false"}),["json"]);c(i({keywords:R,hashComments:true,cStyleComments:true,verbatimStrings:true,types:e}),["cs"]);c(i({keywords:x,cStyleComments:true}),["java"]);c(i({keywords:H,hashComments:true,multiLineStrings:true}),["bsh","csh","sh"]);c(i({keywords:I,hashComments:true,multiLineStrings:true,tripleQuotedStrings:true}),["cv","py"]);c(i({keywords:s,hashComments:true,multiLineStrings:true,regexLiterals:true}),["perl","pl","pm"]);c(i({keywords:f,hashComments:true,multiLineStrings:true,regexLiterals:true}),["rb"]);c(i({keywords:w,cStyleComments:true,regexLiterals:true}),["js"]);c(i({keywords:r,hashComments:3,cStyleComments:true,multilineStrings:true,tripleQuotedStrings:true,regexLiterals:true}),["coffee"]);c(g([],[[C,/^[\s\S]+/]]),["regex"]);function d(V){var U=V.langExtension;try{var S=a(V.sourceNode);var T=S.sourceCode;V.sourceCode=T;V.spans=S.spans;V.basePos=0;q(U,T)(V);D(V)}catch(W){if("console" in window){console.log(W&&W.stack?W.stack:W)}}}function y(W,V,U){var S=document.createElement("PRE");S.innerHTML=W;if(U){Q(S,U)}var T={langExtension:V,numberLines:U,sourceNode:S};d(T);return S.innerHTML}function b(ad){function Y(af){return document.getElementsByTagName(af)}var ac=[Y("pre"),Y("code"),Y("xmp")];var T=[];for(var aa=0;aa=0){var ah=ai.match(ab);var 
am;if(!ah&&(am=o(aj))&&"CODE"===am.tagName){ah=am.className.match(ab)}if(ah){ah=ah[1]}var al=false;for(var ak=aj.parentNode;ak;ak=ak.parentNode){if((ak.tagName==="pre"||ak.tagName==="code"||ak.tagName==="xmp")&&ak.className&&ak.className.indexOf("prettyprint")>=0){al=true;break}}if(!al){var af=aj.className.match(/\blinenums\b(?::(\d+))?/);af=af?af[1]&&af[1].length?+af[1]:true:false;if(af){Q(aj,af)}S={langExtension:ah,sourceNode:aj,numberLines:af};d(S)}}}if(X]*(?:>|$)/],[PR.PR_COMMENT,/^<\!--[\s\S]*?(?:-\->|$)/],[PR.PR_PUNCTUATION,/^(?:<[%?]|[%?]>)/],["lang-",/^<\?([\s\S]+?)(?:\?>|$)/],["lang-",/^<%([\s\S]+?)(?:%>|$)/],["lang-",/^]*>([\s\S]+?)<\/xmp\b[^>]*>/i],["lang-handlebars",/^]*type\s*=\s*['"]?text\/x-handlebars-template['"]?\b[^>]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-js",/^]*>([\s\S]*?)(<\/script\b[^>]*>)/i],["lang-css",/^]*>([\s\S]*?)(<\/style\b[^>]*>)/i],["lang-in.tag",/^(<\/?[a-z][^<>]*>)/i],[PR.PR_DECLARATION,/^{{[#^>/]?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{&?\s*[\w.][^}]*}}/],[PR.PR_DECLARATION,/^{{{>?\s*[\w.][^}]*}}}/],[PR.PR_COMMENT,/^{{![^}]*}}/]]),["handlebars","hbs"]);PR.registerLangHandler(PR.createSimpleLexer([[PR.PR_PLAIN,/^[ \t\r\n\f]+/,null," \t\r\n\f"]],[[PR.PR_STRING,/^\"(?:[^\n\r\f\\\"]|\\(?:\r\n?|\n|\f)|\\[\s\S])*\"/,null],[PR.PR_STRING,/^\'(?:[^\n\r\f\\\']|\\(?:\r\n?|\n|\f)|\\[\s\S])*\'/,null],["lang-css-str",/^url\(([^\)\"\']*)\)/i],[PR.PR_KEYWORD,/^(?:url|rgb|\!important|@import|@page|@media|@charset|inherit)(?=[^\-\w]|$)/i,null],["lang-css-kw",/^(-?(?:[_a-z]|(?:\\[0-9a-f]+ ?))(?:[_a-z0-9\-]|\\(?:\\[0-9a-f]+ ?))*)\s*:/i],[PR.PR_COMMENT,/^\/\*[^*]*\*+(?:[^\/*][^*]*\*+)*\//],[PR.PR_COMMENT,/^(?:)/],[PR.PR_LITERAL,/^(?:\d+|\d*\.\d+)(?:%|[a-z]+)?/i],[PR.PR_LITERAL,/^#(?:[0-9a-f]{3}){1,2}/i],[PR.PR_PLAIN,/^-?(?:[_a-z]|(?:\\[\da-f]+ ?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i],[PR.PR_PUNCTUATION,/^[^\s\w\'\"]+/]]),["css"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_KEYWORD,/^-?(?:[_a-z]|(?:\\[\da-f]+ 
?))(?:[_a-z\d\-]|\\(?:\\[\da-f]+ ?))*/i]]),["css-kw"]);PR.registerLangHandler(PR.createSimpleLexer([],[[PR.PR_STRING,/^[^\)\"\']+/]]),["css-str"]); diff --git a/packages/ts-sdk/coverage/lcov-report/sort-arrow-sprite.png b/packages/ts-sdk/coverage/lcov-report/sort-arrow-sprite.png new file mode 100644 index 0000000000000000000000000000000000000000..6ed68316eb3f65dec9063332d2f69bf3093bbfab GIT binary patch literal 138 zcmeAS@N?(olHy`uVBq!ia0vp^>_9Bd!3HEZxJ@+%Qh}Z>jv*C{$p!i!8j}?a+@3A= zIAGwzjijN=FBi!|L1t?LM;Q;gkwn>2cAy-KV{dn nf0J1DIvEHQu*n~6U}x}qyky7vi4|9XhBJ7&`njxgN@xNA8m%nc literal 0 HcmV?d00001 diff --git a/packages/ts-sdk/coverage/lcov-report/sorter.js b/packages/ts-sdk/coverage/lcov-report/sorter.js new file mode 100644 index 0000000..4ed70ae --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/sorter.js @@ -0,0 +1,210 @@ +/* eslint-disable */ +var addSorting = (function() { + 'use strict'; + var cols, + currentSort = { + index: 0, + desc: false + }; + + // returns the summary table element + function getTable() { + return document.querySelector('.coverage-summary'); + } + // returns the thead element of the summary table + function getTableHeader() { + return getTable().querySelector('thead tr'); + } + // returns the tbody element of the summary table + function getTableBody() { + return getTable().querySelector('tbody'); + } + // returns the th element for nth column + function getNthColumn(n) { + return getTableHeader().querySelectorAll('th')[n]; + } + + function onFilterInput() { + const searchValue = document.getElementById('fileSearch').value; + const rows = document.getElementsByTagName('tbody')[0].children; + + // Try to create a RegExp from the searchValue. 
If it fails (invalid regex), + // it will be treated as a plain text search + let searchRegex; + try { + searchRegex = new RegExp(searchValue, 'i'); // 'i' for case-insensitive + } catch (error) { + searchRegex = null; + } + + for (let i = 0; i < rows.length; i++) { + const row = rows[i]; + let isMatch = false; + + if (searchRegex) { + // If a valid regex was created, use it for matching + isMatch = searchRegex.test(row.textContent); + } else { + // Otherwise, fall back to the original plain text search + isMatch = row.textContent + .toLowerCase() + .includes(searchValue.toLowerCase()); + } + + row.style.display = isMatch ? '' : 'none'; + } + } + + // loads the search box + function addSearchBox() { + var template = document.getElementById('filterTemplate'); + var templateClone = template.content.cloneNode(true); + templateClone.getElementById('fileSearch').oninput = onFilterInput; + template.parentElement.appendChild(templateClone); + } + + // loads all columns + function loadColumns() { + var colNodes = getTableHeader().querySelectorAll('th'), + colNode, + cols = [], + col, + i; + + for (i = 0; i < colNodes.length; i += 1) { + colNode = colNodes[i]; + col = { + key: colNode.getAttribute('data-col'), + sortable: !colNode.getAttribute('data-nosort'), + type: colNode.getAttribute('data-type') || 'string' + }; + cols.push(col); + if (col.sortable) { + col.defaultDescSort = col.type === 'number'; + colNode.innerHTML = + colNode.innerHTML + ''; + } + } + return cols; + } + // attaches a data attribute to every tr element with an object + // of data values keyed by column name + function loadRowData(tableRow) { + var tableCols = tableRow.querySelectorAll('td'), + colNode, + col, + data = {}, + i, + val; + for (i = 0; i < tableCols.length; i += 1) { + colNode = tableCols[i]; + col = cols[i]; + val = colNode.getAttribute('data-value'); + if (col.type === 'number') { + val = Number(val); + } + data[col.key] = val; + } + return data; + } + // loads all row data + function 
loadData() { + var rows = getTableBody().querySelectorAll('tr'), + i; + + for (i = 0; i < rows.length; i += 1) { + rows[i].data = loadRowData(rows[i]); + } + } + // sorts the table using the data for the ith column + function sortByIndex(index, desc) { + var key = cols[index].key, + sorter = function(a, b) { + a = a.data[key]; + b = b.data[key]; + return a < b ? -1 : a > b ? 1 : 0; + }, + finalSorter = sorter, + tableBody = document.querySelector('.coverage-summary tbody'), + rowNodes = tableBody.querySelectorAll('tr'), + rows = [], + i; + + if (desc) { + finalSorter = function(a, b) { + return -1 * sorter(a, b); + }; + } + + for (i = 0; i < rowNodes.length; i += 1) { + rows.push(rowNodes[i]); + tableBody.removeChild(rowNodes[i]); + } + + rows.sort(finalSorter); + + for (i = 0; i < rows.length; i += 1) { + tableBody.appendChild(rows[i]); + } + } + // removes sort indicators for current column being sorted + function removeSortIndicators() { + var col = getNthColumn(currentSort.index), + cls = col.className; + + cls = cls.replace(/ sorted$/, '').replace(/ sorted-desc$/, ''); + col.className = cls; + } + // adds sort indicators for current column being sorted + function addSortIndicators() { + getNthColumn(currentSort.index).className += currentSort.desc + ? 
' sorted-desc' + : ' sorted'; + } + // adds event listeners for all sorter widgets + function enableUI() { + var i, + el, + ithSorter = function ithSorter(i) { + var col = cols[i]; + + return function() { + var desc = col.defaultDescSort; + + if (currentSort.index === i) { + desc = !currentSort.desc; + } + sortByIndex(i, desc); + removeSortIndicators(); + currentSort.index = i; + currentSort.desc = desc; + addSortIndicators(); + }; + }; + for (i = 0; i < cols.length; i += 1) { + if (cols[i].sortable) { + // add the click event handler on the th so users + // dont have to click on those tiny arrows + el = getNthColumn(i).querySelector('.sorter').parentElement; + if (el.addEventListener) { + el.addEventListener('click', ithSorter(i)); + } else { + el.attachEvent('onclick', ithSorter(i)); + } + } + } + } + // adds sorting functionality to the UI + return function() { + if (!getTable()) { + return; + } + cols = loadColumns(); + loadData(); + addSearchBox(); + addSortIndicators(); + enableUI(); + }; +})(); + +window.addEventListener('load', addSorting); diff --git a/packages/ts-sdk/coverage/lcov-report/src/auth/access-token.ts.html b/packages/ts-sdk/coverage/lcov-report/src/auth/access-token.ts.html new file mode 100644 index 0000000..567a7e1 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/auth/access-token.ts.html @@ -0,0 +1,163 @@ + + + + + + Code coverage report for src/auth/access-token.ts + + + + + + + + + +
+
+

All files / src/auth access-token.ts

+
+ +
+ 84.21% + Statements + 16/19 +
+ + +
+ 80% + Branches + 4/5 +
+ + +
+ 80% + Functions + 4/5 +
+ + +
+ 84.21% + Lines + 16/19 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +271x +  +  +1x +10x +10x +  +10x +10x +  +  +10x +10x +  +10x +7x +7x +  +10x +1x +1x +  +10x +  +  +10x + 
import { AuthenticationError } from "../errors.js";
+import type { AuthProvider } from "./types.js";
+ 
+export class AccessTokenAuthProvider implements AuthProvider {
+  readonly isRefreshable = false;
+  private readonly accessToken: string;
+ 
+  constructor(accessToken: string) {
+    if (!accessToken) {
+      throw new AuthenticationError("Access token must be provided");
+    }
+    this.accessToken = accessToken;
+  }
+ 
+  async getAccessToken(): Promise<string> {
+    return this.accessToken;
+  }
+ 
+  async refreshAccessToken(): Promise<void> {
+    throw new AuthenticationError("Direct access tokens cannot be refreshed");
+  }
+ 
+  async logout(): Promise<void> {
+    // No-op for direct tokens.
+  }
+}
+ 
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/auth/index.html b/packages/ts-sdk/coverage/lcov-report/src/auth/index.html new file mode 100644 index 0000000..167b82e --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/auth/index.html @@ -0,0 +1,131 @@ + + + + + + Code coverage report for src/auth + + + + + + + + + +
+
+

All files src/auth

+
+ +
+ 89.14% + Statements + 115/129 +
+ + +
+ 68.18% + Branches + 30/44 +
+ + +
+ 92.85% + Functions + 13/14 +
+ + +
+ 89.14% + Lines + 115/129 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
access-token.ts +
+
84.21%16/1980%4/580%4/584.21%16/19
service-key.ts +
+
90%99/11066.66%26/39100%9/990%99/110
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/auth/service-key.ts.html b/packages/ts-sdk/coverage/lcov-report/src/auth/service-key.ts.html new file mode 100644 index 0000000..b045101 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/auth/service-key.ts.html @@ -0,0 +1,517 @@ + + + + + + Code coverage report for src/auth/service-key.ts + + + + + + + + + +
+
+

All files / src/auth service-key.ts

+
+ +
+ 90% + Statements + 99/110 +
+ + +
+ 66.66% + Branches + 26/39 +
+ + +
+ 100% + Functions + 9/9 +
+ + +
+ 90% + Lines + 99/110 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +1451x +  +  +  +  +  +  +  +  +  +  +  +1x +128x +  +128x +128x +128x +128x +128x +128x +  +128x +128x +  +128x +128x +128x +128x +128x +128x +128x +128x +  +128x +131x +128x +131x +1x +1x +  +130x +  +  +  +129x +131x +  +128x +2x +2x +  +128x +1x +  +1x +1x +1x +1x +1x +  +1x +1x +1x +1x +1x +1x +  +1x +1x +1x +1x +1x +  +128x +3x +3x +3x +3x +  +128x +131x +131x +131x +131x +131x +131x +  +131x +131x +131x +131x +  +131x +1x +1x +  +1x +1x +1x +  +  +  +  +130x +131x +1x +1x +  +129x +129x +  +130x +1x +1x +  +131x +131x +131x +131x +128x +  +1x +131x +131x +1x +1x +130x +130x +130x +  +  +131x +  +1x +1x +1x +1x +  +  +1x +  +  +  + 
import { AuthenticationError, NexlaError, isRecord } from "../errors.js";
+import type { AuthProvider } from "./types.js";
+ 
+export interface ServiceKeyAuthOptions {
+  serviceKey: string;
+  baseUrl: string;
+  apiVersion: string;
+  tokenRefreshMargin: number;
+  fetchFn: typeof fetch;
+  userAgent?: string;
+}
+ 
+export class ServiceKeyAuthProvider implements AuthProvider {
+  readonly isRefreshable = true;
+ 
+  private readonly serviceKey: string;
+  private readonly baseUrl: string;
+  private readonly apiVersion: string;
+  private readonly tokenRefreshMargin: number;
+  private readonly fetchFn: typeof fetch;
+  private readonly userAgent: string | undefined;
+ 
+  private accessToken: string | undefined;
+  private tokenExpiry = 0;
+ 
+  constructor(options: ServiceKeyAuthOptions) {
+    this.serviceKey = options.serviceKey;
+    this.baseUrl = options.baseUrl.replace(/\/$/, "");
+    this.apiVersion = options.apiVersion;
+    this.tokenRefreshMargin = options.tokenRefreshMargin;
+    this.fetchFn = options.fetchFn;
+    this.userAgent = options.userAgent;
+  }
+ 
+  async getAccessToken(): Promise<string> {
+    if (!this.accessToken) {
+      await this.obtainSessionToken();
+    } else if (this.isTokenNearExpiry()) {
+      await this.obtainSessionToken();
+    }
+ 
+    if (!this.accessToken) {
+      throw new AuthenticationError("No access token available after authentication");
+    }
+ 
+    return this.accessToken;
+  }
+ 
+  async refreshAccessToken(): Promise<void> {
+    await this.obtainSessionToken();
+  }
+ 
+  async logout(): Promise<void> {
+    if (!this.accessToken) return;
+ 
+    const headers: Record<string, string> = {
+      Accept: `application/vnd.nexla.api.${this.apiVersion}+json`,
+      Authorization: `Bearer ${this.accessToken}`
+    };
+    if (this.userAgent) headers["User-Agent"] = this.userAgent;
+ 
+    try {
+      await this.fetchFn(`${this.baseUrl}/token/logout`, {
+        method: "POST",
+        headers
+      });
+    } catch {
+      // Best-effort logout; ignore errors.
+    } finally {
+      this.accessToken = undefined;
+      this.tokenExpiry = 0;
+    }
+  }
+ 
+  private isTokenNearExpiry(): boolean {
+    if (!this.tokenExpiry) return true;
+    const now = Date.now() / 1000;
+    return this.tokenExpiry - now < this.tokenRefreshMargin;
+  }
+ 
+  private async obtainSessionToken(): Promise<void> {
+    const headers: Record<string, string> = {
+      Authorization: `Basic ${this.serviceKey}`,
+      Accept: `application/vnd.nexla.api.${this.apiVersion}+json`,
+      "Content-Length": "0"
+    };
+    if (this.userAgent) headers["User-Agent"] = this.userAgent;
+ 
+    const response = await this.fetchFn(`${this.baseUrl}/token`, {
+      method: "POST",
+      headers
+    });
+ 
+    if (!response.ok) {
+      const errorBody = await safeParseJson(response);
+      const message = extractErrorMessage(errorBody, response) ?? "Authentication failed";
+ 
+      if (response.status === 401) {
+        throw new AuthenticationError(message, { statusCode: response.status, response: errorBody });
+      }
+ 
+      throw new NexlaError(message, { statusCode: response.status, response: errorBody });
+    }
+ 
+    const data = await safeParseJson(response);
+    if (!isRecord(data)) {
+      throw new NexlaError("Invalid token response format", { response: data });
+    }
+ 
+    const accessToken = data["access_token"];
+    const expiresIn = data["expires_in"];
+ 
+    if (typeof accessToken !== "string") {
+      throw new NexlaError("Missing access token in response", { response: data });
+    }
+ 
+    const expiresInSeconds = typeof expiresIn === "number" ? expiresIn : 86400;
+    this.accessToken = accessToken;
+    this.tokenExpiry = Date.now() / 1000 + expiresInSeconds;
+  }
+}
+ 
+const safeParseJson = async (response: Response): Promise<unknown> => {
+  const contentType = response.headers.get("content-type")?.toLowerCase() ?? "";
+  if (!contentType.includes("application/json")) {
+    return undefined;
+  }
+  try {
+    return await response.json();
+  } catch {
+    return undefined;
+  }
+};
+ 
+const extractErrorMessage = (body: unknown, response: Response): string | undefined => {
+  if (isRecord(body)) {
+    const message = body["message"];
+    if (typeof message === "string") return message;
+    const error = body["error"];
+    if (typeof error === "string") return error;
+  }
+  if (response.statusText) return response.statusText;
+  return undefined;
+};
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/client/http.ts.html b/packages/ts-sdk/coverage/lcov-report/src/client/http.ts.html new file mode 100644 index 0000000..29ad750 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/client/http.ts.html @@ -0,0 +1,262 @@ + + + + + + Code coverage report for src/client/http.ts + + + + + + + + + +
+
+

All files / src/client http.ts

+
+ +
+ 88.88% + Statements + 40/45 +
+ + +
+ 94.73% + Branches + 18/19 +
+ + +
+ 100% + Functions + 4/4 +
+ + +
+ 88.88% + Lines + 40/45 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60  +  +  +  +  +  +  +1x +  +1x +134x +134x +134x +134x +  +134x +134x +  +134x +134x +137x +137x +137x +137x +3x +3x +3x +3x +3x +134x +137x +  +  +  +  +  +137x +134x +134x +  +1x +137x +137x +  +1x +3x +3x +1x +1x +1x +1x +1x +  +2x +2x +2x +2x +  +1x + 
export interface RetryOptions {
+  maxRetries?: number;
+  backoffMs?: number;
+  maxBackoffMs?: number;
+  retryOn?: number[];
+}
+ 
+const DEFAULT_RETRY_ON = [429, 502, 503, 504];
+ 
+export const createFetchWithRetry = (baseFetch: typeof fetch, options: RetryOptions = {}): ((input: Request) => Promise<Response>) => {
+  const maxRetries = options.maxRetries ?? 3;
+  const backoffMs = options.backoffMs ?? 300;
+  const maxBackoffMs = options.maxBackoffMs ?? 3000;
+  const retryOn = options.retryOn ?? DEFAULT_RETRY_ON;
+ 
+  return async (input: Request): Promise<Response> => {
+    const original = input;
+ 
+    let attempt = 0;
+    while (true) {
+      const request = attempt === 0 ? original : original.clone();
+      try {
+        const response = await baseFetch(request);
+        if (shouldRetryResponse(response.status, retryOn) && attempt < maxRetries) {
+          const delay = computeRetryDelay(backoffMs, maxBackoffMs, attempt, response);
+          await sleep(delay);
+          attempt += 1;
+          continue;
+        }
+        return response;
+      } catch (error) {
+        if (attempt >= maxRetries) throw error;
+        const delay = computeRetryDelay(backoffMs, maxBackoffMs, attempt);
+        await sleep(delay);
+        attempt += 1;
+      }
+    }
+  };
+};
+ 
+const shouldRetryResponse = (status: number, retryOn: number[]): boolean => {
+  return retryOn.includes(status);
+};
+ 
+const computeRetryDelay = (base: number, max: number, attempt: number, response?: Response): number => {
+  const retryAfter = response?.headers.get("retry-after");
+  if (retryAfter) {
+    const parsed = Number(retryAfter);
+    if (!Number.isNaN(parsed)) {
+      return Math.min(parsed * 1000, max);
+    }
+  }
+ 
+  const jitter = Math.random() * 100;
+  const delay = Math.min(base * 2 ** attempt + jitter, max);
+  return delay;
+};
+ 
+const sleep = (ms: number): Promise<void> => new Promise((resolve) => setTimeout(resolve, ms));
+ 
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/client/index.html b/packages/ts-sdk/coverage/lcov-report/src/client/index.html new file mode 100644 index 0000000..edd4eb7 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/client/index.html @@ -0,0 +1,146 @@ + + + + + + Code coverage report for src/client + + + + + + + + + +
+
+

All files src/client

+
+ +
+ 81.41% + Statements + 184/226 +
+ + +
+ 73.03% + Branches + 65/89 +
+ + +
+ 87.5% + Functions + 14/16 +
+ + +
+ 81.41% + Lines + 184/226 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
http.ts +
+
88.88%40/4594.73%18/19100%4/488.88%40/45
nexla-client.ts +
+
88.88%144/16268.11%47/6990.9%10/1188.88%144/162
operation-types.ts +
+
0%0/190%0/10%0/10%0/19
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/client/nexla-client.ts.html b/packages/ts-sdk/coverage/lcov-report/src/client/nexla-client.ts.html new file mode 100644 index 0000000..aca2e76 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/client/nexla-client.ts.html @@ -0,0 +1,715 @@ + + + + + + Code coverage report for src/client/nexla-client.ts + + + + + + + + + +
+
+

All files / src/client nexla-client.ts

+
+ +
+ 88.88% + Statements + 144/162 +
+ + +
+ 68.11% + Branches + 47/69 +
+ + +
+ 90.9% + Functions + 10/11 +
+ + +
+ 88.88% + Lines + 144/162 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115 +116 +117 +118 +119 +120 +121 +122 +123 +124 +125 +126 +127 +128 +129 +130 +131 +132 +133 +134 +135 +136 +137 +138 +139 +140 +141 +142 +143 +144 +145 +146 +147 +148 +149 +150 +151 +152 +153 +154 +155 +156 +157 +158 +159 +160 +161 +162 +163 +164 +165 +166 +167 +168 +169 +170 +171 +172 +173 +174 +175 +176 +177 +178 +179 +180 +181 +182 +183 +184 +185 +186 +187 +188 +189 +190 +191 +192 +193 +194 +195 +196 +197 +198 +199 +200 +201 +202 +203 +204 +205 +206 +207 +208 +209 +210 +2111x +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +1x +131x +131x +131x +131x +131x +131x +  +  +131x +131x +131x +  +131x +  +  +  +  +131x +  +  +  +131x +131x +131x +  +131x +131x +  +131x +123x +123x +123x +123x +123x +123x +123x +123x +123x +123x +123x +131x +8x +8x +  +131x +  +131x +131x +131x +131x +131x +  +131x +131x +  +  +131x +1x +1x +131x +130x +130x +130x +  +131x +131x +131x +  +131x +  +  +  +131x +131x +  +131x +130x +130x +130x +130x +130x +131x +131x +131x +22x +22x +109x +131x +  +130x +130x +130x +22x +2x +1x +1x +20x +20x +130x +  +131x +122x +122x +122x +122x +122x +122x +106x +122x +  +131x +  +  +  +131x +131x +131x +131x +131x +131x +131x +65x +131x +16x +131x +38x +131x +  +131x +12x +131x +  +131x +  +131x +  +131x +  +131x +131x +  +131x +22x +22x +  +22x +3x +3x +21x +13x +13x +22x +3x +3x +5x +1x +1x +1x +2x +2x +2x +  +  +22x +131x +  +1x +22x +21x +21x +1x +1x +21x +1x +1x +1x +  +1x +1x +1x +1x +1x +  +  + 
import createClient, { type Client } from "openapi-fetch";
+import type { HttpMethod, PathsWithMethod } from "openapi-typescript-helpers";
+import type { paths } from "../generated/schema.js";
+import { AuthenticationError, NexlaError, NotFoundError, RateLimitError, ServerError, ValidationError, isRecord } from "../errors.js";
+import { AccessTokenAuthProvider } from "../auth/access-token.js";
+import { ServiceKeyAuthProvider } from "../auth/service-key.js";
+import type { AuthProvider } from "../auth/types.js";
+import { createFetchWithRetry, type RetryOptions } from "./http.js";
+import type { RequestOptions } from "./types.js";
+import type { OperationData, OperationId, OperationInit } from "./operation-types.js";
+import { createGeneratedResources, type GeneratedResourceClients } from "../resources/generated/index.js";
+import { WebhooksClient } from "../webhooks/index.js";
+ 
+export interface NexlaClientOptions {
+  serviceKey?: string;
+  accessToken?: string;
+  baseUrl?: string;
+  apiVersion?: string;
+  tokenRefreshMargin?: number;
+  retry?: RetryOptions;
+  fetch?: typeof fetch;
+  userAgent?: string;
+  webhookApiKey?: string;
+}
+ 
/**
 * Top-level Nexla API client.
 *
 * Wraps an openapi-fetch client with authentication (service-key session or
 * static access token), default headers, retrying fetch, typed operation
 * helpers, and generated per-resource clients attached via Object.assign in
 * the constructor (surfaced on the type through declaration merging).
 */
export class NexlaClient {
  // Underlying typed openapi-fetch client; exposed for escape-hatch use.
  readonly raw: Client<paths>;
  readonly baseUrl: string;
  readonly apiVersion: string;
  // Vendor media type, e.g. "application/vnd.nexla.api.v1+json".
  readonly acceptHeader: string;
  readonly authProvider: AuthProvider;
  // Only present when options.webhookApiKey was supplied.
  readonly webhooks?: WebhooksClient;


  constructor(options: NexlaClientOptions = {}) {
    // Explicit options win over environment variables.
    const serviceKey = options.serviceKey ?? process.env.NEXLA_SERVICE_KEY;
    const accessToken = options.accessToken ?? process.env.NEXLA_ACCESS_TOKEN;

    // Exactly one credential source must be configured.
    if (!serviceKey && !accessToken) {
      throw new NexlaError(
        "Either serviceKey or accessToken must be provided (or set NEXLA_SERVICE_KEY/NEXLA_ACCESS_TOKEN)."
      );
    }
    if (serviceKey && accessToken) {
      throw new NexlaError("Cannot provide both serviceKey and accessToken.");
    }

    // Strip a single trailing slash so path joins don't double up.
    this.baseUrl = (options.baseUrl ?? process.env.NEXLA_API_URL ?? "https://dataops.nexla.io/nexla-api").replace(/\/$/, "");
    this.apiVersion = options.apiVersion ?? "v1";
    this.acceptHeader = `application/vnd.nexla.api.${this.apiVersion}+json`;

    // Auth providers get the raw fetch; API calls go through the retry wrapper.
    const fetchImpl = options.fetch ?? globalThis.fetch;
    const fetchWithRetry = createFetchWithRetry(fetchImpl, options.retry);

    if (serviceKey) {
      const authOptions = {
        serviceKey,
        baseUrl: this.baseUrl,
        apiVersion: this.apiVersion,
        tokenRefreshMargin: options.tokenRefreshMargin ?? 3600,
        fetchFn: fetchImpl
      } as const;
      // userAgent is only spread in when present so the provider's own
      // default (if any) is not overridden with undefined.
      const provider = new ServiceKeyAuthProvider(
        options.userAgent ? { ...authOptions, userAgent: options.userAgent } : authOptions
      );
      this.authProvider = provider;
    } else {
      this.authProvider = new AccessTokenAuthProvider(accessToken ?? "");
    }

    this.raw = createClient<paths>({ baseUrl: this.baseUrl, fetch: fetchWithRetry });

    // Middleware: inject default headers and a bearer token on every request.
    this.raw.use({
      onRequest: async ({ request }) => {
        const headers = new Headers(request.headers);
        // "x-nexla-skip-auth" is an internal sentinel header; it is consumed
        // here (never sent to the server) and suppresses the Authorization
        // header, e.g. for unauthenticated endpoints.
        const skipAuth = headers.get("x-nexla-skip-auth") === "true";
        headers.delete("x-nexla-skip-auth");

        // Caller-supplied Accept / Content-Type take precedence.
        if (!headers.has("Accept")) headers.set("Accept", this.acceptHeader);
        if (!headers.has("Content-Type") && request.method !== "GET" && request.method !== "HEAD") {
          headers.set("Content-Type", "application/json");
        }
        if (options.userAgent) {
          headers.set("User-Agent", options.userAgent);
        }
        if (!skipAuth) {
          const token = await this.authProvider.getAccessToken();
          headers.set("Authorization", `Bearer ${token}`);
        }

        return new Request(request, { headers });
      }
    });

    if (options.webhookApiKey) {
      this.webhooks = new WebhooksClient({ apiKey: options.webhookApiKey, fetch: fetchImpl });
    }

    // Attach the generated resource clients (see the NexlaClient interface
    // declaration below for the corresponding type-level merge).
    Object.assign(this, createGeneratedResources(this));
  }

  /**
   * Execute a typed request against the OpenAPI schema.
   *
   * Maps error responses to the NexlaError hierarchy. On an
   * AuthenticationError with a refreshable provider, refreshes the token and
   * retries exactly once; a second failure propagates.
   */
  async request<Method extends HttpMethod, Path extends PathsWithMethod<paths, Method>>(
    method: Method,
    path: Path,
    init?: RequestOptions
  ): Promise<unknown> {
    const execute = async (): Promise<unknown> => {
      const result = await this.invoke(method, path, init);
      const response = result as { data?: unknown; error?: unknown; response?: Response };
      if (response.error) {
        // A missing Response here should not happen in practice; a synthetic
        // 500 keeps mapError total.
        throw this.mapError(response.error, response.response ?? new Response(null, { status: 500 }));
      }
      return response.data as unknown;
    };

    try {
      return await execute();
    } catch (error) {
      if (error instanceof AuthenticationError && this.authProvider.isRefreshable) {
        await this.authProvider.refreshAccessToken();
        return await execute();
      }
      throw error;
    }
  }

  /**
   * Like {@link request} but keyed by OpenAPI operationId, narrowing the
   * init and return types to that operation. The operationId itself is used
   * only for typing; method and path drive the actual call.
   */
  async requestOperation<OpId extends OperationId>(
    _operationId: OpId,
    method: HttpMethod,
    path: string,
    init?: OperationInit<OpId>
  ): Promise<OperationData<OpId>> {
    const data = await this.request(method, path as PathsWithMethod<paths, HttpMethod>, init as RequestOptions);
    return data as OperationData<OpId>;
  }

  /** Terminate the auth session (delegates to the active auth provider). */
  async logout(): Promise<void> {
    await this.authProvider.logout();
  }

  // Dispatch to the matching openapi-fetch verb method. The `as never` casts
  // bridge the generic Method/Path pair to each verb's concrete signature.
  private invoke<Method extends HttpMethod, Path extends PathsWithMethod<paths, Method>>(
    method: Method,
    path: Path,
    init?: RequestOptions
  ) {
    switch (method) {
      case "get":
        return this.raw.GET(path as never, init as never);
      case "post":
        return this.raw.POST(path as never, init as never);
      case "put":
        return this.raw.PUT(path as never, init as never);
      case "patch":
        return this.raw.PATCH(path as never, init as never);
      case "delete":
        return this.raw.DELETE(path as never, init as never);
      case "options":
        return this.raw.OPTIONS(path as never, init as never);
      case "head":
        return this.raw.HEAD(path as never, init as never);
      case "trace":
        return this.raw.TRACE(path as never, init as never);
      default:
        throw new NexlaError(`Unsupported HTTP method: ${method}`);
    }
  }

  // Map an HTTP error response to the most specific NexlaError subclass.
  private mapError(errorBody: unknown, response: Response): NexlaError {
    const message = extractErrorMessage(errorBody, response) ?? `Request failed with status ${response.status}`;
    const options = { statusCode: response.status, response: errorBody };

    // NOTE(review): 403 is mapped to AuthenticationError, which triggers the
    // refresh-and-retry path in request(); an AuthorizationError class exists
    // in errors.ts — confirm 403 is intentionally treated as re-authable.
    if (response.status === 401 || response.status === 403) {
      return new AuthenticationError(message, options);
    }
    if (response.status === 404) {
      return new NotFoundError(message, options);
    }
    if (response.status === 422 || response.status === 400) {
      return new ValidationError(message, options);
    }
    if (response.status === 429) {
      const retryAfter = parseRetryAfter(response.headers.get("retry-after"));
      return new RateLimitError(message, retryAfter === undefined ? options : { ...options, retryAfter });
    }
    if (response.status >= 500) {
      return new ServerError(message, options);
    }

    return new NexlaError(message, options);
  }
}
+ 
+const extractErrorMessage = (body: unknown, response: Response): string | undefined => {
+  if (isRecord(body)) {
+    const message = body["message"];
+    if (typeof message === "string") return message;
+    const error = body["error"];
+    if (typeof error === "string") return error;
+  }
+  if (response.statusText) return response.statusText;
+  return undefined;
+};
+ 
+const parseRetryAfter = (value: string | null): number | undefined => {
+  if (!value) return undefined;
+  const parsed = Number(value);
+  return Number.isNaN(parsed) ? undefined : parsed;
+};
+ 
// Declaration merging: the constructor attaches generated per-resource
// clients with Object.assign; this interface surfaces them on the class type.
export interface NexlaClient extends GeneratedResourceClients {}
+ 
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/client/operation-types.ts.html b/packages/ts-sdk/coverage/lcov-report/src/client/operation-types.ts.html new file mode 100644 index 0000000..33afa21 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/client/operation-types.ts.html @@ -0,0 +1,142 @@ + + + + + + Code coverage report for src/client/operation-types.ts + + + + + + + + + +
+
+

All files / src/client operation-types.ts

+
+ +
+ 0% + Statements + 0/19 +
+ + +
+ 0% + Branches + 0/1 +
+ + +
+ 0% + Functions + 0/1 +
+ + +
+ 0% + Lines + 0/19 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  + 
import type { FetchResponse, RequestOptions } from "openapi-fetch";
+import type { MediaType } from "openapi-typescript-helpers";
+import type { operations } from "../generated/schema.js";
+
+export type OperationId = keyof operations;
+
+export type OperationInit<OpId extends OperationId> = RequestOptions<operations[OpId]>;
+
+export type OperationResponse<OpId extends OperationId> = FetchResponse<
+  operations[OpId],
+  OperationInit<OpId>,
+  MediaType
+>;
+
+export type OperationData<OpId extends OperationId> = OperationResponse<OpId> extends {
+  data: infer D;
+}
+  ? D
+  : never;
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/errors.ts.html b/packages/ts-sdk/coverage/lcov-report/src/errors.ts.html new file mode 100644 index 0000000..4926cf5 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/errors.ts.html @@ -0,0 +1,427 @@ + + + + + + Code coverage report for src/errors.ts + + + + + + + + + +
+
+

All files / src errors.ts

+
+ +
+ 93.47% + Statements + 86/92 +
+ + +
+ 91.66% + Branches + 11/12 +
+ + +
+ 83.33% + Functions + 10/12 +
+ + +
+ 93.47% + Lines + 86/92 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +97 +98 +99 +100 +101 +102 +103 +104 +105 +106 +107 +108 +109 +110 +111 +112 +113 +114 +115  +  +1x +29x +29x +29x +29x +29x +29x +29x +29x +29x +  +29x +29x +29x +  +  +  +  +  +  +  +  +  +29x +29x +29x +29x +29x +29x +29x +29x +29x +29x +29x +29x +29x +29x +  +29x +1x +1x +1x +1x +1x +1x +1x +1x +1x +1x +1x +1x +1x +29x +  +1x +1x +5x +5x +5x +1x +  +1x +1x +  +  +  +1x +  +1x +1x +13x +13x +13x +1x +  +1x +1x +3x +3x +3x +1x +  +1x +2x +  +2x +2x +2x +2x +2x +2x +2x +2x +2x +  +1x +1x +2x +2x +2x +1x +  +1x +1x +  +  +  +1x +  +1x +154x +154x + 
export type ErrorDetails = Record<string, unknown>;
+ 
+export class NexlaError extends Error {
+  readonly details: ErrorDetails;
+  readonly operation: string | undefined;
+  readonly resourceType: string | undefined;
+  readonly resourceId: string | undefined;
+  readonly step: string | undefined;
+  readonly context: ErrorDetails;
+  readonly originalError: Error | undefined;
+  readonly statusCode: number | undefined;
+  readonly response: unknown | undefined;
+ 
+  constructor(
+    message: string,
+    options: {
+      details?: ErrorDetails;
+      operation?: string;
+      resourceType?: string;
+      resourceId?: string;
+      step?: string;
+      context?: ErrorDetails;
+      originalError?: Error;
+      statusCode?: number;
+      response?: unknown;
+    } = {}
+  ) {
+    super(message);
+    this.name = "NexlaError";
+    this.details = options.details ?? {};
+    this.operation = options.operation;
+    this.resourceType = options.resourceType;
+    this.resourceId = options.resourceId;
+    this.step = options.step;
+    this.context = options.context ?? {};
+    this.originalError = options.originalError;
+    this.statusCode = options.statusCode;
+    this.response = options.response;
+  }
+ 
+  getErrorSummary(): Record<string, unknown> {
+    return {
+      message: this.message,
+      step: this.step,
+      operation: this.operation,
+      resource_type: this.resourceType,
+      resource_id: this.resourceId,
+      details: this.details,
+      context: this.context,
+      status_code: this.statusCode,
+      response: this.response,
+      original_error: this.originalError?.message
+    };
+  }
+}
+ 
+export class AuthenticationError extends NexlaError {
+  constructor(message = "Authentication failed", options: ConstructorParameters<typeof NexlaError>[1] = {}) {
+    super(message, { operation: "authentication", ...options });
+    this.name = "AuthenticationError";
+  }
+}
+ 
+export class AuthorizationError extends NexlaError {
+  constructor(message = "Authorization failed", options: ConstructorParameters<typeof NexlaError>[1] = {}) {
+    super(message, options);
+    this.name = "AuthorizationError";
+  }
+}
+ 
+export class NotFoundError extends NexlaError {
+  constructor(message = "Resource not found", options: ConstructorParameters<typeof NexlaError>[1] = {}) {
+    super(message, options);
+    this.name = "NotFoundError";
+  }
+}
+ 
+export class ValidationError extends NexlaError {
+  constructor(message = "Validation failed", options: ConstructorParameters<typeof NexlaError>[1] = {}) {
+    super(message, options);
+    this.name = "ValidationError";
+  }
+}
+ 
+export class RateLimitError extends NexlaError {
+  readonly retryAfter: number | undefined;
+ 
+  constructor(
+    message = "Rate limit exceeded",
+    options: ConstructorParameters<typeof NexlaError>[1] & { retryAfter?: number } = {}
+  ) {
+    super(message, options);
+    this.name = "RateLimitError";
+    this.retryAfter = options.retryAfter;
+  }
+}
+ 
+export class ServerError extends NexlaError {
+  constructor(message = "Server error", options: ConstructorParameters<typeof NexlaError>[1] = {}) {
+    super(message, options);
+    this.name = "ServerError";
+  }
+}
+ 
+export class ResourceConflictError extends NexlaError {
+  constructor(message = "Resource conflict", options: ConstructorParameters<typeof NexlaError>[1] = {}) {
+    super(message, options);
+    this.name = "ResourceConflictError";
+  }
+}
+ 
+export const isRecord = (value: unknown): value is Record<string, unknown> => {
+  return typeof value === "object" && value !== null;
+};
+ 
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/index.html b/packages/ts-sdk/coverage/lcov-report/src/index.html new file mode 100644 index 0000000..4694dab --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/index.html @@ -0,0 +1,116 @@ + + + + + + Code coverage report for src + + + + + + + + + +
+
+

All files src

+
+ +
+ 93.47% + Statements + 86/92 +
+ + +
+ 91.66% + Branches + 11/12 +
+ + +
+ 83.33% + Functions + 10/12 +
+ + +
+ 93.47% + Lines + 86/92 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
errors.ts +
+
93.47%86/9291.66%11/1283.33%10/1293.47%86/92
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/resources/index.html b/packages/ts-sdk/coverage/lcov-report/src/resources/index.html new file mode 100644 index 0000000..c36f30c --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/resources/index.html @@ -0,0 +1,116 @@ + + + + + + Code coverage report for src/resources + + + + + + + + + +
+
+

All files src/resources

+
+ +
+ 69.69% + Statements + 23/33 +
+ + +
+ 80% + Branches + 4/5 +
+ + +
+ 50% + Functions + 4/8 +
+ + +
+ 69.69% + Lines + 23/33 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
resource-client.ts +
+
69.69%23/3380%4/550%4/869.69%23/33
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/resources/resource-client.ts.html b/packages/ts-sdk/coverage/lcov-report/src/resources/resource-client.ts.html new file mode 100644 index 0000000..af5f1aa --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/resources/resource-client.ts.html @@ -0,0 +1,259 @@ + + + + + + Code coverage report for src/resources/resource-client.ts + + + + + + + + + +
+
+

All files / src/resources resource-client.ts

+
+ +
+ 69.69% + Statements + 23/33 +
+ + +
+ 80% + Branches + 4/5 +
+ + +
+ 50% + Functions + 4/8 +
+ + +
+ 69.69% + Lines + 23/33 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +591x +  +  +  +  +  +  +  +  +  +  +  +  +  +  +  +1x +1x +1x +  +1x +1x +1x +1x +  +1x +  +  +  +1x +  +  +  +1x +1x +1x +  +1x +  +  +  +1x +  +  +  +1x +1x +  +1x +  +  +  +1x +1x +  +1x +1x +1x + 
import type { HttpMethod, PathsWithMethod } from "openapi-typescript-helpers";
+import type { paths } from "../generated/schema.js";
+import { resourceMap } from "../generated/resource-map.js";
+import type { NexlaClient } from "../client/nexla-client.js";
+import { NexlaError } from "../errors.js";
+import type { RequestOptions } from "../client/types.js";
+ 
+export type ResourceKey = keyof typeof resourceMap;
+ 
+type ResourceEntry = {
+  path: string;
+  method: string;
+};
+ 
+type ResourceAction = "list" | "get" | "create" | "update" | "delete";
+ 
+export class ResourceClient {
+  private readonly client: NexlaClient;
+  private readonly key: ResourceKey;
+ 
+  constructor(client: NexlaClient, key: ResourceKey) {
+    this.client = client;
+    this.key = key;
+  }
+ 
+  async list(init?: Record<string, unknown>): Promise<unknown> {
+    return this.call("list", init);
+  }
+ 
+  async get(init?: Record<string, unknown>): Promise<unknown> {
+    return this.call("get", init);
+  }
+ 
+  async create(init?: Record<string, unknown>): Promise<unknown> {
+    return this.call("create", init);
+  }
+ 
+  async update(init?: Record<string, unknown>): Promise<unknown> {
+    return this.call("update", init);
+  }
+ 
+  async delete(init?: Record<string, unknown>): Promise<unknown> {
+    return this.call("delete", init);
+  }
+ 
+  private async call(action: ResourceAction, init?: Record<string, unknown>): Promise<unknown> {
+    const entry = (resourceMap[this.key] as Record<string, ResourceEntry | undefined>)[action];
+ 
+    if (!entry) {
+      throw new NexlaError(`Action '${action}' is not available for resource '${this.key}'.`);
+    }
+ 
+    const method = entry.method.toLowerCase() as HttpMethod;
+    const path = entry.path as PathsWithMethod<paths, HttpMethod>;
+ 
+    return this.client.request(method, path, init as RequestOptions);
+  }
+}
+ 
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/webhooks/index.html b/packages/ts-sdk/coverage/lcov-report/src/webhooks/index.html new file mode 100644 index 0000000..b01de5d --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/webhooks/index.html @@ -0,0 +1,116 @@ + + + + + + Code coverage report for src/webhooks + + + + + + + + + +
+
+

All files src/webhooks

+
+ +
+ 83.33% + Statements + 60/72 +
+ + +
+ 57.69% + Branches + 15/26 +
+ + +
+ 85.71% + Functions + 6/7 +
+ + +
+ 83.33% + Lines + 60/72 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
FileStatementsBranchesFunctionsLines
index.ts +
+
83.33%60/7257.69%15/2685.71%6/783.33%60/72
+
+
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov-report/src/webhooks/index.ts.html b/packages/ts-sdk/coverage/lcov-report/src/webhooks/index.ts.html new file mode 100644 index 0000000..4e3f6db --- /dev/null +++ b/packages/ts-sdk/coverage/lcov-report/src/webhooks/index.ts.html @@ -0,0 +1,373 @@ + + + + + + Code coverage report for src/webhooks/index.ts + + + + + + + + + +
+
+

All files / src/webhooks index.ts

+
+ +
+ 83.33% + Statements + 60/72 +
+ + +
+ 57.69% + Branches + 15/26 +
+ + +
+ 85.71% + Functions + 6/7 +
+ + +
+ 83.33% + Lines + 60/72 +
+ + +
+

+ Press n or j to go to the next uncovered block, b, p or k for the previous block. +

+ +
+
+

+
1 +2 +3 +4 +5 +6 +7 +8 +9 +10 +11 +12 +13 +14 +15 +16 +17 +18 +19 +20 +21 +22 +23 +24 +25 +26 +27 +28 +29 +30 +31 +32 +33 +34 +35 +36 +37 +38 +39 +40 +41 +42 +43 +44 +45 +46 +47 +48 +49 +50 +51 +52 +53 +54 +55 +56 +57 +58 +59 +60 +61 +62 +63 +64 +65 +66 +67 +68 +69 +70 +71 +72 +73 +74 +75 +76 +77 +78 +79 +80 +81 +82 +83 +84 +85 +86 +87 +88 +89 +90 +91 +92 +93 +94 +95 +96 +971x +  +  +  +  +  +  +  +  +  +  +  +  +  +1x +3x +3x +  +3x +3x +3x +3x +  +3x +3x +3x +3x +3x +3x +3x +  +3x +  +  +  +  +  +  +  +3x +3x +3x +3x +3x +3x +3x +  +3x +2x +2x +3x +3x +3x +  +3x +3x +1x +1x +  +3x +3x +3x +3x +3x +  +3x +1x +1x +1x +1x +  +2x +3x +3x +  +1x +3x +3x +  +  +3x +3x +3x +  +  +3x +  +1x +1x +1x +1x +1x +1x +1x +  +  + 
import { NexlaError, isRecord } from "../errors.js";
+ 
+export interface WebhookSendOptions {
+  includeHeaders?: boolean;
+  includeUrlParams?: boolean;
+  forceSchemaDetection?: boolean;
+  authMethod?: "query" | "header";
+}
+ 
+export interface WebhooksClientOptions {
+  apiKey: string;
+  fetch?: typeof fetch;
+}
+ 
+export class WebhooksClient {
+  private readonly apiKey: string;
+  private readonly fetchFn: typeof fetch;
+ 
+  constructor(options: WebhooksClientOptions) {
+    this.apiKey = options.apiKey;
+    this.fetchFn = options.fetch ?? globalThis.fetch;
+  }
+ 
+  async sendOneRecord(
+    webhookUrl: string,
+    record: Record<string, unknown>,
+    options?: WebhookSendOptions
+  ): Promise<unknown> {
+    return this.makeRequest(webhookUrl, record, options);
+  }
+ 
+  async sendManyRecords(
+    webhookUrl: string,
+    records: Record<string, unknown>[],
+    options?: WebhookSendOptions
+  ): Promise<unknown> {
+    return this.makeRequest(webhookUrl, records, options);
+  }
+ 
+  private async makeRequest(
+    webhookUrl: string,
+    body: Record<string, unknown> | Record<string, unknown>[],
+    options?: WebhookSendOptions
+  ): Promise<unknown> {
+    const authMethod = options?.authMethod ?? "query";
+    const url = new URL(webhookUrl);
+ 
+    if (authMethod === "query") {
+      url.searchParams.set("api_key", this.apiKey);
+    }
+    if (options?.includeHeaders) url.searchParams.set("include_headers", "true");
+    if (options?.includeUrlParams) url.searchParams.set("include_url_params", "true");
+    if (options?.forceSchemaDetection) url.searchParams.set("force_schema_detection", "true");
+ 
+    const headers: Record<string, string> = { "Content-Type": "application/json" };
+    if (authMethod === "header") {
+      headers["Authorization"] = `Basic ${Buffer.from(this.apiKey).toString("base64")}`;
+    }
+ 
+    const response = await this.fetchFn(url.toString(), {
+      method: "POST",
+      headers,
+      body: JSON.stringify(body)
+    });
+ 
+    if (!response.ok) {
+      const errorBody = await safeParseJson(response);
+      const message = extractErrorMessage(errorBody) ?? response.statusText ?? "Webhook request failed";
+      throw new NexlaError(message, { statusCode: response.status, response: errorBody });
+    }
+ 
+    return safeParseJson(response);
+  }
+}
+ 
+const safeParseJson = async (response: Response): Promise<unknown> => {
+  const contentType = response.headers.get("content-type")?.toLowerCase() ?? "";
+  if (!contentType.includes("application/json")) {
+    return undefined;
+  }
+  try {
+    return await response.json();
+  } catch {
+    return undefined;
+  }
+};
+ 
+const extractErrorMessage = (body: unknown): string | undefined => {
+  if (isRecord(body)) {
+    const message = body["message"];
+    if (typeof message === "string") return message;
+    const error = body["error"];
+    if (typeof error === "string") return error;
+  }
+  return undefined;
+};
+ +
+
+ + + + + + + + \ No newline at end of file diff --git a/packages/ts-sdk/coverage/lcov.info b/packages/ts-sdk/coverage/lcov.info new file mode 100644 index 0000000..72abb54 --- /dev/null +++ b/packages/ts-sdk/coverage/lcov.info @@ -0,0 +1,914 @@ +TN: +SF:src/errors.ts +FN:3, +FN:14,NexlaError +FN:41,getErrorSummary +FN:58,AuthenticationError +FN:65,AuthorizationError +FN:72,NotFoundError +FN:79,ValidationError +FN:85, +FN:88,RateLimitError +FN:99,ServerError +FN:106,ResourceConflictError +FN:112,isRecord +FNF:12 +FNH:10 +FNDA:29, +FNDA:29,NexlaError +FNDA:1,getErrorSummary +FNDA:5,AuthenticationError +FNDA:0,AuthorizationError +FNDA:13,NotFoundError +FNDA:3,ValidationError +FNDA:2, +FNDA:2,RateLimitError +FNDA:2,ServerError +FNDA:0,ResourceConflictError +FNDA:154,isRecord +DA:3,1 +DA:4,29 +DA:5,29 +DA:6,29 +DA:7,29 +DA:8,29 +DA:9,29 +DA:10,29 +DA:11,29 +DA:12,29 +DA:14,29 +DA:15,29 +DA:16,29 +DA:26,29 +DA:27,29 +DA:28,29 +DA:29,29 +DA:30,29 +DA:31,29 +DA:32,29 +DA:33,29 +DA:34,29 +DA:35,29 +DA:36,29 +DA:37,29 +DA:38,29 +DA:39,29 +DA:41,29 +DA:42,1 +DA:43,1 +DA:44,1 +DA:45,1 +DA:46,1 +DA:47,1 +DA:48,1 +DA:49,1 +DA:50,1 +DA:51,1 +DA:52,1 +DA:53,1 +DA:54,1 +DA:55,29 +DA:57,1 +DA:58,1 +DA:59,5 +DA:60,5 +DA:61,5 +DA:62,1 +DA:64,1 +DA:65,1 +DA:66,0 +DA:67,0 +DA:68,0 +DA:69,1 +DA:71,1 +DA:72,1 +DA:73,13 +DA:74,13 +DA:75,13 +DA:76,1 +DA:78,1 +DA:79,1 +DA:80,3 +DA:81,3 +DA:82,3 +DA:83,1 +DA:85,1 +DA:86,2 +DA:88,2 +DA:89,2 +DA:90,2 +DA:91,2 +DA:92,2 +DA:93,2 +DA:94,2 +DA:95,2 +DA:96,2 +DA:98,1 +DA:99,1 +DA:100,2 +DA:101,2 +DA:102,2 +DA:103,1 +DA:105,1 +DA:106,1 +DA:107,0 +DA:108,0 +DA:109,0 +DA:110,1 +DA:112,1 +DA:113,154 +DA:114,154 +LF:92 +LH:86 +BRDA:3,0,0,29 +BRDA:14,1,0,29 +BRDA:41,2,0,1 +BRDA:52,3,0,0 +BRDA:58,4,0,5 +BRDA:72,5,0,13 +BRDA:79,6,0,3 +BRDA:85,7,0,2 +BRDA:88,8,0,2 +BRDA:99,9,0,2 +BRDA:112,10,0,154 +BRDA:113,11,0,152 +BRF:12 +BRH:11 +end_of_record +TN: +SF:src/auth/access-token.ts +FN:4, +FN:8,AccessTokenAuthProvider +FN:15,getAccessToken 
+FN:19,refreshAccessToken +FN:23,logout +FNF:5 +FNH:4 +FNDA:10, +FNDA:10,AccessTokenAuthProvider +FNDA:7,getAccessToken +FNDA:1,refreshAccessToken +FNDA:0,logout +DA:1,1 +DA:4,1 +DA:5,10 +DA:6,10 +DA:8,10 +DA:9,10 +DA:10,0 +DA:11,0 +DA:12,10 +DA:13,10 +DA:15,10 +DA:16,7 +DA:17,7 +DA:19,10 +DA:20,1 +DA:21,1 +DA:23,10 +DA:25,0 +DA:26,10 +LF:19 +LH:16 +BRDA:4,0,0,10 +BRDA:8,1,0,10 +BRDA:9,2,0,0 +BRDA:15,3,0,7 +BRDA:19,4,0,1 +BRF:5 +BRH:4 +end_of_record +TN: +SF:src/auth/service-key.ts +FN:13, +FN:26,ServiceKeyAuthProvider +FN:35,getAccessToken +FN:49,refreshAccessToken +FN:53,logout +FN:75,isTokenNearExpiry +FN:81,obtainSessionToken +FN:123,safeParseJson +FN:135,extractErrorMessage +FNF:9 +FNH:9 +FNDA:128, +FNDA:128,ServiceKeyAuthProvider +FNDA:131,getAccessToken +FNDA:2,refreshAccessToken +FNDA:1,logout +FNDA:3,isTokenNearExpiry +FNDA:131,obtainSessionToken +FNDA:131,safeParseJson +FNDA:1,extractErrorMessage +DA:1,1 +DA:13,1 +DA:14,128 +DA:16,128 +DA:17,128 +DA:18,128 +DA:19,128 +DA:20,128 +DA:21,128 +DA:23,128 +DA:24,128 +DA:26,128 +DA:27,128 +DA:28,128 +DA:29,128 +DA:30,128 +DA:31,128 +DA:32,128 +DA:33,128 +DA:35,128 +DA:36,131 +DA:37,128 +DA:38,131 +DA:39,1 +DA:40,1 +DA:42,130 +DA:43,0 +DA:44,0 +DA:46,129 +DA:47,131 +DA:49,128 +DA:50,2 +DA:51,2 +DA:53,128 +DA:54,1 +DA:56,1 +DA:57,1 +DA:58,1 +DA:59,1 +DA:60,1 +DA:62,1 +DA:63,1 +DA:64,1 +DA:65,1 +DA:66,1 +DA:67,1 +DA:69,1 +DA:70,1 +DA:71,1 +DA:72,1 +DA:73,1 +DA:75,128 +DA:76,3 +DA:77,3 +DA:78,3 +DA:79,3 +DA:81,128 +DA:82,131 +DA:83,131 +DA:84,131 +DA:85,131 +DA:86,131 +DA:87,131 +DA:89,131 +DA:90,131 +DA:91,131 +DA:92,131 +DA:94,131 +DA:95,1 +DA:96,1 +DA:98,1 +DA:99,1 +DA:100,1 +DA:102,0 +DA:103,0 +DA:105,130 +DA:106,131 +DA:107,1 +DA:108,1 +DA:110,129 +DA:111,129 +DA:113,130 +DA:114,1 +DA:115,1 +DA:117,131 +DA:118,131 +DA:119,131 +DA:120,131 +DA:121,128 +DA:123,1 +DA:124,131 +DA:125,131 +DA:126,1 +DA:127,1 +DA:128,130 +DA:129,130 +DA:130,130 +DA:131,0 +DA:132,0 +DA:133,131 +DA:135,1 +DA:136,1 +DA:137,1 +DA:138,1 
+DA:139,0 +DA:140,0 +DA:141,1 +DA:142,0 +DA:143,0 +DA:144,0 +LF:110 +LH:99 +BRDA:13,0,0,128 +BRDA:26,1,0,128 +BRDA:35,2,0,131 +BRDA:36,3,0,128 +BRDA:37,4,0,126 +BRDA:38,5,0,130 +BRDA:38,6,0,3 +BRDA:38,7,0,1 +BRDA:40,8,0,129 +BRDA:42,9,0,0 +BRDA:44,10,0,129 +BRDA:49,11,0,2 +BRDA:50,12,0,1 +BRDA:53,13,0,1 +BRDA:54,14,0,0 +BRDA:60,15,0,0 +BRDA:66,16,0,0 +BRDA:75,17,0,3 +BRDA:76,18,0,0 +BRDA:81,19,0,131 +BRDA:87,20,0,0 +BRDA:94,21,0,1 +BRDA:96,22,0,0 +BRDA:100,23,0,0 +BRDA:103,24,0,130 +BRDA:106,25,0,1 +BRDA:108,26,0,130 +BRDA:108,27,0,129 +BRDA:113,28,0,1 +BRDA:115,29,0,128 +BRDA:117,30,0,0 +BRDA:123,31,0,131 +BRDA:124,32,0,0 +BRDA:125,33,0,1 +BRDA:127,34,0,130 +BRDA:130,35,0,0 +BRDA:135,36,0,1 +BRDA:138,37,0,0 +BRDA:141,38,0,0 +BRF:39 +BRH:26 +end_of_record +TN: +SF:src/client/http.ts +FN:10,createFetchWithRetry +FN:41,shouldRetryResponse +FN:45,computeRetryDelay +FN:59,sleep +FNF:4 +FNH:4 +FNDA:134,createFetchWithRetry +FNDA:137,shouldRetryResponse +FNDA:3,computeRetryDelay +FNDA:3,sleep +DA:8,1 +DA:10,1 +DA:11,134 +DA:12,134 +DA:13,134 +DA:14,134 +DA:16,134 +DA:17,134 +DA:19,134 +DA:20,134 +DA:21,137 +DA:22,137 +DA:23,137 +DA:24,137 +DA:25,3 +DA:26,3 +DA:27,3 +DA:28,3 +DA:29,3 +DA:30,134 +DA:31,137 +DA:32,0 +DA:33,0 +DA:34,0 +DA:35,0 +DA:36,0 +DA:37,137 +DA:38,134 +DA:39,134 +DA:41,1 +DA:42,137 +DA:43,137 +DA:45,1 +DA:46,3 +DA:47,3 +DA:48,1 +DA:49,1 +DA:50,1 +DA:51,1 +DA:52,1 +DA:54,2 +DA:55,2 +DA:56,2 +DA:57,2 +DA:59,1 +LF:45 +LH:40 +BRDA:10,0,0,134 +BRDA:11,1,0,130 +BRDA:12,2,0,131 +BRDA:13,3,0,131 +BRDA:16,4,0,134 +BRDA:20,5,0,137 +BRDA:21,6,0,134 +BRDA:21,7,0,3 +BRDA:24,8,0,4 +BRDA:24,9,0,136 +BRDA:24,10,0,3 +BRDA:29,11,0,134 +BRDA:31,12,0,0 +BRDA:41,13,0,137 +BRDA:45,14,0,3 +BRDA:47,15,0,1 +BRDA:52,16,0,2 +BRDA:59,17,0,3 +BRDA:59,18,0,3 +BRF:19 +BRH:18 +end_of_record +TN: +SF:src/client/nexla-client.ts +FN:26, +FN:35,NexlaClient +FN:74,onRequest +FN:102,request +FN:107,execute +FN:127,requestOperation +FN:137,logout +FN:141,invoke +FN:168,mapError 
+FN:193,extractErrorMessage +FN:204,parseRetryAfter +FNF:11 +FNH:10 +FNDA:131, +FNDA:131,NexlaClient +FNDA:131,onRequest +FNDA:130,request +FNDA:131,execute +FNDA:122,requestOperation +FNDA:0,logout +FNDA:131,invoke +FNDA:22,mapError +FNDA:22,extractErrorMessage +FNDA:1,parseRetryAfter +DA:1,1 +DA:26,1 +DA:27,131 +DA:28,131 +DA:29,131 +DA:30,131 +DA:31,131 +DA:32,131 +DA:35,131 +DA:36,131 +DA:37,131 +DA:39,131 +DA:40,0 +DA:41,0 +DA:42,0 +DA:43,0 +DA:44,131 +DA:45,0 +DA:46,0 +DA:48,131 +DA:49,131 +DA:50,131 +DA:52,131 +DA:53,131 +DA:55,131 +DA:56,123 +DA:57,123 +DA:58,123 +DA:59,123 +DA:60,123 +DA:61,123 +DA:62,123 +DA:63,123 +DA:64,123 +DA:65,123 +DA:66,123 +DA:67,131 +DA:68,8 +DA:69,8 +DA:71,131 +DA:73,131 +DA:74,131 +DA:75,131 +DA:76,131 +DA:77,131 +DA:79,131 +DA:80,131 +DA:81,0 +DA:82,0 +DA:83,131 +DA:84,1 +DA:85,1 +DA:86,131 +DA:87,130 +DA:88,130 +DA:89,130 +DA:91,131 +DA:92,131 +DA:93,131 +DA:95,131 +DA:96,0 +DA:97,0 +DA:99,131 +DA:100,131 +DA:102,131 +DA:103,130 +DA:104,130 +DA:105,130 +DA:106,130 +DA:107,130 +DA:108,131 +DA:109,131 +DA:110,131 +DA:111,22 +DA:112,22 +DA:113,109 +DA:114,131 +DA:116,130 +DA:117,130 +DA:118,130 +DA:119,22 +DA:120,2 +DA:121,1 +DA:122,1 +DA:123,20 +DA:124,20 +DA:125,130 +DA:127,131 +DA:128,122 +DA:129,122 +DA:130,122 +DA:131,122 +DA:132,122 +DA:133,122 +DA:134,106 +DA:135,122 +DA:137,131 +DA:138,0 +DA:139,0 +DA:141,131 +DA:142,131 +DA:143,131 +DA:144,131 +DA:145,131 +DA:146,131 +DA:147,131 +DA:148,65 +DA:149,131 +DA:150,16 +DA:151,131 +DA:152,38 +DA:153,131 +DA:154,0 +DA:155,131 +DA:156,12 +DA:157,131 +DA:158,0 +DA:159,131 +DA:160,0 +DA:161,131 +DA:162,0 +DA:163,131 +DA:164,0 +DA:165,131 +DA:166,131 +DA:168,131 +DA:169,22 +DA:170,22 +DA:172,22 +DA:173,3 +DA:174,3 +DA:175,21 +DA:176,13 +DA:177,13 +DA:178,22 +DA:179,3 +DA:180,3 +DA:181,5 +DA:182,1 +DA:183,1 +DA:184,1 +DA:185,2 +DA:186,2 +DA:187,2 +DA:189,0 +DA:190,22 +DA:191,131 +DA:193,1 +DA:194,22 +DA:195,21 +DA:196,21 +DA:197,1 +DA:198,1 +DA:199,21 +DA:200,1 +DA:201,1 +DA:202,1 
+DA:204,1 +DA:205,1 +DA:206,1 +DA:207,1 +DA:208,1 +LF:162 +LH:144 +BRDA:26,0,0,131 +BRDA:35,1,0,131 +BRDA:36,2,0,8 +BRDA:37,3,0,123 +BRDA:39,4,0,8 +BRDA:39,5,0,0 +BRDA:44,6,0,123 +BRDA:44,7,0,0 +BRDA:48,8,0,0 +BRDA:48,9,0,0 +BRDA:52,10,0,0 +BRDA:55,11,0,123 +BRDA:64,12,0,0 +BRDA:67,13,0,8 +BRDA:68,14,0,0 +BRDA:95,15,0,0 +BRDA:74,16,0,131 +BRDA:80,17,0,0 +BRDA:80,18,0,0 +BRDA:80,19,0,0 +BRDA:83,20,0,1 +BRDA:86,21,0,130 +BRDA:102,22,0,130 +BRDA:118,23,0,22 +BRDA:119,24,0,3 +BRDA:119,25,0,2 +BRDA:120,26,0,1 +BRDA:122,27,0,20 +BRDA:107,28,0,131 +BRDA:110,29,0,22 +BRDA:111,30,0,0 +BRDA:112,31,0,109 +BRDA:127,32,0,122 +BRDA:133,33,0,106 +BRDA:141,34,0,131 +BRDA:147,35,0,65 +BRDA:149,36,0,16 +BRDA:151,37,0,38 +BRDA:153,38,0,0 +BRDA:155,39,0,12 +BRDA:157,40,0,0 +BRDA:159,41,0,0 +BRDA:161,42,0,0 +BRDA:163,43,0,0 +BRDA:168,44,0,22 +BRDA:169,45,0,1 +BRDA:172,46,0,20 +BRDA:172,47,0,21 +BRDA:172,48,0,19 +BRDA:172,49,0,3 +BRDA:175,50,0,13 +BRDA:177,51,0,6 +BRDA:178,52,0,8 +BRDA:178,53,0,3 +BRDA:180,54,0,5 +BRDA:180,55,0,3 +BRDA:181,56,0,1 +BRDA:183,57,0,0 +BRDA:184,58,0,2 +BRDA:187,59,0,0 +BRDA:193,60,0,22 +BRDA:194,61,0,21 +BRDA:196,62,0,20 +BRDA:196,63,0,1 +BRDA:199,64,0,1 +BRDA:200,65,0,0 +BRDA:204,66,0,1 +BRDA:205,67,0,0 +BRDA:207,68,0,0 +BRF:69 +BRH:47 +end_of_record +TN: +SF:src/client/operation-types.ts +FN:1,(empty-report) +FNF:1 +FNH:0 +FNDA:0,(empty-report) +DA:1,0 +DA:2,0 +DA:3,0 +DA:4,0 +DA:5,0 +DA:6,0 +DA:7,0 +DA:8,0 +DA:9,0 +DA:10,0 +DA:11,0 +DA:12,0 +DA:13,0 +DA:14,0 +DA:15,0 +DA:16,0 +DA:17,0 +DA:18,0 +DA:19,0 +LF:19 +LH:0 +BRDA:1,0,0,0 +BRF:1 +BRH:0 +end_of_record +TN: +SF:src/resources/resource-client.ts +FN:17, +FN:21,ResourceClient +FN:26,list +FN:30,get +FN:34,create +FN:38,update +FN:42,delete +FN:46,call +FNF:8 +FNH:4 +FNDA:1, +FNDA:1,ResourceClient +FNDA:0,list +FNDA:0,get +FNDA:1,create +FNDA:0,update +FNDA:0,delete +FNDA:1,call +DA:1,1 +DA:17,1 +DA:18,1 +DA:19,1 +DA:21,1 +DA:22,1 +DA:23,1 +DA:24,1 +DA:26,1 +DA:27,0 +DA:28,0 +DA:30,1 +DA:31,0 +DA:32,0 
+DA:34,1 +DA:35,1 +DA:36,1 +DA:38,1 +DA:39,0 +DA:40,0 +DA:42,1 +DA:43,0 +DA:44,0 +DA:46,1 +DA:47,1 +DA:49,1 +DA:50,0 +DA:51,0 +DA:53,1 +DA:54,1 +DA:56,1 +DA:57,1 +DA:58,1 +LF:33 +LH:23 +BRDA:17,0,0,1 +BRDA:21,1,0,1 +BRDA:34,2,0,1 +BRDA:46,3,0,1 +BRDA:49,4,0,0 +BRF:5 +BRH:4 +end_of_record +TN: +SF:src/webhooks/index.ts +FN:15, +FN:19,WebhooksClient +FN:24,sendOneRecord +FN:32,sendManyRecords +FN:40,makeRequest +FN:76,safeParseJson +FN:88,extractErrorMessage +FNF:7 +FNH:6 +FNDA:3, +FNDA:3,WebhooksClient +FNDA:3,sendOneRecord +FNDA:0,sendManyRecords +FNDA:3,makeRequest +FNDA:3,safeParseJson +FNDA:1,extractErrorMessage +DA:1,1 +DA:15,1 +DA:16,3 +DA:17,3 +DA:19,3 +DA:20,3 +DA:21,3 +DA:22,3 +DA:24,3 +DA:25,3 +DA:26,3 +DA:27,3 +DA:28,3 +DA:29,3 +DA:30,3 +DA:32,3 +DA:33,0 +DA:34,0 +DA:35,0 +DA:36,0 +DA:37,0 +DA:38,0 +DA:40,3 +DA:41,3 +DA:42,3 +DA:43,3 +DA:44,3 +DA:45,3 +DA:46,3 +DA:48,3 +DA:49,2 +DA:50,2 +DA:51,3 +DA:52,3 +DA:53,3 +DA:55,3 +DA:56,3 +DA:57,1 +DA:58,1 +DA:60,3 +DA:61,3 +DA:62,3 +DA:63,3 +DA:64,3 +DA:66,3 +DA:67,1 +DA:68,1 +DA:69,1 +DA:70,1 +DA:72,2 +DA:73,3 +DA:74,3 +DA:76,1 +DA:77,3 +DA:78,3 +DA:79,0 +DA:80,0 +DA:81,3 +DA:82,3 +DA:83,3 +DA:84,0 +DA:85,0 +DA:86,3 +DA:88,1 +DA:89,1 +DA:90,1 +DA:91,1 +DA:92,1 +DA:93,1 +DA:94,1 +DA:95,0 +DA:96,0 +LF:72 +LH:60 +BRDA:15,0,0,3 +BRDA:19,1,0,3 +BRDA:21,2,0,0 +BRDA:24,3,0,3 +BRDA:40,4,0,3 +BRDA:45,5,0,1 +BRDA:45,6,0,2 +BRDA:48,7,0,2 +BRDA:51,8,0,1 +BRDA:51,9,0,0 +BRDA:52,10,0,1 +BRDA:52,11,0,0 +BRDA:53,12,0,1 +BRDA:53,13,0,0 +BRDA:56,14,0,1 +BRDA:66,15,0,1 +BRDA:68,16,0,0 +BRDA:68,17,0,0 +BRDA:70,18,0,2 +BRDA:76,19,0,3 +BRDA:77,20,0,0 +BRDA:78,21,0,0 +BRDA:83,22,0,0 +BRDA:88,23,0,1 +BRDA:91,24,0,0 +BRDA:94,25,0,0 +BRF:26 +BRH:15 +end_of_record diff --git a/packages/ts-sdk/eslint.config.js b/packages/ts-sdk/eslint.config.js new file mode 100644 index 0000000..5a230f5 --- /dev/null +++ b/packages/ts-sdk/eslint.config.js @@ -0,0 +1,35 @@ +import tseslint from "@typescript-eslint/eslint-plugin"; +import parser from 
"@typescript-eslint/parser"; + +export default [ + { + ignores: [ + "coverage/**", + "dist/**", + "src/generated/**", + "src/resources/generated/**", + "scripts/**", + "tsup.config.ts", + "vitest.config.ts" + ] + }, + { + files: ["src/**/*.ts", "tests/**/*.ts"], + languageOptions: { + parser, + parserOptions: { + sourceType: "module", + project: "./tsconfig.json" + } + }, + plugins: { + "@typescript-eslint": tseslint + }, + rules: { + "@typescript-eslint/no-explicit-any": "error", + "@typescript-eslint/consistent-type-imports": ["error", { "prefer": "type-imports" }], + "@typescript-eslint/no-unused-vars": ["error", { "argsIgnorePattern": "^_" }], + "@typescript-eslint/ban-ts-comment": ["error", { "ts-ignore": "allow-with-description" }] + } + } +]; diff --git a/packages/ts-sdk/package.json b/packages/ts-sdk/package.json new file mode 100644 index 0000000..6685f67 --- /dev/null +++ b/packages/ts-sdk/package.json @@ -0,0 +1,54 @@ +{ + "name": "@nexla/sdk", + "version": "0.1.0", + "description": "TypeScript SDK for Nexla", + "type": "module", + "engines": { + "node": ">=18" + }, + "main": "./dist/index.cjs", + "module": "./dist/index.mjs", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "types": "./dist/index.d.ts", + "import": "./dist/index.mjs", + "require": "./dist/index.cjs" + } + }, + "files": [ + "dist", + "README.md" + ], + "scripts": { + "build": "tsup", + "dev": "tsup --watch", + "gen:types": "openapi-typescript ../../plugin-redoc-0.yaml -o src/generated/schema.ts", + "gen:resources": "node ./scripts/generate-resource-map.mjs", + "gen:spec-metadata": "node ./scripts/generate-spec-metadata.mjs", + "gen": "pnpm run gen:types && pnpm run gen:resources && pnpm run gen:spec-metadata", + "check:generated": "node ./scripts/check-generated-coverage.mjs", + "lint": "eslint src tests --ext .ts", + "typecheck": "tsc --noEmit -p tsconfig.typecheck.json", + "test": "vitest run", + "test:integration": "vitest run tests/integration --passWithNoTests", + 
"test:watch": "vitest", + "coverage": "vitest run --coverage" + }, + "dependencies": { + "openapi-fetch": "^0.10.2" + }, + "devDependencies": { + "@types/node": "^20.11.30", + "@typescript-eslint/eslint-plugin": "^8.25.0", + "@typescript-eslint/parser": "^8.25.0", + "@vitest/coverage-v8": "^2.1.8", + "eslint": "^9.18.0", + "openapi-typescript": "^6.7.6", + "openapi-typescript-helpers": "^0.0.11", + "tsup": "^8.3.5", + "typescript": "^5.7.3", + "vitest": "^2.1.8", + "yaml": "^2.4.5" + } +} diff --git a/packages/ts-sdk/scripts/check-generated-coverage.mjs b/packages/ts-sdk/scripts/check-generated-coverage.mjs new file mode 100644 index 0000000..fcebf5a --- /dev/null +++ b/packages/ts-sdk/scripts/check-generated-coverage.mjs @@ -0,0 +1,185 @@ +import crypto from "node:crypto"; +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import YAML from "yaml"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const packageRoot = path.resolve(__dirname, ".."); +const repoRoot = path.resolve(packageRoot, "..", ".."); + +const specPath = path.join(repoRoot, "plugin-redoc-0.yaml"); +const generatedResourcesDir = path.join(packageRoot, "src", "resources", "generated"); +const specMetadataPath = path.join(packageRoot, "src", "generated", "spec-metadata.ts"); + +const METHODS = ["get", "post", "put", "patch", "delete", "options", "head", "trace"]; + +const endpointKey = (method, apiPath) => `${method.toUpperCase()} ${apiPath}`; +const normalizeTag = (value) => String(value).replace(/\s+/g, " ").trim(); + +const readSpecOperations = () => { + const rawSpec = fs.readFileSync(specPath, "utf8"); + const spec = YAML.parse(rawSpec); + const endpointToOperationId = new Map(); + + for (const [apiPath, pathItem] of Object.entries(spec?.paths ?? 
{})) { + for (const method of METHODS) { + const operation = pathItem?.[method]; + if (!operation) continue; + + const tags = Array.isArray(operation.tags) ? operation.tags.map(normalizeTag) : []; + if (tags.includes("Webhooks")) continue; + + endpointToOperationId.set(endpointKey(method, apiPath), operation.operationId ?? null); + } + } + + return endpointToOperationId; +}; + +const readGeneratedEndpoints = () => { + const endpoints = new Set(); + const operationIds = new Set(); + const methodNames = new Set(); + const files = fs + .readdirSync(generatedResourcesDir) + .filter((fileName) => fileName.endsWith(".ts") && fileName !== "index.ts" && fileName !== "utils.ts") + .sort(); + + const requestRegex = /\.request\(\s*"([a-z]+)"\s*,\s*"([^"]+)"/g; + const requestOperationRegex = /\.requestOperation\(\s*"([^"]+)"\s*,\s*"([a-z]+)"\s*,\s*"([^"]+)"/g; + const asyncMethodRegex = /async\s+(?:\["([^"]+)"\]|([A-Za-z_$][A-Za-z0-9_$]*))\s*\(/g; + + for (const fileName of files) { + const fullPath = path.join(generatedResourcesDir, fileName); + const contents = fs.readFileSync(fullPath, "utf8"); + + for (const match of contents.matchAll(requestRegex)) { + endpoints.add(endpointKey(match[1], match[2])); + } + + for (const match of contents.matchAll(requestOperationRegex)) { + endpoints.add(endpointKey(match[2], match[3])); + operationIds.add(match[1]); + } + + for (const match of contents.matchAll(asyncMethodRegex)) { + methodNames.add(match[1] || match[2]); + } + } + + return { endpoints, operationIds, methodNames }; +}; + +const parseSpecMetadata = () => { + if (!fs.existsSync(specMetadataPath)) { + throw new Error(`Missing generated file: ${specMetadataPath}`); + } + + const contents = fs.readFileSync(specMetadataPath, "utf8"); + const hashAlgorithmMatch = contents.match(/export const SPEC_HASH_ALGORITHM\s*=\s*"([^"]+)"/); + const hashMatch = contents.match(/export const SPEC_HASH\s*=\s*"([0-9a-fA-F]+)"/); + + if (!hashAlgorithmMatch || !hashMatch) { + throw new Error( + 
`Unable to parse SPEC_HASH_ALGORITHM and SPEC_HASH from ${specMetadataPath}. Run: pnpm -C packages/ts-sdk gen:spec-metadata` + ); + } + + return { + hashAlgorithm: hashAlgorithmMatch[1], + hash: hashMatch[1].toLowerCase() + }; +}; + +const computeSpecHash = (hashAlgorithm) => + crypto.createHash(hashAlgorithm).update(fs.readFileSync(specPath)).digest("hex"); + +const formatEndpointList = (items, limit = 20) => { + const shown = items.slice(0, limit).map((item) => ` - ${item}`); + if (items.length > limit) { + shown.push(` - ... (${items.length - limit} more)`); + } + return shown.join("\n"); +}; + +const specOperations = readSpecOperations(); +const specEndpoints = new Set(specOperations.keys()); +const { + endpoints: generatedEndpoints, + operationIds: generatedOperationIds, + methodNames: generatedMethodNames +} = readGeneratedEndpoints(); + +const missingEndpoints = [...specEndpoints].filter((key) => !generatedEndpoints.has(key)); +const coveredByOperationId = []; +const hardMissingEndpoints = []; + +for (const missingEndpoint of missingEndpoints) { + const operationId = specOperations.get(missingEndpoint); + if (operationId && (generatedOperationIds.has(operationId) || generatedMethodNames.has(operationId))) { + coveredByOperationId.push(missingEndpoint); + } else { + hardMissingEndpoints.push(missingEndpoint); + } +} + +hardMissingEndpoints.sort(); +coveredByOperationId.sort(); +const extraEndpoints = [...generatedEndpoints].filter((key) => !specEndpoints.has(key)).sort(); + +const errors = []; + +if (hardMissingEndpoints.length > 0) { + errors.push( + [ + `Missing generated endpoint coverage for ${hardMissingEndpoints.length} spec path/method entries (excluding Webhooks):`, + formatEndpointList(hardMissingEndpoints) + ].join("\n") + ); +} + +if (extraEndpoints.length > 0) { + errors.push( + [ + `Generated resource endpoint coverage includes ${extraEndpoints.length} path/method entries not present in spec:`, + formatEndpointList(extraEndpoints) + ].join("\n") 
+ ); +} + +let metadata; +try { + metadata = parseSpecMetadata(); + const computedHash = computeSpecHash(metadata.hashAlgorithm); + if (computedHash !== metadata.hash) { + errors.push( + `Spec hash mismatch: spec-metadata has ${metadata.hashAlgorithm}:${metadata.hash}, expected ${metadata.hashAlgorithm}:${computedHash}. Run: pnpm -C packages/ts-sdk gen:spec-metadata` + ); + } +} catch (error) { + const message = error instanceof Error ? error.message : String(error); + errors.push(message); +} + +if (errors.length > 0) { + console.error("Generated SDK quality gate failed:\n"); + console.error(errors.join("\n\n")); + process.exit(1); +} + +if (coveredByOperationId.length > 0) { + console.warn( + [ + `Coverage note: ${coveredByOperationId.length} spec path/method entries share duplicate operationIds and are treated as covered by generated operation methods:`, + formatEndpointList(coveredByOperationId) + ].join("\n") + ); +} + +console.log( + `Generated endpoint coverage OK: ${generatedEndpoints.size} unique endpoints match ${specEndpoints.size} spec endpoints (excluding Webhooks-tagged operations).` +); +console.log(`Spec metadata hash OK: ${metadata.hashAlgorithm}:${metadata.hash}`); diff --git a/packages/ts-sdk/scripts/generate-parity-matrix.mjs b/packages/ts-sdk/scripts/generate-parity-matrix.mjs new file mode 100644 index 0000000..3ee5f8d --- /dev/null +++ b/packages/ts-sdk/scripts/generate-parity-matrix.mjs @@ -0,0 +1,293 @@ +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import YAML from "yaml"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const repoRoot = path.resolve(__dirname, "..", "..", ".."); +const pythonClientPath = path.join(repoRoot, "nexla_sdk", "client.py"); +const tsClientPath = path.join(repoRoot, "packages", "ts-sdk", "src", "client", "nexla-client.ts"); +const tsGeneratedIndexPath = path.join(repoRoot, "packages", "ts-sdk", "src", 
"resources", "generated", "index.ts"); +const tsGeneratedResourcesDir = path.join(repoRoot, "packages", "ts-sdk", "src", "resources", "generated"); +const openApiPath = path.join(repoRoot, "plugin-redoc-0.yaml"); +const outputPath = path.join(repoRoot, "docs", "ts-sdk", "parity-matrix.md"); + +const METHODS = ["get", "post", "put", "patch", "delete", "options", "head", "trace"]; + +const read = (filePath) => fs.readFileSync(filePath, "utf8"); + +const toPercent = (numerator, denominator) => { + if (denominator === 0) return "0.0"; + return ((numerator / denominator) * 100).toFixed(1); +}; + +const extractPythonResources = (source) => { + const set = new Set(); + const assignmentPattern = /self\.([a-z_]+)\s*=\s*[A-Za-z0-9_]+Resource\(self\)/g; + + let match = assignmentPattern.exec(source); + while (match) { + set.add(match[1]); + match = assignmentPattern.exec(source); + } + + if (/def\s+create_webhook_client\s*\(/.test(source)) { + set.add("webhooks"); + } + + return [...set].sort(); +}; + +const extractTsResources = (generatedIndexSource, tsClientSource) => { + const set = new Set(); + const assignmentPattern = /^\s*([a-z_]+):\s*new\s+[A-Za-z0-9_]+\(client\),?\s*$/gm; + + let match = assignmentPattern.exec(generatedIndexSource); + while (match) { + set.add(match[1]); + match = assignmentPattern.exec(generatedIndexSource); + } + + if ( + /readonly\s+webhooks\?:\s+WebhooksClient;/.test(tsClientSource) || + /new\s+WebhooksClient\(/.test(tsClientSource) + ) { + set.add("webhooks"); + } + + return [...set].sort(); +}; + +const extractSpecOperationIds = (rawSpec) => { + const spec = YAML.parse(rawSpec); + const paths = spec?.paths ?? 
{}; + + const allOperationIds = new Set(); + const sessionOperationIds = new Set(); + const webhookOperationIds = new Set(); + + for (const pathItem of Object.values(paths)) { + for (const method of METHODS) { + const operation = pathItem?.[method]; + if (!operation?.operationId) continue; + + const operationId = operation.operationId; + allOperationIds.add(operationId); + + const firstTag = Array.isArray(operation.tags) && typeof operation.tags[0] === "string" + ? operation.tags[0].trim() + : ""; + + if (firstTag === "Webhooks") { + webhookOperationIds.add(operationId); + } else { + sessionOperationIds.add(operationId); + } + } + } + + return { allOperationIds, sessionOperationIds, webhookOperationIds }; +}; + +const extractTsOperationIds = (generatedResourcesDir) => { + const set = new Set(); + const files = fs + .readdirSync(generatedResourcesDir) + .filter((fileName) => fileName.endsWith(".ts") && fileName !== "index.ts" && fileName !== "utils.ts"); + + for (const fileName of files) { + const filePath = path.join(generatedResourcesDir, fileName); + const source = read(filePath); + const operationPattern = /requestOperation\("([^"]+)"/g; + + let match = operationPattern.exec(source); + while (match) { + set.add(match[1]); + match = operationPattern.exec(source); + } + } + + return set; +}; + +const formatCode = (value) => `\`${value}\``; + +const buildResourceRows = (pythonResources, tsResources) => { + const tsSet = new Set(tsResources); + + const aliasMap = { + webhooks: "webhooks" + }; + + const notes = { + webhooks: + "Python uses create_webhook_client(); TS uses WebhooksClient directly or NexlaClient({ webhookApiKey })." + }; + + const rows = []; + let coveredCount = 0; + + for (const pythonResource of pythonResources) { + const alias = aliasMap[pythonResource] ?? pythonResource; + const isCovered = tsSet.has(alias); + + if (isCovered) coveredCount += 1; + + const tsEquivalent = isCovered + ? alias === "webhooks" + ? 
"`WebhooksClient` / `client.webhooks`" + : formatCode(`client.${alias}`) + : "--"; + + const status = isCovered ? "covered" : "missing"; + const note = notes[pythonResource] + ? notes[pythonResource] + : isCovered + ? "Generated TS resource client available." + : "Use `client.raw` for typed path-level access until this resource is generated."; + + rows.push(`| ${formatCode(pythonResource)} | ${tsEquivalent} | ${status} | ${note} |`); + } + + return { rows, coveredCount }; +}; + +const buildTsOnlyRows = (pythonResources, tsResources) => { + const pythonSet = new Set(pythonResources); + + return tsResources + .filter((resource) => !pythonSet.has(resource)) + .map((resource) => { + const note = resource === "webhooks" + ? "Webhook support exists in both SDKs but with different entry points." + : "OpenAPI-generated TS resource. Python client does not expose this as a first-class resource property."; + return `| ${formatCode(resource)} | ${note} |`; + }); +}; + +const renderMarkdown = ({ + pythonResources, + tsResources, + coveredCount, + specOperationIds, + specSessionOperationIds, + specWebhookOperationIds, + tsOperationIds, + missingOperationIds, + extraOperationIds, + pythonRows, + tsOnlyRows +}) => { + const lines = []; + const generatedAt = new Date().toISOString(); + + lines.push("# TypeScript SDK Parity Matrix (Generated)"); + lines.push(""); + lines.push("> Auto-generated by `node packages/ts-sdk/scripts/generate-parity-matrix.mjs`. 
Do not edit this file manually."); + lines.push(""); + lines.push(`Generated at: ${formatCode(generatedAt)}`); + lines.push(""); + lines.push("## Summary"); + lines.push(""); + lines.push(`- Python resources discovered: **${pythonResources.length}**`); + lines.push(`- TS resources discovered: **${tsResources.length}**`); + lines.push( + `- Python resource parity: **${coveredCount}/${pythonResources.length} (${toPercent(coveredCount, pythonResources.length)}%)**` + ); + lines.push(`- OpenAPI operations in spec: **${specOperationIds.size}**`); + lines.push(`- OpenAPI session operations in spec (excluding webhook-tagged operations): **${specSessionOperationIds.size}**`); + lines.push(`- OpenAPI webhook-tagged operations in spec: **${specWebhookOperationIds.size}**`); + lines.push(`- OperationIds implemented in generated TS resources: **${tsOperationIds.size}**`); + lines.push( + `- Session operationId coverage: **${specSessionOperationIds.size - missingOperationIds.length}/${specSessionOperationIds.size} (${toPercent(specSessionOperationIds.size - missingOperationIds.length, specSessionOperationIds.size)}%)**` + ); + lines.push(""); + lines.push("## Python To TS Resource Parity"); + lines.push(""); + lines.push("| Python resource | TS equivalent | Status | Notes |"); + lines.push("| --- | --- | --- | --- |"); + lines.push(...pythonRows); + + if (tsOnlyRows.length > 0) { + lines.push(""); + lines.push("## TS-Only Resource Surfaces"); + lines.push(""); + lines.push("| TS resource | Notes |"); + lines.push("| --- | --- |"); + lines.push(...tsOnlyRows); + } + + lines.push(""); + lines.push("## Operation Coverage Details"); + lines.push(""); + lines.push(`- Missing session operationIds in generated TS resources: **${missingOperationIds.length}**`); + lines.push(`- Extra operationIds in generated TS resources (not found in current spec session set): **${extraOperationIds.length}**`); + + if (missingOperationIds.length > 0) { + lines.push(""); + lines.push("### Missing 
operationIds"); + lines.push(""); + for (const operationId of missingOperationIds) { + lines.push(`- ${formatCode(operationId)}`); + } + } + + if (extraOperationIds.length > 0) { + lines.push(""); + lines.push("### Extra operationIds"); + lines.push(""); + for (const operationId of extraOperationIds) { + lines.push(`- ${formatCode(operationId)}`); + } + } + + lines.push(""); + return `${lines.join("\n")}`; +}; + +const main = () => { + const pythonClientSource = read(pythonClientPath); + const tsClientSource = read(tsClientPath); + const tsGeneratedIndexSource = read(tsGeneratedIndexPath); + const rawSpec = read(openApiPath); + + const pythonResources = extractPythonResources(pythonClientSource); + const tsResources = extractTsResources(tsGeneratedIndexSource, tsClientSource); + + const { allOperationIds, sessionOperationIds, webhookOperationIds } = extractSpecOperationIds(rawSpec); + const tsOperationIds = extractTsOperationIds(tsGeneratedResourcesDir); + + const missingOperationIds = [...sessionOperationIds] + .filter((operationId) => !tsOperationIds.has(operationId)) + .sort(); + + const extraOperationIds = [...tsOperationIds] + .filter((operationId) => !sessionOperationIds.has(operationId)) + .sort(); + + const { rows: pythonRows, coveredCount } = buildResourceRows(pythonResources, tsResources); + const tsOnlyRows = buildTsOnlyRows(pythonResources, tsResources); + + const markdown = renderMarkdown({ + pythonResources, + tsResources, + coveredCount, + specOperationIds: allOperationIds, + specSessionOperationIds: sessionOperationIds, + specWebhookOperationIds: webhookOperationIds, + tsOperationIds, + missingOperationIds, + extraOperationIds, + pythonRows, + tsOnlyRows + }); + + fs.mkdirSync(path.dirname(outputPath), { recursive: true }); + fs.writeFileSync(outputPath, markdown, "utf8"); + + process.stdout.write(`Wrote ${path.relative(repoRoot, outputPath)}\n`); +}; + +main(); diff --git a/packages/ts-sdk/scripts/generate-resource-map.mjs 
b/packages/ts-sdk/scripts/generate-resource-map.mjs new file mode 100644 index 0000000..055ab40 --- /dev/null +++ b/packages/ts-sdk/scripts/generate-resource-map.mjs @@ -0,0 +1,361 @@ +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import YAML from "yaml"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const repoRoot = path.resolve(__dirname, "..", "..", ".."); +const specPath = path.join(repoRoot, "plugin-redoc-0.yaml"); +const generatedDir = path.join(__dirname, "..", "src", "generated"); +const resourcesDir = path.join(__dirname, "..", "src", "resources", "generated"); + +const basePaths = { + api_keys: "/api_keys", + approval_requests: "/approval_requests", + async_tasks: "/async_tasks", + attribute_transforms: "/attribute_transforms", + auth_parameters: "/auth_parameters", + auth_templates: "/auth_templates", + catalog_configs: "/catalog_configs", + cluster_endpoints: "/cluster_endpoints", + clusters: "/clusters", + code_containers: "/code_containers", + connectors: "/connectors", + credentials: "/data_credentials", + cubejs: "/cubejs", + custom_data_flows: "/custom_data_flows", + dashboard_transforms: "/dashboard_transforms", + data_credentials_groups: "/data_credentials_groups", + data_flows: "/data_flows", + data_schemas: "/data_schemas", + destinations: "/data_sinks", + doc_containers: "/doc_containers", + flow_nodes: "/flow_nodes", + flow_triggers: "/flow_triggers", + flows: "/flows", + lookups: "/data_maps", + marketplace: "/marketplace", + mcp_sessions: "/mcp_sessions", + nexsets: "/data_sets", + notification_channel_settings: "/notification_channel_settings", + notification_settings: "/notification_settings", + notification_types: "/notification_types", + notifications: "/notifications", + org_auth_configs: "/api_auth_configs", + org_tiers: "/org_tiers", + organizations: "/orgs", + projects: "/projects", + quarantine_settings: 
"/quarantine_settings", + resource_parameters: "/resource_parameters", + runtimes: "/runtimes", + search_health: "/search_health", + self_signup_blocked_domains: "/self_signup_blocked_domains", + service_keys: "/service_keys", + sources: "/data_sources", + teams: "/teams", + tool_sets: "/tool_sets", + tools: "/tools", + transforms: "/transforms", + user_settings: "/user_settings", + user_tiers: "/user_tiers", + users: "/users", + validators: "/validators", + vendor_endpoints: "/vendor_endpoints", + vendors: "/vendors" +}; + +const tagToResource = { + "Session Management": "tokens", + "Flows": "flows", + "Sources": "sources", + "Destinations (Data Sinks)": "destinations", + "Nexsets (Data Sets)": "nexsets", + "Custom Data Flows": "custom_data_flows", + "Credentials": "credentials", + "Lookups (Data Maps)": "lookups", + "Data Maps": "lookups", + "Organizations": "organizations", + "Users": "users", + "Teams": "teams", + "Projects": "projects", + "Transforms": "transforms", + "Attribute Transforms": "attribute_transforms", + "Code Containers": "code_containers", + "Doc Containers": "doc_containers", + "Notifications": "notifications", + "Notification Settings": "notification_settings", + "Notification Types": "notification_types", + "Notification Channel Settings": "notification_channel_settings", + "Metrics": "metrics", + "Audit Logs": "audit_logs", + "Access Control": "access_control", + "Quarantine Settings": "quarantine_settings", + "Gen AI Recommendations": "genai", + "Approval Requests": "approval_requests", + "Limits": "limits", + "Env: Org Management": "env_org_management", + "Env: Vendor Management": "env_vendor_management", + "Env: General Settings": "env_general_settings", + "Org authentication configs": "org_auth_configs", + "Marketplace": "marketplace", + "Self Sign-Up": "self_signup", + "Self Sign-Up Admin": "self_signup_admin", + "Async Tasks": "async_tasks", + "Custom Runtimes": "runtimes", + "GenAI Configurations": "genai", + "GenAI Configs": "genai", 
+ "GenAI Configuration": "genai", + "API Keys": "api_keys", + "Tool Sets": "tool_sets", + "Tools": "tools", + "Service Keys": "service_keys", + "User Settings": "user_settings", + "User Tiers": "user_tiers", + "Org Tiers": "org_tiers", + "Catalog Configs": "catalog_configs", + "Clusters": "clusters", + "Cluster Endpoints": "cluster_endpoints", + "Connectors": "connectors", + "Data Schemas": "data_schemas", + "Data Flows": "data_flows", + "Data Credentials Groups": "data_credentials_groups", + "Auth Templates": "auth_templates", + "Auth Parameters": "auth_parameters", + "Resource Parameters": "resource_parameters", + "Search Health": "search_health", + "MCP Sessions": "mcp_sessions", + "Flow Nodes": "flow_nodes", + "Flow Triggers": "flow_triggers", + "Vendors": "vendors", + "Vendor Endpoints": "vendor_endpoints", + "Dashboard Transforms": "dashboard_transforms", + "CubeJS": "cubejs" +}; + +const rawSpec = fs.readFileSync(specPath, "utf8"); +const spec = YAML.parse(rawSpec); +const paths = spec?.paths ?? {}; + +const METHODS = ["get", "post", "put", "patch", "delete", "options", "head", "trace"]; + +const normalizeTag = (value) => value.replace(/\s+/g, " ").trim(); + +const slugify = (value) => + value + .toLowerCase() + .replace(/[^a-z0-9]+/g, "_") + .replace(/^_+|_+$/g, "") + .replace(/_+/g, "_"); + +const toPascalCase = (value) => + value + .split("_") + .filter(Boolean) + .map((segment) => segment.charAt(0).toUpperCase() + segment.slice(1)) + .join(""); + +const isValidIdentifier = (value) => /^[A-Za-z_$][A-Za-z0-9_$]*$/.test(value); + +const resolveResourceKey = (tags, apiPath) => { + const tag = typeof tags?.[0] === "string" ? normalizeTag(tags[0]) : undefined; + if (tag) { + if (tag === "Webhooks") return null; + return tagToResource[tag] ?? 
slugify(tag); + } + + for (const [resourceKey, basePath] of Object.entries(basePaths)) { + if (apiPath === basePath || apiPath.startsWith(`${basePath}/`)) { + return resourceKey; + } + } + + return "misc"; +}; + +const requiresSessionToken = (security) => { + if (!security) return true; + if (!Array.isArray(security) || security.length === 0) return false; + return security.some((entry) => Object.prototype.hasOwnProperty.call(entry, "NexlaSessionToken")); +}; + +const operationsByResource = new Map(); +const operationIndex = new Map(); + +for (const [apiPath, pathItem] of Object.entries(paths)) { + for (const method of METHODS) { + const operation = pathItem?.[method]; + if (!operation || !operation.operationId) continue; + + const operationId = operation.operationId; + const resourceKey = resolveResourceKey(operation.tags, apiPath); + if (!resourceKey) continue; + + const entry = { + operationId, + method, + path: apiPath, + summary: operation.summary ?? "", + skipAuth: !requiresSessionToken(operation.security) + }; + + if (!operationsByResource.has(resourceKey)) { + operationsByResource.set(resourceKey, []); + } + operationsByResource.get(resourceKey).push(entry); + operationIndex.set(operationId, entry); + } +} + +const resourceMap = {}; + +for (const [resourceKey, basePath] of Object.entries(basePaths)) { + const entry = {}; + const baseOps = paths[basePath] ?? {}; + + if (baseOps.get) entry.list = { path: basePath, method: "get" }; + if (baseOps.post) entry.create = { path: basePath, method: "post" }; + + const itemPath = Object.keys(paths).find((p) => { + if (!p.startsWith(`${basePath}/`)) return false; + const tail = p.slice(basePath.length + 1); + return /^\{[^/]+\}$/.test(tail); + }); + + if (itemPath) { + const itemOps = paths[itemPath] ?? 
{}; + if (itemOps.get) entry.get = { path: itemPath, method: "get" }; + if (itemOps.put) entry.update = { path: itemPath, method: "put" }; + else if (itemOps.patch) entry.update = { path: itemPath, method: "patch" }; + if (itemOps.delete) entry.delete = { path: itemPath, method: "delete" }; + } + + resourceMap[resourceKey] = entry; +} + +const resourceMapContents = `/**\n * Auto-generated resource map from OpenAPI.\n * Do not edit manually.\n */\n\nexport const resourceMap = ${JSON.stringify(resourceMap, null, 2)} as const;\n\nexport type ResourceMap = typeof resourceMap;\n`; + +fs.mkdirSync(generatedDir, { recursive: true }); +fs.writeFileSync(path.join(generatedDir, "resource-map.ts"), resourceMapContents, "utf8"); + +const withSkipAuthContents = `/**\n * Auto-generated helpers for resource clients.\n * Do not edit manually.\n */\n\nimport type { HeadersOptions } from \"openapi-fetch\";\n\ntype HeaderCarrier = { headers?: HeadersOptions };\n\nconst normalizeHeaders = (headers?: HeadersOptions): Record => {\n if (!headers) return {};\n if (headers instanceof Headers) {\n const record: Record = {};\n headers.forEach((value, key) => {\n record[key] = value;\n });\n return record;\n }\n if (Array.isArray(headers)) {\n return Object.fromEntries(headers);\n }\n const record: Record = {};\n for (const [key, value] of Object.entries(headers)) {\n if (value === null || value === undefined) continue;\n if (Array.isArray(value)) {\n record[key] = value.map((item) => String(item)).join(\", \");\n } else {\n record[key] = String(value);\n }\n }\n return record;\n};\n\nexport const withSkipAuth = (init?: T): T => {\n if (!init) {\n return { headers: { \"x-nexla-skip-auth\": \"true\" } } as unknown as T;\n }\n const headers = { ...normalizeHeaders(init.headers), \"x-nexla-skip-auth\": \"true\" };\n return { ...(init as HeaderCarrier), headers } as unknown as T;\n};\n`; + +if (fs.existsSync(resourcesDir)) { + fs.rmSync(resourcesDir, { recursive: true, force: true }); +} 
+fs.mkdirSync(resourcesDir, { recursive: true }); +fs.writeFileSync(path.join(resourcesDir, "utils.ts"), withSkipAuthContents, "utf8"); + +const resourceKeys = Array.from(operationsByResource.keys()).sort(); + +const exportLines = []; +const interfaceLines = []; +const factoryLines = []; + +for (const resourceKey of resourceKeys) { + const operations = operationsByResource.get(resourceKey) ?? []; + operations.sort((a, b) => a.operationId.localeCompare(b.operationId)); + + const className = `${toPascalCase(resourceKey)}Resource`; + exportLines.push(`export { ${className} } from \"./${resourceKey}.js\";`); + interfaceLines.push(` ${resourceKey}: ${className};`); + factoryLines.push(` ${resourceKey}: new ${className}(client)`); + + const methodNames = new Set(); + + const aliasEntries = []; + const resourceMapEntry = resourceMap[resourceKey]; + if (resourceMapEntry?.list) { + const listOp = operations.find((op) => op.path === resourceMapEntry.list.path && op.method === resourceMapEntry.list.method); + if (listOp) aliasEntries.push({ name: "list", op: listOp }); + } + if (resourceMapEntry?.create) { + const createOp = operations.find((op) => op.path === resourceMapEntry.create.path && op.method === resourceMapEntry.create.method); + if (createOp) aliasEntries.push({ name: "create", op: createOp }); + } + if (resourceMapEntry?.get) { + const getOp = operations.find((op) => op.path === resourceMapEntry.get.path && op.method === resourceMapEntry.get.method); + if (getOp) aliasEntries.push({ name: "get", op: getOp }); + } + if (resourceMapEntry?.update) { + const updateOp = operations.find((op) => op.path === resourceMapEntry.update.path && op.method === resourceMapEntry.update.method); + if (updateOp) aliasEntries.push({ name: "update", op: updateOp }); + } + if (resourceMapEntry?.delete) { + const deleteOp = operations.find((op) => op.path === resourceMapEntry.delete.path && op.method === resourceMapEntry.delete.method); + if (deleteOp) aliasEntries.push({ name: 
"delete", op: deleteOp }); + } + + const lines = []; + lines.push("import type { NexlaClient } from \"../../client/nexla-client.js\";"); + lines.push("import type { OperationData, OperationInit } from \"../../client/operation-types.js\";"); + lines.push("import { withSkipAuth } from \"./utils.js\";"); + lines.push(""); + lines.push(`export class ${className} {`); + lines.push(" private readonly client: NexlaClient;"); + lines.push(""); + lines.push(" constructor(client: NexlaClient) {"); + lines.push(" this.client = client;"); + lines.push(" }"); + + for (const alias of aliasEntries) { + if (methodNames.has(alias.name)) continue; + methodNames.add(alias.name); + lines.push(""); + const methodName = isValidIdentifier(alias.name) ? alias.name : `[\"${alias.name}\"]`; + const maybeSkip = alias.op.skipAuth; + const initVar = maybeSkip ? "withSkipAuth(init)" : "init"; + lines.push(` async ${methodName}(init?: OperationInit<\"${alias.op.operationId}\">): Promise> {`); + lines.push(` return this.client.requestOperation(\"${alias.op.operationId}\", \"${alias.op.method}\", \"${alias.op.path}\", ${initVar});`); + lines.push(" }"); + } + + for (const op of operations) { + const opId = op.operationId; + if (methodNames.has(opId)) continue; + methodNames.add(opId); + lines.push(""); + if (op.summary) { + lines.push(` /** ${op.summary.replace(/\*/g, "").trim()} */`); + } + const methodName = isValidIdentifier(opId) ? opId : `[\"${opId.replace(/\\/g, "\\\\").replace(/\"/g, "\\\"")}\"]`; + const initVar = op.skipAuth ? 
"withSkipAuth(init)" : "init"; + lines.push(` async ${methodName}(init?: OperationInit<\"${opId}\">): Promise> {`); + lines.push(` return this.client.requestOperation(\"${opId}\", \"${op.method}\", \"${op.path}\", ${initVar});`); + lines.push(" }"); + } + + lines.push("}"); + + fs.writeFileSync(path.join(resourcesDir, `${resourceKey}.ts`), lines.join("\n"), "utf8"); +} + +const indexLines = []; +indexLines.push("import type { NexlaClient } from \"../../client/nexla-client.js\";"); +for (const resourceKey of resourceKeys) { + const className = `${toPascalCase(resourceKey)}Resource`; + indexLines.push(`import { ${className} } from \"./${resourceKey}.js\";`); +} +indexLines.push(""); +for (const resourceKey of resourceKeys) { + const className = `${toPascalCase(resourceKey)}Resource`; + indexLines.push(`export { ${className} };`); +} +indexLines.push(""); +indexLines.push("export interface GeneratedResourceClients {"); +for (const line of interfaceLines) { + indexLines.push(line); +} +indexLines.push("}"); +indexLines.push(""); +indexLines.push("export const createGeneratedResources = (client: NexlaClient): GeneratedResourceClients => ({"); +for (const line of factoryLines) { + indexLines.push(line + ","); +} +indexLines.push("});"); + +fs.writeFileSync(path.join(resourcesDir, "index.ts"), indexLines.join("\n"), "utf8"); + +console.log(`Generated resource map at ${path.join(generatedDir, "resource-map.ts")}`); +console.log(`Generated resources at ${resourcesDir}`); diff --git a/packages/ts-sdk/scripts/generate-spec-metadata.mjs b/packages/ts-sdk/scripts/generate-spec-metadata.mjs new file mode 100644 index 0000000..0b2f079 --- /dev/null +++ b/packages/ts-sdk/scripts/generate-spec-metadata.mjs @@ -0,0 +1,34 @@ +import crypto from "node:crypto"; +import fs from "node:fs"; +import path from "node:path"; +import { fileURLToPath } from "node:url"; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const packageRoot = 
path.resolve(__dirname, ".."); +const repoRoot = path.resolve(packageRoot, "..", ".."); + +const specPath = path.join(repoRoot, "plugin-redoc-0.yaml"); +const outputPath = path.join(packageRoot, "src", "generated", "spec-metadata.ts"); + +const SPEC_SOURCE = "../../plugin-redoc-0.yaml"; +const SPEC_HASH_ALGORITHM = "sha256"; + +const specBytes = fs.readFileSync(specPath); +const specHash = crypto.createHash(SPEC_HASH_ALGORITHM).update(specBytes).digest("hex"); + +const output = `/** + * Auto-generated spec metadata from ${SPEC_SOURCE}. + * Do not edit manually. + */ + +export const SPEC_SOURCE = "${SPEC_SOURCE}"; +export const SPEC_HASH_ALGORITHM = "${SPEC_HASH_ALGORITHM}"; +export const SPEC_HASH = "${specHash}"; +`; + +fs.mkdirSync(path.dirname(outputPath), { recursive: true }); +fs.writeFileSync(outputPath, output, "utf8"); + +console.log(`Generated spec metadata at ${outputPath}`); diff --git a/packages/ts-sdk/src/auth/access-token.ts b/packages/ts-sdk/src/auth/access-token.ts new file mode 100644 index 0000000..8ce23f9 --- /dev/null +++ b/packages/ts-sdk/src/auth/access-token.ts @@ -0,0 +1,26 @@ +import { AuthenticationError } from "../errors.js"; +import type { AuthProvider } from "./types.js"; + +export class AccessTokenAuthProvider implements AuthProvider { + readonly isRefreshable = false; + private readonly accessToken: string; + + constructor(accessToken: string) { + if (!accessToken) { + throw new AuthenticationError("Access token must be provided"); + } + this.accessToken = accessToken; + } + + async getAccessToken(): Promise { + return this.accessToken; + } + + async refreshAccessToken(): Promise { + throw new AuthenticationError("Direct access tokens cannot be refreshed"); + } + + async logout(): Promise { + // No-op for direct tokens. 
+ } +} diff --git a/packages/ts-sdk/src/auth/service-key.ts b/packages/ts-sdk/src/auth/service-key.ts new file mode 100644 index 0000000..38d3998 --- /dev/null +++ b/packages/ts-sdk/src/auth/service-key.ts @@ -0,0 +1,144 @@ +import { AuthenticationError, NexlaError, isRecord } from "../errors.js"; +import type { AuthProvider } from "./types.js"; + +export interface ServiceKeyAuthOptions { + serviceKey: string; + baseUrl: string; + apiVersion: string; + tokenRefreshMargin: number; + fetchFn: typeof fetch; + userAgent?: string; +} + +export class ServiceKeyAuthProvider implements AuthProvider { + readonly isRefreshable = true; + + private readonly serviceKey: string; + private readonly baseUrl: string; + private readonly apiVersion: string; + private readonly tokenRefreshMargin: number; + private readonly fetchFn: typeof fetch; + private readonly userAgent: string | undefined; + + private accessToken: string | undefined; + private tokenExpiry = 0; + + constructor(options: ServiceKeyAuthOptions) { + this.serviceKey = options.serviceKey; + this.baseUrl = options.baseUrl.replace(/\/$/, ""); + this.apiVersion = options.apiVersion; + this.tokenRefreshMargin = options.tokenRefreshMargin; + this.fetchFn = options.fetchFn; + this.userAgent = options.userAgent; + } + + async getAccessToken(): Promise { + if (!this.accessToken) { + await this.obtainSessionToken(); + } else if (this.isTokenNearExpiry()) { + await this.obtainSessionToken(); + } + + if (!this.accessToken) { + throw new AuthenticationError("No access token available after authentication"); + } + + return this.accessToken; + } + + async refreshAccessToken(): Promise { + await this.obtainSessionToken(); + } + + async logout(): Promise { + if (!this.accessToken) return; + + const headers: Record = { + Accept: `application/vnd.nexla.api.${this.apiVersion}+json`, + Authorization: `Bearer ${this.accessToken}` + }; + if (this.userAgent) headers["User-Agent"] = this.userAgent; + + try { + await 
this.fetchFn(`${this.baseUrl}/token/logout`, { + method: "POST", + headers + }); + } catch { + // Best-effort logout; ignore errors. + } finally { + this.accessToken = undefined; + this.tokenExpiry = 0; + } + } + + private isTokenNearExpiry(): boolean { + if (!this.tokenExpiry) return true; + const now = Date.now() / 1000; + return this.tokenExpiry - now < this.tokenRefreshMargin; + } + + private async obtainSessionToken(): Promise { + const headers: Record = { + Authorization: `Basic ${this.serviceKey}`, + Accept: `application/vnd.nexla.api.${this.apiVersion}+json`, + "Content-Length": "0" + }; + if (this.userAgent) headers["User-Agent"] = this.userAgent; + + const response = await this.fetchFn(`${this.baseUrl}/token`, { + method: "POST", + headers + }); + + if (!response.ok) { + const errorBody = await safeParseJson(response); + const message = extractErrorMessage(errorBody, response) ?? "Authentication failed"; + + if (response.status === 401) { + throw new AuthenticationError(message, { statusCode: response.status, response: errorBody }); + } + + throw new NexlaError(message, { statusCode: response.status, response: errorBody }); + } + + const data = await safeParseJson(response); + if (!isRecord(data)) { + throw new NexlaError("Invalid token response format", { response: data }); + } + + const accessToken = data["access_token"]; + const expiresIn = data["expires_in"]; + + if (typeof accessToken !== "string") { + throw new NexlaError("Missing access token in response", { response: data }); + } + + const expiresInSeconds = typeof expiresIn === "number" ? expiresIn : 86400; + this.accessToken = accessToken; + this.tokenExpiry = Date.now() / 1000 + expiresInSeconds; + } +} + +const safeParseJson = async (response: Response): Promise => { + const contentType = response.headers.get("content-type")?.toLowerCase() ?? 
""; + if (!contentType.includes("application/json")) { + return undefined; + } + try { + return await response.json(); + } catch { + return undefined; + } +}; + +const extractErrorMessage = (body: unknown, response: Response): string | undefined => { + if (isRecord(body)) { + const message = body["message"]; + if (typeof message === "string") return message; + const error = body["error"]; + if (typeof error === "string") return error; + } + if (response.statusText) return response.statusText; + return undefined; +}; diff --git a/packages/ts-sdk/src/auth/types.ts b/packages/ts-sdk/src/auth/types.ts new file mode 100644 index 0000000..a7cc296 --- /dev/null +++ b/packages/ts-sdk/src/auth/types.ts @@ -0,0 +1,6 @@ +export interface AuthProvider { + getAccessToken(): Promise; + refreshAccessToken(): Promise; + logout(): Promise; + readonly isRefreshable: boolean; +} diff --git a/packages/ts-sdk/src/client/http.ts b/packages/ts-sdk/src/client/http.ts new file mode 100644 index 0000000..c92330f --- /dev/null +++ b/packages/ts-sdk/src/client/http.ts @@ -0,0 +1,59 @@ +export interface RetryOptions { + maxRetries?: number; + backoffMs?: number; + maxBackoffMs?: number; + retryOn?: number[]; +} + +const DEFAULT_RETRY_ON = [429, 502, 503, 504]; + +export const createFetchWithRetry = (baseFetch: typeof fetch, options: RetryOptions = {}): ((input: Request) => Promise) => { + const maxRetries = options.maxRetries ?? 3; + const backoffMs = options.backoffMs ?? 300; + const maxBackoffMs = options.maxBackoffMs ?? 3000; + const retryOn = options.retryOn ?? DEFAULT_RETRY_ON; + + return async (input: Request): Promise => { + const original = input; + + let attempt = 0; + while (true) { + const request = attempt === 0 ? 
original : original.clone(); + try { + const response = await baseFetch(request); + if (shouldRetryResponse(response.status, retryOn) && attempt < maxRetries) { + const delay = computeRetryDelay(backoffMs, maxBackoffMs, attempt, response); + await sleep(delay); + attempt += 1; + continue; + } + return response; + } catch (error) { + if (attempt >= maxRetries) throw error; + const delay = computeRetryDelay(backoffMs, maxBackoffMs, attempt); + await sleep(delay); + attempt += 1; + } + } + }; +}; + +const shouldRetryResponse = (status: number, retryOn: number[]): boolean => { + return retryOn.includes(status); +}; + +const computeRetryDelay = (base: number, max: number, attempt: number, response?: Response): number => { + const retryAfter = response?.headers.get("retry-after"); + if (retryAfter) { + const parsed = Number(retryAfter); + if (!Number.isNaN(parsed)) { + return Math.min(parsed * 1000, max); + } + } + + const jitter = Math.random() * 100; + const delay = Math.min(base * 2 ** attempt + jitter, max); + return delay; +}; + +const sleep = (ms: number): Promise => new Promise((resolve) => setTimeout(resolve, ms)); diff --git a/packages/ts-sdk/src/client/nexla-client.ts b/packages/ts-sdk/src/client/nexla-client.ts new file mode 100644 index 0000000..fd1160c --- /dev/null +++ b/packages/ts-sdk/src/client/nexla-client.ts @@ -0,0 +1,222 @@ +import createClient, { type Client } from "openapi-fetch"; +import type { HttpMethod, PathsWithMethod } from "openapi-typescript-helpers"; +import type { paths } from "../generated/schema.js"; +import { AuthenticationError, AuthorizationError, NexlaError, NotFoundError, RateLimitError, ResourceConflictError, ServerError, ValidationError, isRecord } from "../errors.js"; +import { AccessTokenAuthProvider } from "../auth/access-token.js"; +import { ServiceKeyAuthProvider } from "../auth/service-key.js"; +import type { AuthProvider } from "../auth/types.js"; +import { createFetchWithRetry, type RetryOptions } from "./http.js"; 
import type { RequestOptions } from "./types.js";
import type { OperationData, OperationId, OperationInit } from "./operation-types.js";
import { createGeneratedResources, type GeneratedResourceClients } from "../resources/generated/index.js";
import { WebhooksClient } from "../webhooks/index.js";

/**
 * Options for constructing a {@link NexlaClient}.
 *
 * Exactly one of `serviceKey` / `accessToken` must be supplied, either here
 * or via the NEXLA_SERVICE_KEY / NEXLA_ACCESS_TOKEN environment variables.
 */
export interface NexlaClientOptions {
  serviceKey?: string;
  accessToken?: string;
  baseUrl?: string;
  apiVersion?: string;
  /** Seconds before expiry to proactively renew a service-key session token (default 3600). */
  tokenRefreshMargin?: number;
  retry?: RetryOptions;
  /** Custom fetch implementation (defaults to globalThis.fetch). */
  fetch?: typeof fetch;
  userAgent?: string;
  /** When set, a WebhooksClient is attached as `client.webhooks`. */
  webhookApiKey?: string;
}

/**
 * Top-level Nexla API client.
 *
 * Wraps an openapi-fetch client (`raw`) with retrying fetch, header/auth
 * middleware, typed error mapping, and the generated per-resource clients
 * (merged onto the instance via Object.assign + declaration merging).
 */
export class NexlaClient {
  readonly raw: Client<paths>;
  readonly baseUrl: string;
  readonly apiVersion: string;
  readonly acceptHeader: string;
  readonly authProvider: AuthProvider;
  readonly webhooks?: WebhooksClient;


  /**
   * @throws NexlaError when neither or both of serviceKey/accessToken are given.
   */
  constructor(options: NexlaClientOptions = {}) {
    const serviceKey = options.serviceKey ?? process.env.NEXLA_SERVICE_KEY;
    const accessToken = options.accessToken ?? process.env.NEXLA_ACCESS_TOKEN;

    if (!serviceKey && !accessToken) {
      throw new NexlaError(
        "Either serviceKey or accessToken must be provided (or set NEXLA_SERVICE_KEY/NEXLA_ACCESS_TOKEN)."
      );
    }
    if (serviceKey && accessToken) {
      throw new NexlaError("Cannot provide both serviceKey and accessToken.");
    }

    this.baseUrl = (options.baseUrl ?? process.env.NEXLA_API_URL ?? "https://dataops.nexla.io/nexla-api").replace(/\/$/, "");
    this.apiVersion = options.apiVersion ?? "v1";
    this.acceptHeader = `application/vnd.nexla.api.${this.apiVersion}+json`;

    const fetchImpl = options.fetch ?? globalThis.fetch;
    const fetchWithRetry = createFetchWithRetry(fetchImpl, options.retry);

    if (serviceKey) {
      // Note: the auth provider uses the raw fetch (no retry wrapper) for
      // the token exchange itself.
      const authOptions = {
        serviceKey,
        baseUrl: this.baseUrl,
        apiVersion: this.apiVersion,
        tokenRefreshMargin: options.tokenRefreshMargin ?? 3600,
        fetchFn: fetchImpl
      } as const;
      const provider = new ServiceKeyAuthProvider(
        options.userAgent ? { ...authOptions, userAgent: options.userAgent } : authOptions
      );
      this.authProvider = provider;
    } else {
      this.authProvider = new AccessTokenAuthProvider(accessToken ?? "");
    }

    this.raw = createClient<paths>({ baseUrl: this.baseUrl, fetch: fetchWithRetry });

    // Middleware: inject Accept / Content-Type / User-Agent / Authorization.
    // "x-nexla-skip-auth" is an internal sentinel header (presumably set by
    // the generated resources' withSkipAuth helper — TODO confirm) that
    // suppresses the Authorization header and is stripped before sending.
    this.raw.use({
      onRequest: async ({ request }) => {
        const headers = new Headers(request.headers);
        const skipAuth = headers.get("x-nexla-skip-auth") === "true";
        headers.delete("x-nexla-skip-auth");

        if (!headers.has("Accept")) headers.set("Accept", this.acceptHeader);
        if (!headers.has("Content-Type") && request.method !== "GET" && request.method !== "HEAD") {
          headers.set("Content-Type", "application/json");
        }
        if (options.userAgent) {
          headers.set("User-Agent", options.userAgent);
        }
        if (!skipAuth) {
          const token = await this.authProvider.getAccessToken();
          headers.set("Authorization", `Bearer ${token}`);
        }

        return new Request(request, { headers });
      }
    });

    if (options.webhookApiKey) {
      this.webhooks = new WebhooksClient({ apiKey: options.webhookApiKey, fetch: fetchImpl });
    }

    // Attach generated per-resource clients (e.g. client.sources, client.flows).
    Object.assign(this, createGeneratedResources(this));
  }

  /**
   * Performs a typed request against a spec path, mapping API errors to the
   * NexlaError hierarchy. On a 401 with a refreshable auth provider, the
   * token is refreshed and the request retried exactly once.
   */
  async request<Method extends HttpMethod, Path extends PathsWithMethod<paths, Method>>(
    method: Method,
    path: Path,
    init?: RequestOptions
  ): Promise<unknown> {
    const execute = async (): Promise<unknown> => {
      const result = await this.invoke(method, path, init);
      const response = result as { data?: unknown; error?: unknown; response?: Response };
      if (response.error) {
        // A missing Response (shouldn't happen via openapi-fetch) maps to a synthetic 500.
        throw this.mapError(response.error, response.response ?? new Response(null, { status: 500 }), method, path as string);
      }
      return response.data as unknown;
    };

    try {
      return await execute();
    } catch (error) {
      if (error instanceof AuthenticationError && this.authProvider.isRefreshable) {
        await this.authProvider.refreshAccessToken();
        return await execute();
      }
      throw error;
    }
  }

  /**
   * Operation-id flavored wrapper over {@link request}; the operationId is
   * only used for typing (the underscore parameter is unused at runtime).
   */
  async requestOperation<OpId extends OperationId>(
    _operationId: OpId,
    method: HttpMethod,
    path: string,
    init?: OperationInit<OpId>
  ): Promise<OperationData<OpId>> {
    const data = await this.request(method, path as PathsWithMethod<paths, HttpMethod>, init as RequestOptions);
    return data as OperationData<OpId>;
  }

  /** Delegates to the auth provider (revokes/clears the session token). */
  async logout(): Promise<void> {
    await this.authProvider.logout();
  }

  /** Dispatches to the matching openapi-fetch verb method. */
  private invoke<Method extends HttpMethod, Path extends PathsWithMethod<paths, Method>>(
    method: Method,
    path: Path,
    init?: RequestOptions
  ) {
    switch (method) {
      case "get":
        return this.raw.GET(path as never, init as never);
      case "post":
        return this.raw.POST(path as never, init as never);
      case "put":
        return this.raw.PUT(path as never, init as never);
      case "patch":
        return this.raw.PATCH(path as never, init as never);
      case "delete":
        return this.raw.DELETE(path as never, init as never);
      case "options":
        return this.raw.OPTIONS(path as never, init as never);
      case "head":
        return this.raw.HEAD(path as never, init as never);
      case "trace":
        return this.raw.TRACE(path as never, init as never);
      default:
        throw new NexlaError(`Unsupported HTTP method: ${method}`);
    }
  }

  /** Maps an HTTP status + error body to the matching NexlaError subclass. */
  private mapError(errorBody: unknown, response: Response, method?: string, path?: string): NexlaError {
    const message = extractErrorMessage(errorBody, response) ?? `Request failed with status ${response.status}`;
    const context: Record<string, unknown> = {
      method: method?.toUpperCase(),
      path,
      url: response.url,
      status_code: response.status,
    };
    const options = { statusCode: response.status, response: errorBody, context };

    if (response.status === 401) {
      return new AuthenticationError(message, options);
    }
    if (response.status === 403) {
      return new AuthorizationError(message, options);
    }
    if (response.status === 404) {
      return new NotFoundError(message, options);
    }
    if (response.status === 409) {
      return new ResourceConflictError(message, options);
    }
    if (response.status === 422 || response.status === 400) {
      return new ValidationError(message, options);
    }
    if (response.status === 429) {
      const retryAfter = parseRetryAfter(response.headers.get("retry-after"));
      return new RateLimitError(message, retryAfter === undefined ? options : { ...options, retryAfter });
    }
    if (response.status >= 500) {
      return new ServerError(message, options);
    }

    return new NexlaError(message, options);
  }
}

/** Pulls a human-readable message out of a JSON error body, if present. */
const extractErrorMessage = (body: unknown, response: Response): string | undefined => {
  if (isRecord(body)) {
    const message = body["message"];
    if (typeof message === "string") return message;
    const error = body["error"];
    if (typeof error === "string") return error;
  }
  if (response.statusText) return response.statusText;
  return undefined;
};

/** Parses a numeric Retry-After header value (seconds); undefined otherwise. */
const parseRetryAfter = (value: string | null): number | undefined => {
  if (!value) return undefined;
  const parsed = Number(value);
  return Number.isNaN(parsed) ? undefined : parsed;
};
undefined : parsed; +}; + +export interface NexlaClient extends GeneratedResourceClients {} diff --git a/packages/ts-sdk/src/client/operation-types.ts b/packages/ts-sdk/src/client/operation-types.ts new file mode 100644 index 0000000..145c909 --- /dev/null +++ b/packages/ts-sdk/src/client/operation-types.ts @@ -0,0 +1,19 @@ +import type { FetchResponse, RequestOptions } from "openapi-fetch"; +import type { MediaType } from "openapi-typescript-helpers"; +import type { operations } from "../generated/schema.js"; + +export type OperationId = keyof operations; + +export type OperationInit = RequestOptions; + +export type OperationResponse = FetchResponse< + operations[OpId], + OperationInit, + MediaType +>; + +export type OperationData = OperationResponse extends { + data: infer D; +} + ? D + : never; diff --git a/packages/ts-sdk/src/client/types.ts b/packages/ts-sdk/src/client/types.ts new file mode 100644 index 0000000..6e9b767 --- /dev/null +++ b/packages/ts-sdk/src/client/types.ts @@ -0,0 +1,14 @@ +import type { paths } from "../generated/schema.js"; + +export type HttpMethod = "get" | "post" | "put" | "patch" | "delete" | "options" | "head" | "trace"; + +export interface RequestOptions { + params?: { + query?: Record; + path?: Record; + }; + headers?: Record; + body?: unknown; +} + +export type NexlaPaths = paths; diff --git a/packages/ts-sdk/src/errors.ts b/packages/ts-sdk/src/errors.ts new file mode 100644 index 0000000..b6544a5 --- /dev/null +++ b/packages/ts-sdk/src/errors.ts @@ -0,0 +1,114 @@ +export type ErrorDetails = Record; + +export class NexlaError extends Error { + readonly details: ErrorDetails; + readonly operation: string | undefined; + readonly resourceType: string | undefined; + readonly resourceId: string | undefined; + readonly step: string | undefined; + readonly context: ErrorDetails; + readonly originalError: Error | undefined; + readonly statusCode: number | undefined; + readonly response: unknown | undefined; + + constructor( + message: 
string, + options: { + details?: ErrorDetails; + operation?: string; + resourceType?: string; + resourceId?: string; + step?: string; + context?: ErrorDetails; + originalError?: Error; + statusCode?: number; + response?: unknown; + } = {} + ) { + super(message); + this.name = "NexlaError"; + this.details = options.details ?? {}; + this.operation = options.operation; + this.resourceType = options.resourceType; + this.resourceId = options.resourceId; + this.step = options.step; + this.context = options.context ?? {}; + this.originalError = options.originalError; + this.statusCode = options.statusCode; + this.response = options.response; + } + + getErrorSummary(): Record { + return { + message: this.message, + step: this.step, + operation: this.operation, + resource_type: this.resourceType, + resource_id: this.resourceId, + details: this.details, + context: this.context, + status_code: this.statusCode, + response: this.response, + original_error: this.originalError?.message + }; + } +} + +export class AuthenticationError extends NexlaError { + constructor(message = "Authentication failed", options: ConstructorParameters[1] = {}) { + super(message, { operation: "authentication", ...options }); + this.name = "AuthenticationError"; + } +} + +export class AuthorizationError extends NexlaError { + constructor(message = "Authorization failed", options: ConstructorParameters[1] = {}) { + super(message, options); + this.name = "AuthorizationError"; + } +} + +export class NotFoundError extends NexlaError { + constructor(message = "Resource not found", options: ConstructorParameters[1] = {}) { + super(message, options); + this.name = "NotFoundError"; + } +} + +export class ValidationError extends NexlaError { + constructor(message = "Validation failed", options: ConstructorParameters[1] = {}) { + super(message, options); + this.name = "ValidationError"; + } +} + +export class RateLimitError extends NexlaError { + readonly retryAfter: number | undefined; + + constructor( + 
message = "Rate limit exceeded", + options: ConstructorParameters[1] & { retryAfter?: number } = {} + ) { + super(message, options); + this.name = "RateLimitError"; + this.retryAfter = options.retryAfter; + } +} + +export class ServerError extends NexlaError { + constructor(message = "Server error", options: ConstructorParameters[1] = {}) { + super(message, options); + this.name = "ServerError"; + } +} + +export class ResourceConflictError extends NexlaError { + constructor(message = "Resource conflict", options: ConstructorParameters[1] = {}) { + super(message, options); + this.name = "ResourceConflictError"; + } +} + +export const isRecord = (value: unknown): value is Record => { + return typeof value === "object" && value !== null; +}; diff --git a/packages/ts-sdk/src/generated/resource-map.ts b/packages/ts-sdk/src/generated/resource-map.ts new file mode 100644 index 0000000..2219e33 --- /dev/null +++ b/packages/ts-sdk/src/generated/resource-map.ts @@ -0,0 +1,455 @@ +/** + * Auto-generated resource map from OpenAPI. + * Do not edit manually. 
/**
 * Auto-generated resource map from OpenAPI.
 * Do not edit manually.
 *
 * Maps each SDK resource key to its canonical CRUD operations (API path +
 * HTTP method). Keys mapped to an empty object expose no standard CRUD
 * routes here. Several SDK-facing keys intentionally differ from their API
 * paths (e.g. credentials -> /data_credentials, destinations -> /data_sinks,
 * nexsets -> /data_sets, lookups -> /data_maps, sources -> /data_sources,
 * organizations -> /orgs, org_auth_configs -> /api_auth_configs).
 *
 * NOTE(review): "tools", "tool_sets", and "mcp_sessions" still appear as
 * empty entries even though this change removes those resources — presumably
 * stale generator output; verify and regenerate if so.
 */

export const resourceMap = {
  "api_keys": {},
  "approval_requests": {},
  "async_tasks": {
    "list": { "path": "/async_tasks", "method": "get" },
    "create": { "path": "/async_tasks", "method": "post" },
    "get": { "path": "/async_tasks/{task_id}", "method": "get" },
    "delete": { "path": "/async_tasks/{task_id}", "method": "delete" }
  },
  "attribute_transforms": {
    "list": { "path": "/attribute_transforms", "method": "get" },
    "create": { "path": "/attribute_transforms", "method": "post" },
    "get": { "path": "/attribute_transforms/{attribute_transform_id}", "method": "get" },
    "update": { "path": "/attribute_transforms/{attribute_transform_id}", "method": "put" },
    "delete": { "path": "/attribute_transforms/{attribute_transform_id}", "method": "delete" }
  },
  "auth_parameters": {},
  "auth_templates": {},
  "catalog_configs": {},
  "cluster_endpoints": {},
  "clusters": {},
  "code_containers": {
    "list": { "path": "/code_containers", "method": "get" },
    "create": { "path": "/code_containers", "method": "post" },
    "get": { "path": "/code_containers/{code_container_id}", "method": "get" },
    "update": { "path": "/code_containers/{code_container_id}", "method": "put" },
    "delete": { "path": "/code_containers/{code_container_id}", "method": "delete" }
  },
  "connectors": {},
  "credentials": {
    "list": { "path": "/data_credentials", "method": "get" },
    "create": { "path": "/data_credentials", "method": "post" },
    "get": { "path": "/data_credentials/{credential_id}", "method": "get" },
    "update": { "path": "/data_credentials/{credential_id}", "method": "put" },
    "delete": { "path": "/data_credentials/{credential_id}", "method": "delete" }
  },
  "cubejs": {},
  "custom_data_flows": {},
  "dashboard_transforms": {},
  "data_credentials_groups": {},
  "data_flows": {},
  "data_schemas": {},
  "destinations": {
    "list": { "path": "/data_sinks", "method": "get" },
    "create": { "path": "/data_sinks", "method": "post" },
    "get": { "path": "/data_sinks/{sink_id}", "method": "get" },
    "update": { "path": "/data_sinks/{sink_id}", "method": "put" },
    "delete": { "path": "/data_sinks/{sink_id}", "method": "delete" }
  },
  "doc_containers": {},
  "flow_nodes": {},
  "flow_triggers": {},
  "flows": {
    "list": { "path": "/flows", "method": "get" },
    "get": { "path": "/flows/{flow_id}", "method": "get" },
    "delete": { "path": "/flows/{flow_id}", "method": "delete" }
  },
  "lookups": {
    "list": { "path": "/data_maps", "method": "get" },
    "create": { "path": "/data_maps", "method": "post" },
    "get": { "path": "/data_maps/{data_map_id}", "method": "get" },
    "update": { "path": "/data_maps/{data_map_id}", "method": "put" },
    "delete": { "path": "/data_maps/{data_map_id}", "method": "delete" }
  },
  "marketplace": {},
  "mcp_sessions": {},
  "nexsets": {
    "list": { "path": "/data_sets", "method": "get" },
    "create": { "path": "/data_sets", "method": "post" },
    "get": { "path": "/data_sets/{set_id}", "method": "get" },
    "update": { "path": "/data_sets/{set_id}", "method": "put" },
    "delete": { "path": "/data_sets/{set_id}", "method": "delete" }
  },
  "notification_channel_settings": {
    "list": { "path": "/notification_channel_settings", "method": "get" },
    "create": { "path": "/notification_channel_settings", "method": "post" },
    "get": { "path": "/notification_channel_settings/{notification_channel_setting_id}", "method": "get" },
    "update": { "path": "/notification_channel_settings/{notification_channel_setting_id}", "method": "put" },
    "delete": { "path": "/notification_channel_settings/{notification_channel_setting_id}", "method": "delete" }
  },
  "notification_settings": {
    "list": { "path": "/notification_settings", "method": "get" },
    "create": { "path": "/notification_settings", "method": "post" },
    "get": { "path": "/notification_settings/{notification_setting_id}", "method": "get" },
    "update": { "path": "/notification_settings/{notification_setting_id}", "method": "put" },
    "delete": { "path": "/notification_settings/{notification_setting_id}", "method": "delete" }
  },
  "notification_types": {
    "list": { "path": "/notification_types", "method": "get" }
  },
  "notifications": {
    "list": { "path": "/notifications", "method": "get" },
    "get": { "path": "/notifications/{notification_id}", "method": "get" },
    "delete": { "path": "/notifications/{notification_id}", "method": "delete" }
  },
  "org_auth_configs": {
    "list": { "path": "/api_auth_configs", "method": "get" },
    "create": { "path": "/api_auth_configs", "method": "post" },
    "get": { "path": "/api_auth_configs/{auth_config_id}", "method": "get" },
    "update": { "path": "/api_auth_configs/{auth_config_id}", "method": "put" },
    "delete": { "path": "/api_auth_configs/{auth_config_id}", "method": "delete" }
  },
  "org_tiers": {},
  "organizations": {
    "list": { "path": "/orgs", "method": "get" },
    "get": { "path": "/orgs/{org_id}", "method": "get" },
    "update": { "path": "/orgs/{org_id}", "method": "put" }
  },
  "projects": {
    "list": { "path": "/projects", "method": "get" },
    "create": { "path": "/projects", "method": "post" },
    "get": { "path": "/projects/{project_id}", "method": "get" },
    "update": { "path": "/projects/{project_id}", "method": "put" },
    "delete": { "path": "/projects/{project_id}", "method": "delete" }
  },
  "quarantine_settings": {},
  "resource_parameters": {},
  "runtimes": {
    "list": { "path": "/runtimes", "method": "get" },
    "create": { "path": "/runtimes", "method": "post" },
    "get": { "path": "/runtimes/{runtime_id}", "method": "get" },
    "update": { "path": "/runtimes/{runtime_id}", "method": "put" },
    "delete": { "path": "/runtimes/{runtime_id}", "method": "delete" }
  },
  "search_health": {},
  "self_signup_blocked_domains": {
    "list": { "path": "/self_signup_blocked_domains", "method": "get" },
    "create": { "path": "/self_signup_blocked_domains", "method": "post" },
    "update": { "path": "/self_signup_blocked_domains/{domain_id}", "method": "put" },
    "delete": { "path": "/self_signup_blocked_domains/{domain_id}", "method": "delete" }
  },
  "service_keys": {},
  "sources": {
    "list": { "path": "/data_sources", "method": "get" },
    "create": { "path": "/data_sources", "method": "post" },
    "get": { "path": "/data_sources/{source_id}", "method": "get" },
    "update": { "path": "/data_sources/{source_id}", "method": "put" },
    "delete": { "path": "/data_sources/{source_id}", "method": "delete" }
  },
  "teams": {
    "list": { "path": "/teams", "method": "get" },
    "create": { "path": "/teams", "method": "post" },
    "get": { "path": "/teams/{team_id}", "method": "get" },
    "update": { "path": "/teams/{team_id}", "method": "put" },
    "delete": { "path": "/teams/{team_id}", "method": "delete" }
  },
  "tool_sets": {},
  "tools": {},
  "transforms": {
    "list": { "path": "/transforms", "method": "get" },
    "create": { "path": "/transforms", "method": "post" },
    "get": { "path": "/transforms/{transform_id}", "method": "get" },
    "update": { "path": "/transforms/{transform_id}", "method": "put" },
    "delete": { "path": "/transforms/{transform_id}", "method": "delete" }
  },
  "user_settings": {
    "list": { "path": "/user_settings", "method": "get" }
  },
  "user_tiers": {},
  "users": {
    "list": { "path": "/users", "method": "get" },
    "create": { "path": "/users", "method": "post" },
    "get": { "path": "/users/{user_id}", "method": "get" },
    "update": { "path": "/users/{user_id}", "method": "put" }
  },
  "validators": {},
  "vendor_endpoints": {},
  "vendors": {}
} as const;

export type ResourceMap = typeof resourceMap;
+ */ + delete: operations["delete_data_credential"]; + }; + "/data_credentials/{credential_id}?expand=1": { + /** + * Get Credential by ID with expanded references + * @description Returns a credential object along with advanced information about associated references if a valid ID is provided. + */ + get: operations["get_data_credential_expanded"]; + }; + "/data_credentials/{credential_id}/probe": { + /** + * Test credential validity + * @description Use this endpoint to check whether or not a credential is valid. + */ + get: operations["data_credential_probe"]; + }; + "/data_credentials/{credential_id}/probe/tree": { + /** + * Preview Storage Structure + * @description Use this endpoint to preview the structure/hierarchy of storage to which this credential grants access. For example, you can use this endpoint to see the folder and file structure of a file storage system or the table-column structure of a database. + * This can be used to inspect the directory hierarchy of file content storage or the database schema of a database/warehouse storage system. Note that this endpoint is only valid for credentials for storage systems wherein a storage structure needs to be reviewed. + */ + post: operations["preview_storage_structure"]; + }; + "/data_credentials/{credential_id}/probe/sample": { + /** + * Preview Connector Content + * @description Use this endpoint to preview the data content in a storage system. + * + * 1. For file systems, this can be used to preview the file content of any specific file. + * 2. For database systems, it can be used to preview sample rows from a table or query result. + * 3. For the rest connector, it can be used to preview the results of any API request. + * 4. For streaming connectors, it can be used to preview some records in a topic. + * + * For most connectors, it can also be used to determine the type of records that might be detected in the resulting Nexset. 
+ */ + post: operations["preview_connector_content"]; + }; + "/flows": { + /** + * Get All Flows + * @description Returns all flows accessible to the authenticated user. + */ + get: operations["get_flows"]; + }; + "/flows/{flow_id}": { + /** + * Get Flow by ID + * @description Returns a flow object if a valid flow ID is provided. + */ + get: operations["get_flow_by_id"]; + /** + * Delete a Flow + * @description Deletes a flow from your Nexla account. + */ + delete: operations["delete_flow"]; + }; + "/flows/{flow_id}/activate": { + /** + * Activate a Flow + * @description To activate the entire flow, use either the `origin_node_id` from any data source, set or sink in the flow, or include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > + * > 2. You can also activate a flow by using the id of the `data_source`/ `data_set` / `data_sink` that the flow node is linked to. See relevant endpoints in the API references for those resources. + */ + put: operations["flow_activate_with_flow_id"]; + }; + "/flows/{flow_id}/pause": { + /** + * Pause a Flow + * @description To pause the entire flow, use either the `origin_node_id` from any data source, set or sink in the flow, or include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > + * > 2. You can also pause a flow by using the id of the `data_source`/ `data_set` / `data_sink` that the flow node is linked to. See relevant endpoints in the API references for those resources. 
+ */ + put: operations["flow_pause_with_flow_id"]; + }; + "/flows/{flow_id}/copy": { + /** + * Copy a Flow + * @description Use this endpoint to create a copy of an existing flow. + */ + post: operations["flow_copy_with_flow_id"]; + }; + "/flows/{flow_id}/docs/recommendation": { + /** + * Generate an AI suggestion for flow documentation + * @description Request a suggestion for Flow documentation. GenAI has to be configured properly for this request, or else you get a message with an error. + */ + post: operations["flow_docs_recommendation"]; + }; + "/{resource_type}/{resource_id}/flow": { + /** + * Get Flow (by Resource ID) + * @description Returns a flow object if a valid resource type and resource ID is provided. + * + * > Note: This is a variant of flow endpoints where the flow node can referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + get: operations["get_flow_by_resource_id"]; + /** + * Delete a Flow (by Resource ID) + * @description Deletes a flow from your Nexla account. + * + * > Note: This is a variant of flow endpoints where the flow node can referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + delete: operations["delete_flow_by_resource_id"]; + }; + "/{resource_type}/{resource_id}/activate": { + /** + * Activate a Flow (with Resource ID) + * @description To activate the entire flow include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > 2. This is a variant of flow endpoints where the flow node can referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. 
+ */ + put: operations["flow_activate_with_resource_id"]; + }; + "/{resource_type}/{resource_id}/pause": { + /** + * Pause a Flow (with Resource ID) + * @description To pause the entire flow include the entire flow include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > + * > 2. This is a variant of flow endpoints where the flow node can referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + put: operations["flow_pause_with_resource_id"]; + }; + "/data_sources": { + /** + * Get All Sources + * @description Returns all data sources accessible to the authenticated user. + */ + get: operations["get_data_sources"]; + /** + * Create a Source + * @description Creates a new data source in the authenticated user's account. + * + * Depending on the type of source you want to create (`source_type`), properties like `source_config` and `data_credentials_id` will require appropriate configuration. + * + * > Note: `name`, `source_type`, `source_config` and `data_credentials_id` are required. + */ + post: operations["create_data_source"]; + }; + "/data_sources/{source_id}": { + /** + * Get Source by ID + * @description Returns a source object if a valid ID is provided. + */ + get: operations["get_data_source"]; + /** + * Update a Source + * @description Updates a data source in the authenticated user's account. + * + * Depending on the type of source you want to update (`source_type`), properties like `source_config` and `data_credentials_id` will require appropriate configuration. + * + * > Note: This method does not perform partial updating of `source_config`. The entire `source_config` object will be updated if this is added to the payload. 
+ */ + put: operations["update_data_source"]; + /** + * Delete a Source + * @description Deletes a source from your Nexla account. + */ + delete: operations["delete_data_source"]; + }; + "/data_sources/{source_id}?expand=1": { + /** + * Get Source by ID with Expanded References + * @description Returns a source object along with advanced information about associated references if a valid ID is provided. + */ + get: operations["get_data_source_expanded"]; + }; + "/data_sources/{source_id}/activate": { + /** + * Activate a Source + * @description Activate a paused data source. + */ + put: operations["activate_source"]; + }; + "/data_sources/{source_id}/pause": { + /** + * Pause a Source + * @description Pause an active data source. + */ + put: operations["pause_source"]; + }; + "/data_sources/{source_id}/copy": { + /** + * Copy a Source + * @description Use this endpoint to create a copy of an existing flow. + */ + post: operations["copy_source"]; + }; + "/data_sets": { + /** + * Get All Nexsets + * @description Retrieves all Nexsets accessible to the authenticated user. + */ + get: operations["get_nexsets"]; + /** + * Create a Nexset + * @description Creates a Nexset from another Nexset. + * + * The endpoint accepts a parent Nexset ID along with all transform and validation rules that should be applied to the parent Nexset. + * + * The two payload variants reflect the following two ways of specifying transform rules: + * 1. Attach the transform code that should be applied: Set `has_custom_transform: false`, and attach a `transform` code snippet. + * 2. Use the ID of a reusable record transform: Set `has_custom_transform: false`, and attach the `transform_id` of the record transform to be applied. + */ + post: operations["create_nexset"]; + }; + "/data_sets/{set_id}": { + /** + * Get a Nexset + * @description Returns a Nexset object if a valid ID is provided. 
+ */ + get: operations["get_nexset"]; + /** + * Update a Nexset + * @description Updates a Nexset in the authenticated user's account. + */ + put: operations["update_nexset"]; + /** + * Delete a Nexset + * @description Deletes a Nexset from the authenticated user's account. + */ + delete: operations["delete_nexset"]; + }; + "/data_sets/{set_id}/activate": { + /** + * Activate Nexset + * @description Activates a paused Nexset. + */ + put: operations["activate_nexset"]; + }; + "/data_sets/{set_id}/pause": { + /** + * Pause Nexset + * @description Pauses an active Nexset. + */ + put: operations["pause_nexset"]; + }; + "/data_sets/{set_id}/copy": { + /** + * Copy Nexset + * @description Use this endpoint to create a clone of an existing Nexset. + */ + post: operations["copy_nexset"]; + }; + "/data_sets/{set_id}/samples": { + /** + * Get Nexset Samples + * @description Use this endpoint to fetch some sample records from this Nexset. Use the relevant query parameters to control whether the samples returned are from the live Nexset topic or the Nexset sample cache. + */ + get: operations["get_nexset_samples"]; + }; + "/data_sets/{data_set_id}/docs/recommendation": { + /** + * Generate an AI suggestion for Nexset documentation + * @description Request a suggestion for Nexset documentation. GenAI has to be configured properly for this request, or else you get a message with an error. + */ + post: operations["data_set_docs_recommendation"]; + }; + "/data_sinks": { + /** + * Get All Sinks + * @description Retrieves all data sinks accessible to the authenticated user. + */ + get: operations["get_data_sinks"]; + /** + * Create a Sink + * @description Creates a Nexla data_sink with the specified configuration in your Nexla account. + * + * > Note: `name` ,`data_set_id`, `sink_type`, `sink_config` and `data_credentials_id` are required. 
+ */ + post: operations["create_data_sink"]; + }; + "/data_sinks/{sink_id}": { + /** + * Get Sink by ID + * @description Returns a data_sink object if a valid ID is provided. + */ + get: operations["get_data_sink"]; + /** + * Update Sink + * @description Updates a data_sink object in the authenticated user's account. + * + * > Note: This method does not perform partial updating of the `sink_config` object. The entire `sink_config` object will be updated if this is added to the payload. + */ + put: operations["update_data_sink"]; + /** + * Delete a Sink + * @description Deletes a sink from your Nexla account. + */ + delete: operations["delete_data_sink"]; + }; + "/data_sinks/{sink_id}?expand=1": { + /** + * Get Sink by ID with Expanded References + * @description Returns a data_sink object along with advanced information about associated references if a valid ID is provided. + */ + get: operations["get_data_sink_expanded"]; + }; + "/data_sinks/{sink_id}/activate": { + /** + * Activate a Sink + * @description Activate a paused data sink. + */ + put: operations["activate_data_sink"]; + }; + "/data_sinks/{sink_id}/pause": { + /** + * Pause a Sink + * @description Pause an active data sink. + */ + put: operations["pause_data_sink"]; + }; + "/data_sinks/{sink_id}/copy": { + /** + * Copy a Sink + * @description Use this endpoint to create a copy of an existing data sink. + */ + post: operations["copy_data_sink_source"]; + }; + "/data_maps": { + /** + * Get all Data Maps + * @description Retrieves all lookups (data maps) accessible to the authenticated user. + */ + get: operations["get_data_maps"]; + /** + * Create a Static Data Map + * @description Creates a new static data map in the authenticated user's account. Dynamic data maps can only be created by creating a Destination (Sink) of the type `data_map`. + * + * For statically assigned data maps, you can choose to add data rows to the data map by either of the following methods: + * 1. 
Send data map entries with this request. In this case, the rows of data are sent as a `data_map` array of objects. + * 2. Send data map entries as a separate call to add/update entries. + * + * You must include `map_primary_key` to specify which map attribute should be used for data matching. + */ + post: operations["create_static_data_map"]; + }; + "/data_maps/{data_map_id}": { + /** + * Get Data Map by ID + * @description Retrieves a data map object if a valid ID is provided. + * + * This call to `/data_maps` **does not** return data map entries, as they can be a large array of objects for big data maps. + * + * You can include the `expand` query parameter to fetch the data map entries of smaller static data maps. + */ + get: operations["get_data_map"]; + /** + * Update Data Map Metadata + * @description Updates a data map in the authenticated user's account. + * + * This endpoint is suitable for updating the metadata of a data map. We recommend using the data map entries update and delete endpoints to update data map rows. + */ + put: operations["update_data_map_metadata"]; + /** + * Delete a Data Map + * @description Deletes a data map from your Nexla account. + */ + delete: operations["delete_data_map"]; + }; + "/data_maps/{data_map_id}/entries": { + /** + * Upsert Static Data Map Entries + * @description Updates the entries in a static data map. Use this endpoint to add new entries or update the row corresponding to a specific key. + */ + put: operations["upsert_data_map_entries"]; + }; + "/data_maps/{data_map_id}/entries/{entry_keys}": { + /** + * Check Data Map Entries + * @description Returns the rows of data from the data map that matches a desired key or key pattern. + * + * This endpoint can be used to check whether the data map contains rows of data that match the desired key, keys, or key patterns. Key names should be provided in the path in the format described below. 
+ */ + get: operations["check_data_map_entries"]; + /** + * Delete Data Map Entries + * @description Deletes specific entries from the data map. + * + * Use this endpoint to remove specific entries from the data map. + */ + delete: operations["delete_data_map_entries"]; + }; + "/transforms": { + /** + * Get all Reusable Record Transforms + * @description Reusable record transforms are reusable code blocks that can be used to modify an input record of a Nexset into an output record of that Nexset. + * Use this endpoint to fetch all reusable record transforms. + */ + get: operations["get_reusable_record_transforms"]; + /** + * Create a Reusable Record Transform + * @description Create a new reusable record transform. + */ + post: operations["create_reusable_record_transform"]; + }; + "/transforms/{transform_id}": { + /** + * Get A Reusable Record Transform + * @description Returns a reusable record transform object if a valid ID is provided. + */ + get: operations["get_reusable_record_transform"]; + /** + * Update Reusable Record Transform + * @description Updates a transform in the authenticated user's account. + */ + put: operations["update_reusable_record_transform"]; + /** + * Delete a Reusable Record Transform + * @description Use this endpoint to delete a reusable record transform. + */ + delete: operations["delete_reusable_record_transform"]; + }; + "/transforms/{transform_id}/copy": { + /** + * Copy a Reusable Record Transform + * @description Use this endpoint to create a copy of an existing reusable record transform. + */ + post: operations["copy_transform"]; + }; + "/transforms/public": { + /** + * Get all Public Reusable Record Transforms + * @description The Nexla team regularly adds common reusable record transforms that are made available to all Nexla accounts. + * + * Use this endpoint to fetch all such "publicly" available reusable record transforms. 
+ */ + get: operations["get_public_reusable_record_transforms"]; + }; + "/attribute_transforms": { + /** + * Get all Attribute Transforms + * @description Reusable attribute transforms are reusable code blocks that can be used to define the value of an output attribute in a Nexset. These code blocks can be used to enhance the set of transforms available to end users when using the Nexset Designer. + * + * Use this endpoint to fetch all attribute transforms accessible to the authenticated user. + */ + get: operations["get_attribute_transforms"]; + /** + * Create an Attribute Transform + * @description Create a new attribute transform. + */ + post: operations["create_attribute_transform"]; + }; + "/attribute_transforms/{attribute_transform_id}": { + /** + * Get Attribute Transform by ID + * @description Returns an attribute transform object if a valid ID is provided. + */ + get: operations["get_attribute_transform"]; + /** + * Update Attribute Transform + * @description Updates an attribute transform in the authenticated user's account. + */ + put: operations["update_attribute_transform"]; + /** + * Delete an Attribute Transform + * @description Deletes an attribute transform from your Nexla account. + */ + delete: operations["delete_attribute_transform"]; + }; + "/attribute_transforms/public": { + /** + * Get all Public Attribute Transforms + * @description The Nexla team regularly adds common reusable attribute transforms that are made available to all Nexla accounts. + * + * Use this endpoint to fetch all such "publicly" available reusable attribute transforms. + */ + get: operations["get_public_attribute_transforms"]; + }; + "/code_containers": { + /** + * Get all Code Containers + * @description Use this endpoint to fetch all code containers accessible to the authenticated user. + */ + get: operations["get_code_containers"]; + /** + * Create a Code Container + * @description Use this endpoint to create a new code container. 
+ */ + post: operations["create_code_container"]; + }; + "/code_containers/{code_container_id}": { + /** + * Get Code Container by ID + * @description Returns a code container object if a valid ID is provided. + */ + get: operations["get_code_container"]; + /** + * Update a Code Container + * @description Updates a code container in the authenticated user's account. + */ + put: operations["update_code_container"]; + /** + * Delete a Code Container + * @description Deletes a code container from the authenticated user's account. + */ + delete: operations["delete_code_container"]; + }; + "/code_containers/{code_container_id}/copy": { + /** + * Copy a Code Container + * @description Use this endpoint to create a copy of an existing code container. + */ + post: operations["copy_code_container"]; + }; + "/code_containers/public": { + /** + * Get all Public Code Containers + * @description The Nexla team regularly adds common code containers that are made available to all Nexla accounts. + * + * Use this endpoint to fetch all such "publicly" available code containers. + */ + get: operations["get_public_code_containers"]; + }; + "/projects": { + /** + * Get all Projects + * @description Retrieves a list of all projects accessible to the authenticated user. + */ + get: operations["get_projects"]; + /** + * Create a project + * @description Creates a project with the specified configuration. Note that flows can also be attached to the project later by calling endpoints to update the project. + */ + post: operations["create_project"]; + }; + "/projects/{project_id}": { + /** + * Get Project by ID + * @description Returns a project if a valid ID is provided. + */ + get: operations["get_project"]; + /** + * Modify a Project + * @description Modifies a project's information and settings if a valid ID and body are provided. + */ + put: operations["update_project"]; + /** + * Delete Project by ID + * @description Deletes a project if a valid ID is provided. 
Note that flows belonging to the project will only be removed from the project and will not be deleted. + */ + delete: operations["delete_project"]; + }; + "/projects/{project_id}/flows": { + /** + * Get Project Flows + * @description Returns a list of flows belonging to a project. + */ + get: operations["get_project_flows"]; + /** + * Add Flows to Project + * @description Adds a list of flows to a project. The existing flow list is retained and merged with the new flow list. + */ + put: operations["add_project_flows"]; + /** + * Replace Project Flows List + * @description Replaces the list of flows belonging to a project. Existing flows are removed from the project. + */ + post: operations["replace_project_flows"]; + /** + * Remove Flows From A Project + * @description Removes data flows from a project. If no request body is provided, all flows belonging to the project will be removed. The flows themselves will not be deleted, but they will no longer belong to the project. + */ + delete: operations["remove_project_flows"]; + }; + "/projects/{project_id}/data_flows": { + /** + * Get Project Flows (Deprecated) + * @description Returns a list of flows belonging to a project. + * + * > **Note**: This version of the endpoint has been deprecated. The returned flow response does not reference the new unique flow ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. See get_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + get: operations["get_project_flows_(deprecated)"]; + /** + * Add Flows to Project (Deprecated) + * @description Adds a list of flows to a project. The existing flow list is retained and merged with the new flow list. + * + * > **Note**: This version of the endpoint has been deprecated. The request body and response does not reference flows with new unique flow_ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. 
See add_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + put: operations["add_project_flows_(deprecated)"]; + /** + * Replace Project Flows List (Deprecated) + * @description Replaces the list of flows belonging to a project. Existing flows are removed from the project. + * + * > **Note**: This version of the endpoint has been deprecated. The request body and response does not reference flows with new unique flow_ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. See replace_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + post: operations["replace_project_flows_(deprecated)"]; + /** + * Remove Flows From A Project (Deprecated) + * @description Removes data flows from a project. If no request body is provided, all flows belonging to the project will be removed. The flows themselves will not be deleted, but they will no longer belong to the project. + * + * > **Note**: This version of the endpoint has been deprecated. The request body and response does not reference flows with new unique flow_ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. See remove_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + delete: operations["remove_project_flows_(deprecated)"]; + }; + "/orgs": { + /** + * Get all Organizations + * @description Returns all organizations accessible to the authenticated user. + */ + get: operations["get_orgs"]; + }; + "/orgs/{org_id}": { + /** + * Get Organization by ID + * @description Returns an organization if a valid ID is provided. + */ + get: operations["get_org"]; + /** + * Update an Organization + * @description Updates properties of an organization. + */ + put: operations["update_org"]; + }; + "/orgs/{org_id}/members": { + /** + * Get All Members in Organization + * @description Retrieves a list of all users in an organization. 
+ */ + get: operations["get_org_members"]; + /** + * Update Organization Members + * @description Add or update members in an organization. This endpoint can also be used to modify an existing member's role in the organization. + * + * When adding a new member using their email id, if a user account for that email id does not exist on the platform then a new user account will be created. If the user already exists on the platform as a member of a different organization then their membership will get updated to include this organization also. + */ + put: operations["update_org_members"]; + /** + * Remove Members from an Organization. + * @description Removes one or more members from the organization. Note that this will not delete the user account from the platform, but will remove the user's ability to access this organization's resources. + */ + delete: operations["delete_org_members"]; + }; + "/teams": { + /** + * Get all Teams + * @description Returns all teams accessible to the authenticated user. + */ + get: operations["get_teams"]; + /** + * Create a team + * @description Creates a team with the specified configuration and members. + */ + post: operations["create_team"]; + }; + "/teams/{team_id}": { + /** + * Get Team by ID + * @description Returns a team if a valid ID is provided. + */ + get: operations["get_team"]; + /** + * Modify a Team + * @description Modifies a team's information and settings if a valid ID and body are provided. + */ + put: operations["update_team"]; + /** + * Delete Team by ID + * @description Deletes a team if a valid ID is provided. + */ + delete: operations["delete_team"]; + }; + "/teams/{team_id}/members": { + /** + * Get Team Members + * @description Returns a list of the members belonging to a team. + */ + get: operations["get_team_members"]; + /** + * Add Members to A Team + * @description Adds a list of members to a team. The existing list of members will be retained and merged with the new list of members. 
+ */ + put: operations["add_team_members"]; + /** + * Replace Team Members List + * @description Replaces the list of members belonging to a team. Existing members will be removed from the team. + */ + post: operations["replace_team_members"]; + /** + * Remove Team Members + * @description Removes members from a team. If no request body is provided, all members belonging to the team will be removed. + */ + delete: operations["delete_team_members"]; + }; + "/users": { + /** + * Get All Users + * @description Returns all users that can be viewed by authenticated user. + */ + get: operations["get_users"]; + /** + * Create a User + * @description Create a new user in this environment. + * + * > This requires admin access to the provided organization. + */ + post: operations["create_user"]; + }; + "/users?expand=1": { + /** + * Get All Users with Expanded References + * @description Returns all users that can be viewed by the authenticated user. + */ + get: operations["get_users_expand"]; + }; + "/users/{user_id}": { + /** + * Get User by ID + * @description Returns a user if a valid ID is provided. + */ + get: operations["get_user"]; + /** + * Modify a User + * @description Modifies a user's information and settings if a valid ID and body are provided + */ + put: operations["update_user"]; + }; + "/users/{user_id}?expand=1": { + /** + * Get User by ID with Expanded References + * @description Returns a user if a valid ID is provided. + */ + get: operations["get_user_expand"]; + }; + "/users/current": { + /** + * Get info on current user + * @description Returns the user information of the currently logged-in user, including org memberships and current org info. + */ + get: operations["get_current_user"]; + }; + "/user_settings": { + /** + * Get the current user's settings + * @description Returns all the settings for the current user. 
+ */ + get: operations["get_user_settings"]; + }; + "/notifications": { + /** + * Get All Notifications + * @description Returns all notifications in the authenticated user's account. Note that this only includes notifications generated to be displayed in the Nexla UI. + */ + get: operations["get_notifications"]; + }; + "/notifications/{notification_id}": { + /** + * Get a Notification + * @description Returns a notification if a valid ID is provided. + */ + get: operations["get_notification"]; + /** + * Delete a Notification + * @description Deletes a notification if a valid ID is provided. + */ + delete: operations["delete_notifications"]; + }; + "/notifications/all": { + /** + * Delete All Notifications + * @description Deletes all notifications belonging to the authenticated user. Note that this is only the list of notifications generated to be displayed in the Nexla UI. + */ + delete: operations["delete_all_notifications"]; + }; + "/notifications/count": { + /** + * Get Notifications Count + * @description Returns the total number of notifications in the authenticated user's account. Note that this only includes notifications generated to be displayed in the Nexla UI. + */ + get: operations["get_notification_count"]; + }; + "/notifications/mark_read": { + /** + * Mark Notification Read + * @description Use this endpoint to mark one, multiple, or all notifications as read. To mark a list of notifications, send an array of notification IDs as the payload. To mark all notifications, send the notification_id query parameter with the value `all`. + */ + put: operations["notifications_mark_read"]; + }; + "/notifications/mark_unread": { + /** + * Mark Notification Unread + * @description Use this endpoint to mark one, multiple, or all notifications as read. To mark a list of notifications, send an array of notification IDs as the payload. To mark all notifications, send the notification_id query parameter with the value `all`. 
+ */ + put: operations["notifications_mark_unread"]; + }; + "/notification_types": { + /** + * Get All Notification Types + * @description Fetches a list of all notifications supported by Nexla in this environment. + * + * When users choose whether or not some notifications are enabled, their choices are saved in `notification_settings` and linked to the ID of the relevant notification type. + */ + get: operations["get_notification_types"]; + }; + "/notification_types/list": { + /** + * Get One Notification Type + * @description Fetches details about a specific notification type supported by Nexla in this environment. + */ + get: operations["list_notification_type"]; + }; + "/notification_channel_settings": { + /** + * List Notification Channel Settings + * @description Notification channel settings contain configuration settings relevant to where notifications should be delivered. For example, the settings for the `EMAIL` channel contain the email addresses to which notifications can be sent. + * + * You can maintain multiple configuration settings for the same channel to route notifications for specific resources and types to different locations. + * + * This endpoint lists all notification channel settings in the authenticated user's account. + */ + get: operations["list_notification_channel_settings"]; + /** + * Create a Notification Channel Setting + * @description Create a new configuration for a notification channel. + * + * You can maintain multiple configuration settings for the same channel to route notifications for specific resources and types to different locations. + */ + post: operations["create_notification_channel_setting"]; + }; + "/notification_channel_settings/{notification_channel_setting_id}": { + /** + * Get a Notification Channel Setting + * @description Returns a notification channel setting if a valid ID is provided. 
+ */ + get: operations["get_notification_channel_setting"]; + /** + * Update a Notification Channel Setting + * @description Update the configuration of a notification channel setting. + */ + put: operations["update_notification_channel_setting"]; + /** + * Delete a Notification Channel Setting + * @description Deletes a notification channel setting if a valid ID is provided. + */ + delete: operations["delete_notification_channel_setting"]; + }; + "/notification_settings": { + /** + * List Notification Settings + * @description This endpoint lists all notification settings in the authenticated user's account. + * + * + * Notification settings contain the following user settings: + * 1. Whether the user wants to be notified about a specific event (`status` of a `notification_type` on a `notification_resource_type`) + * 2. If yes, on what `channel` the user wants to be notified + * 3. The configuration of the channel (`notification_channel_setting_id`) + * 4. Configuration parameters affect when the notification should be fired. This is usually left empty to use platform defaults, but it is relevant when users want to override the default settings of some notifications, such as `Source Data Delayed` + */ + get: operations["list_notification_settings"]; + /** + * Create a Notification Setting + * @description Create a setting to designate whether, when, and how a specific notification should be fired. + */ + post: operations["create_notification_setting"]; + }; + "/notification_settings/{notification_setting_id}": { + /** + * Get a Notification Setting + * @description Returns a notification if a valid ID is provided. + */ + get: operations["get_notification_setting"]; + /** + * Modify a Notification Setting + * @description Modifies a notification if a valid ID and body are provided. + */ + put: operations["update_notification_setting"]; + /** + * Delete a Notification Setting + * @description Delete a notification setting if a valid ID is provided. 
+ */ + delete: operations["delete_notification_setting"]; + }; + "/notification_settings/notification_types/{notification_type_id}": { + /** + * Get Notification Settings for an Event + * @description Use this endpoint to fetch all notification settings of a specific type. + * + * This can be used as a filter that is easy to understand, which returns all notifications that a user can expect to receive for a specific event. + */ + get: operations["list_notification_settings_by_type"]; + }; + "/notification_settings/{resource_type}/{resource_id}": { + /** + * Get Notification Settings For a Resource + * @description Use this endpoint to fetch all notification settings for a given resource. + * + * This can be used as a filter that is easy to understand, which returns all notifications that a user can expect to receive for a specific resource. + */ + get: operations["list_resource_notification_settings"]; + }; + "/orgs/{org_id}/flows/account_metrics": { + /** + * Get Total Account Metrics for An Organization + * @description Retrieves total account utilization metrics for an organization. The result consists of aggregated information about records processed within the specified date range by all resources owned by users in the organization. + */ + get: operations["org_account_metrics_total"]; + }; + "/users/{user_id}/flows/account_metrics": { + /** + * Get Total Account Metrics for a User + * @description Retrieves total account utilization metrics for a user in an organization. The result consists of aggregated information about records processed within the specified date range by all resources owned by the user. + */ + get: operations["user_account_metrics_total"]; + }; + "/users/{user_id}/flows/dashboard": { + /** + * Get 24 Hour Flow Stats for a User + * @description Retrieves the metrics and processing status of each flow that processed data in the last 24 hours. 
+ * + * Each item reflects the total number of records processed by each stage of all flows accessible by the user that processed any data in the specified time window. + */ + get: operations["user_24_hour_flow_stats"]; + }; + "/users/{user_id}/metrics": { + /** + * Get Daily Data Processing Metrics for a User + * @description Retrieves daily data processing metrics of all sources or all destinations owned by a user. + */ + get: operations["user_metrics_daily"]; + }; + "/{resource_type}/{resource_id}/metrics": { + /** + * Get Daily Metrics for a Resource of a Flow + * @description Retrieves daily data processing metrics of a `data_source`, `data_set`, or `data_sink`. + */ + get: operations["get_resource_metrics_daily"]; + }; + "/{resource_type}/{resource_id}/metrics/run_summary": { + /** + * Get Metrics By Run ID for a Resource of a Flow + * @description Retrieves data processing metrics of a `data_source`, `data_set`, or `data_sink`. The reported metrics are grouped by run id to indicate the number of records processed during each ingestion cycle of this flow. + */ + get: operations["get_resource_metrics_by_run"]; + }; + "/data_flows/{resource_type}/{resource_id}/metrics": { + /** + * Get Metrics for a Flow + * @description Retrieves data processing metrics of a flow. Metrics are aggregated for each node of the flow for the specified time range. They can be further grouped by run id to indicate the number of records processed during each ingestion cycle of this flow. + * + * > Note: This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + get: operations["get_flow_metrics"]; + }; + "/data_flows/{resource_type}/{resource_id}/logs": { + /** + * Get Flow Execution Logs for Run ID of a Flow + * @description Retrieves flow execution logs for a specific run id of a flow. 
+ * + * > Note: This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + get: operations["get_flow_logs_for_run_id"]; + }; + "/data_sources/{source_id}/audit_log": { + /** + * Get Audit Log for a Data Source + * @description Retrieves the history of changes made to the properties of a data source. + */ + get: operations["get_data_source_audit_log"]; + }; + "/data_sinks/{sink_id}/audit_log": { + /** + * Get Audit Log for a Data Sink + * @description Retrieves the history of changes made to the properties of a data sink. + */ + get: operations["get_data_sink_audit_log"]; + }; + "/data_sets/{set_id}/audit_log": { + /** + * Get Audit Log for a Nexset + * @description Retrieves the history of changes made to the properties of a Nexset. + */ + get: operations["get_nexset_audit_log"]; + }; + "/data_credentials/{credential_id}/audit_log": { + /** + * Get Audit Log for a Data Credential + * @description Retrieves the history of changes made to the properties of a data credential. + */ + get: operations["get_data_credential_audit_log"]; + }; + "/data_maps/{data_map_id}/audit_log": { + /** + * Get Audit Log for a Data Map + * @description Retrieves the history of changes made to the properties of a data map. + */ + get: operations["get_data_map_audit_log"]; + }; + "/data_schemas/{schema_id}/audit_log": { + /** + * Get Audit Log for a Data Schema + * @description Retrieves the history of changes made to the properties of a data schema. + */ + get: operations["get_data_schema_audit_log"]; + }; + "/code_containers/{code_container_id}/audit_log": { + /** + * Get Audit Log for a Code Container + * @description Retrieves the history of changes made to the properties of a code container. This endpoint can also be used to fetch the history of changes made to any transform object. 
+ */ + get: operations["get_code_container_audit_log"]; + }; + "/projects/{project_id}/audit_log": { + /** + * Get Audit Log for a Project + * @description Retrieves the history of changes made to the properties of a project. + */ + get: operations["get_project_audit_log"]; + }; + "/doc_containers/{doc_container_id}/audit_log": { + /** + * Get Audit Log for a Document + * @description Retrieves the history of changes made to the properties of a document. + */ + get: operations["get_doc_container_audit_log"]; + }; + "/users/{user_id}/audit_log": { + /** + * Get Audit Log for a User + * @description Retrieves the history of changes made to the properties of a user. + */ + get: operations["get_user_audit_log"]; + }; + "/orgs/{org_id}/audit_log": { + /** + * Get Audit Log for an Organization + * @description Retrieves the history of changes made to the properties of an organization. + */ + get: operations["get_org_audit_log"]; + }; + "/teams/{team_id}/audit_log": { + /** + * Get Audit Log for a Team + * @description Retrieves the history of changes made to the properties of a team. + */ + get: operations["get_team_audit_log"]; + }; + "/users/{user_id}/quarantine_settings": { + /** + * Get Quarantine Data Export Settings for A User + * @description Retrieve Quarantine Data Export Settings for all resources owned by a user. + * + * Nexla detects errors during different stages of data flow such as ingestion, transformation, and output. Error records are quarantined and accessible to the user via APIs as well as files. With Quarantine Data Export Settings, you can configure Nexla to write files containing information about erroneous records across all resources owned by a user. + * + * > This endpoint returns a 404 status code if no Quarantine Data Export Settings have been configured for the user. 
+ */ + get: operations["get_user_quarantine_data_export_settings"]; + /** + * Update Quarantine Data Export Settings for A User + * @description Updates Quarantine Data Export Settings for all resources owned by a user so that all erroneous records can be automatically exported by the platform to a file system regularly. + */ + put: operations["update_user_quarantine_data_export_settings"]; + /** + * Set Quarantine Data Export Settings for A User + * @description Sets Quarantine Data Export Settings for all resources owned by a user so that all erroneous records can be automatically exported by the platform to a file system regularly. + */ + post: operations["create_quarantine_data_export_settings"]; + /** + * Delete Quarantine Data Export Settings for A User + * @description Deletes Quarantine Data Export Settings for all resources owned by a user. Deleting this setting will ensure the platform stops exporting all erroneous records for resources owned by the user to a file storage. + */ + delete: operations["delete_user_quarantine_data_export_settings"]; + }; + "/approval_requests/pending": { + /** + * Get all pending approval requests. + * @description Use this endpoint to fetch all pending approval requests that are not assigned to any users. + */ + get: operations["get_pending_approval_requests"]; + }; + "/approval_requests/requested": { + /** + * Get all requested approval requests by the user. + * @description Use this endpoint to fetch all approval requests that are requested by the user. + */ + get: operations["get_requested_approval_requests"]; + }; + "/approval_requests/{request_id}/approve": { + /** + * Approve pending approval requests + * @description Use this endpoint to approve pending approval requests that are assigned to the user or are unassigned. 
+ */ + put: operations["approve_approval_request"]; + }; + "/approval_requests/{request_id}/reject": { + /** + * Reject pending approval requests + * @description Use this endpoint to reject pending approval requests that are assigned to the user or are unassigned. + */ + delete: operations["reject_approval_request"]; + }; + "/data_sources/{data_source_id}/accessors": { + /** + * Get Access Rules on Data Source + * @description Returns a list of the access-control rules set for this data source. + */ + get: operations["get_data_source_accessors"]; + /** + * Add Access Rules on Data Source + * @description Adds a list of accessors to a data source. The existing accessors list is retained and merged with the new accessors list. + */ + put: operations["add_data_source_accessors"]; + /** + * Replace Access Rules on Data Source + * @description Replaces the list of accessors belonging to a data source. Existing accessors will be removed from the data source. + */ + post: operations["replace_data_source_accessors"]; + /** + * Delete Access Rules on Data Source + * @description Removes access-control rules from a data source. If no request body is provided, all rules associated with the data source will be removed. + */ + delete: operations["delete_data_source_accessors"]; + }; + "/data_sets/{data_set_id}/accessors": { + /** + * Get Access Rules on Nexset + * @description Returns a list of the access-control rules set for this Nexset. + */ + get: operations["get_nexset_accessors"]; + /** + * Add Access Rules on Nexset + * @description Adds new access-control rules to this Nexset. + */ + put: operations["add_nexset_accessors"]; + /** + * Replace Access Rules on Nexset + * @description Replaces the list of access-control rules set for this Nexset. Existing rules will be removed from the Nexset, and only these new rules will be applied. 
+ */ + post: operations["replace_nexset_accessors"]; + /** + * Delete Access Rules on Nexset + * @description Removes access-control rules from a Nexset. If no request body is provided, all rules associated with the Nexset will be removed. + */ + delete: operations["delete_nexset_accessors"]; + }; + "/data_sinks/{data_sink_id}/accessors": { + /** + * Get Access Rules on Data Sink + * @description Returns a list of the access-control rules set for this data sink. + */ + get: operations["get_data_sink_accessors"]; + /** + * Add Access Rules on Data Sink + * @description Adds new access-control rules to this data sink. + */ + put: operations["add_data_sink_accessors"]; + /** + * Replace Access Rules on Data Sink + * @description Replaces the list of access-control rules set for this data sink. Existing rules will be removed from the data sink, and only these new rules will be applied. + */ + post: operations["replace_data_sink_accessors"]; + /** + * Delete Access Rules on Data Sink + * @description Removes access-control rules from a data sink. If no request body is provided, all rules associated with the data sink will be removed. + */ + delete: operations["delete_data_sink_accessors"]; + }; + "/data_maps/{data_map_id}/accessors": { + /** + * Get Access Rules on Data Map + * @description Returns a list of the access-control rules set for this data map. + */ + get: operations["get_data_map_accessors"]; + /** + * Add Access Rules on Data Map + * @description Adds new access-control rules to this data map. + */ + put: operations["add_data_map_accessors"]; + /** + * Replace Access Rules on Data Map + * @description Replaces the list of access-control rules set for this data map. Existing rules will be removed from the data map, and only these new rules will be applied. + */ + post: operations["replace_data_map_accessors"]; + /** + * Delete Access Rules on Data Map + * @description Removes access-control rules from a data map. 
If no request body is provided, all rules associated with the data map will be removed. + */ + delete: operations["delete_data_map_accessors"]; + }; + "/data_credentials/{data_credential_id}/accessors": { + /** + * Get Access Rules on Data Credential + * @description Returns a list of the access-control rules set for this data credential. + */ + get: operations["get_data_credential_accessors"]; + /** + * Add Access Rules on Data Credential + * @description Adds new access-control rules to this data credential. + */ + put: operations["add_data_credential_accessors"]; + /** + * Replace Access Rules on Data Credential + * @description Replaces the list of access-control rules set for this data credential. Existing rules will be removed from the data credential, and only these new rules will be applied. + */ + post: operations["replace_data_credential_accessors"]; + /** + * Delete Access Rules on Data Credential + * @description Removes access-control rules from a data credential. If no request body is provided, all rules associated with the data credential will be removed. + */ + delete: operations["delete_data_credential_accessors"]; + }; + "/projects/{project_id}/accessors": { + /** + * Get Project Accessors + * @description Returns a list of the access-control rules set for this project. + */ + get: operations["get_project_accessors"]; + /** + * Add Project Accessors + * @description Adds new access-control rules to this project. + */ + put: operations["add_project_accessors"]; + /** + * Replace Access Rules on Project + * @description Replaces the list of access-control rules set for this project. Existing rules will be removed from the project, and only these new rules will be applied. + */ + post: operations["replace_project_accessors"]; + /** + * Delete Project Accessors + * @description Removes access-control rules from a project. If no request body is provided, all rules associated with the project will be removed. 
+ */ + delete: operations["delete_project_accessors"]; + }; + "/flows/{flow_id}/accessors": { + /** + * Get Access Rules on Flow + * @description Returns a list of the access-control rules set for this flow. + */ + get: operations["get_flow_accessors"]; + /** + * Add Access Rules on Flow + * @description Adds new access-control rules to this data flow. + */ + put: operations["add_flow_accessors"]; + /** + * Replace Access Rules on Flow + * @description Replaces the list of access-control rules set for this flow. Existing rules will be removed from the flow, and only these new rules will be applied. + */ + post: operations["replace_flow_accessors"]; + /** + * Delete Access Rules on Flow + * @description Removes access-control rules from a data flow. If no request body is provided, all rules associated with the data flow will be removed. + */ + delete: operations["delete_flow_accessors"]; + }; + "/data_flows/{data_flow_id}/accessors": { + /** + * Get Access Rules on Flow (Deprecated) + * @description Returns a list of the access-control rules set for this data flow. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See get_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + get: operations["get_flow_accessors_(deprecated)"]; + /** + * Add Access Rules on Flow (Deprecated) + * @description Add new access-control rules to this data flow. This version uses a composite data flow id of the type `{resource_type}/{resource_id}`. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See add_flow_accessors for a new version of this endpoint that uses unique `flow_id`. 
+ */ + put: operations["add_flow_accessors_(deprecated)"]; + /** + * Replace Access Rules on Flow (Deprecated) + * @description Replace the list of access-control rules set for this data flow. Existing rules will be removed from the data flow, and only these new rules will be applied. This version uses a composite data flow id of the type `{resource_type}/{resource_id}`. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See replace_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + post: operations["replace_flow_accessors_(deprecated)"]; + /** + * Delete Access Rules on Flow (Deprecated) + * @description Remove access-control rules from a data flow. If no request body is provided, all rules associated with the data flow will be removed. This version uses a composite data flow id of the type `{resource_type}/{resource_id}`. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See delete_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + delete: operations["delete_flow_accessors_(deprecated)"]; + }; + "/data_schemas/{data_schema_id}/accessors": { + /** + * Get Access Rules on Data Schema + * @description Returns a list of the access-control rules set for this data schema. + */ + get: operations["get_data_schema_accessors"]; + /** + * Add Access Rules on Data Schema + * @description Adds new access-control rules to this data schema. + */ + put: operations["add_data_schema_accessors"]; + /** + * Replace Access Rules on Data Schema + * @description Replaces the list of access-control rules set for this data schema. Existing rules will be removed from the data schema, and only these new rules will be applied. 
+ */ + post: operations["replace_data_schema_accessors"]; + /** + * Delete Access Rules on Data Schema + * @description Removes access-control rules from a data schema. If no request body is provided, all rules associated with the data schema will be removed. + */ + delete: operations["delete_data_schema_accessors"]; + }; + "/doc_containers/{doc_container_id}/accessors": { + /** + * Get Access Rules on Document + * @description Returns a list of the access-control rules set for this document. + */ + get: operations["get_doc_container_accessors"]; + /** + * Add Access Rules on Document + * @description Adds new access-control rules to this document. + */ + put: operations["add_doc_container_accessors"]; + /** + * Replace Access Rules on Document + * @description Replaces the list of access-control rules set for this document. Existing rules will be removed from the document, and only these new rules will be applied. + */ + post: operations["replace_doc_container_accessors"]; + /** + * Delete Access Rules on Document + * @description Removes access-control rules from a document. If no request body is provided, all rules associated with the document will be removed. + */ + delete: operations["delete_doc_container_accessors"]; + }; + "/code_containers/{code_container_id}/accessors": { + /** + * Get Access Rules on Code Container + * @description Returns a list of the access-control rules set for this code container. + */ + get: operations["get_code_container_accessors"]; + /** + * Add Access Rules on Code Container + * @description Adds new access-control rules to this code container. + */ + put: operations["add_code_container_accessors"]; + /** + * Replace Access Rules on Code Container + * @description Replaces the list of access-control rules set for this code container. Existing rules will be removed from the code container, and only these new rules will be applied. 
+ */ + post: operations["replace_code_container_accessors"]; + /** + * Delete Access Rules on Code Container + * @description Removes access-control rules from a code container. If no request body is provided, all rules associated with the code container will be removed. + */ + delete: operations["delete_code_container_accessors"]; + }; + "/teams/{team_id}/accessors": { + /** + * Get Team Accessors + * @description Returns a list of the access-control rules set for this team. + */ + get: operations["get_team_accessors"]; + /** + * Add Team Accessors + * @description Adds new access-control rules to this team. + */ + put: operations["add_team_accessors"]; + /** + * Replace Team Accessors List + * @description Replaces the list of access-control rules set for this team. Existing rules will be removed from the team, and only these new rules will be applied. + */ + post: operations["replace_team_accessors"]; + /** + * Delete Team Accessors + * @description Removes access-control rules from a team. If no request body is provided, all rules associated with the team will be removed. + */ + delete: operations["delete_team_accessors"]; + }; + "/marketplace/domains": { + /** + * Get marketplace domains. + * @description Use this endpoint to fetch marketplace domains. You need a read permission for the org. + */ + get: operations["get_domains"]; + /** + * Create marketplace domains. + * @description Use this endpoint to create marketplace domains. You need a manage permission for the org. + */ + post: operations["create_domains"]; + }; + "/marketplace/domains/for_org": { + /** + * Get marketplace domains for organization. + * @description Use this endpoint to fetch marketplace domains for a specific organization. You need a read permission for the org. + */ + get: operations["get_domains_for_org"]; + }; + "/marketplace/domains/{domain_id}": { + /** + * Get a single marketplace domain. + * @description Use this endpoint to fetch a marketplace domain. 
You need a read permission for the domain. + */ + get: operations["get_domain"]; + /** + * Update a single marketplace domain. + * @description Use this endpoint to update a marketplace domain. You need a manage permission for the domain. + */ + put: operations["update_domain"]; + /** + * Create a single marketplace domain. + * @description Use this endpoint to create a marketplace domain. You need a manage permission for the org. + */ + post: operations["create_domain"]; + /** + * Delete a single marketplace domain. + * @description Use this endpoint to delete a marketplace domain. You need a manage permission for the domain to delete it. + */ + delete: operations["delete_domain"]; + }; + "/marketplace/domains/{domain_id}/items": { + /** + * Get marketplace items for a domain. + * @description Use this endpoint to fetch marketplace items for a domain. You need a read permission for the domain. + */ + get: operations["get_domain_items"]; + /** + * Create a marketplace item for a domain. + * @description Use this endpoint to create a marketplace item for a domain. You need a manage permission for the domain. + */ + post: operations["create_domain_item"]; + }; + "/marketplace/domains/{domain_id}/custodians": { + /** + * Get custodians for a marketplace domain. + * @description Use this endpoint to fetch custodians for a marketplace domain. You need a read permission for the domain. + */ + get: operations["get_domain_custodians"]; + /** + * Update custodians for a marketplace domain. + * @description Use this endpoint to update custodians for a marketplace domain. You need a manage permission for the domain. + */ + put: operations["update_domain_custodians"]; + /** + * Add custodians to a marketplace domain. + * @description Use this endpoint to add custodians to a marketplace domain. You need a manage permission for the domain. + */ + post: operations["add_domain_custodians"]; + /** + * Remove custodians from a marketplace domain. 
+ * @description Use this endpoint to remove custodians from a marketplace domain. You need a manage permission for the domain. + */ + delete: operations["remove_domain_custodians"]; + }; + "/orgs/{org_id}/custodians": { + /** + * Get organization custodians. + * @description Use this endpoint to fetch custodians of organization. Org read permission is required to access this endpoint. + */ + get: operations["get_org_custodians"]; + /** + * Update organization custodians. + * @description Users listed within the request body will be updated as custodians for the organization. Users can be identified by their email or id. + */ + put: operations["update_org_custodians"]; + /** + * Add organization custodians. + * @description Users listed within the request body will be added as custodians to the organization. Users can be identified by their email or id. + */ + post: operations["add_org_custodians"]; + /** + * Remove organization custodians. + * @description Users listed within the request body will be deleted as custodians from the organization. Users can be identified by their email or id. + */ + delete: operations["remove_org_custodians"]; + }; + "/token": { + /** + * Login with Basic Authentication + * @description Use this endpoint for authentication if your organization allows basic authentication. A successful authentication attempt will result in an `access_token` that can be used to make authenticated requests to other API endpoints. The `access_token` automatically expires after a fixed duration, but you can also call the `/logout` endpoint to invalidate the access token at the end of your session. + * + * Nexla supports various methods of authentication, including Basic (email/password), Google SSO, and custom SAML- or OIDC-based SSO. One or more of these methods might be allowed in any organization, depending on the configuration chosen by the administrators. 
Instead of using this endpoint to start a session programmatically, we recommend performing authentication through the Nexla UI and using the Nexla Session Token (available in Tools >> Nexla Session Token) to connect to the API programmatically. + * + * > Note: A user might belong to multiple organizations. This method initiates an authenticated session in their default organization. + */ + post: operations["login_with_basic_auth"]; + }; + "/token/logout": { + /** + * Logout + * @description Ends the current session and invalidates the `NexlaSessionToken` for future requests. + */ + post: operations["logout"]; + }; + "/limits": { + /** + * Get current rate limit and usage + * @description Returns the API rate limiting categories and the user's current usage + */ + get: operations["limits"]; + }; + "/api_auth_configs": { + /** + * Get auth configs. + * @description Get the authentication configurations for the API. This will return auth configs owned by current user. + */ + get: operations["get_api_auth_configs"]; + /** + * Create auth config. + * @description Create a new authentication configuration for the API. + */ + post: operations["create_api_auth_config"]; + }; + "/api_auth_configs/all": { + /** + * Get all auth configs. + * @description Get the authentication configurations for the API. This will return all auth configs. Super-admin privilege is required. + */ + get: operations["get_all_api_auth_configs"]; + }; + "/api_auth_configs/{auth_config_id}": { + /** + * Get auth config. + * @description Get the authentication configuration by its ID. + */ + get: operations["get_api_auth_configs"]; + /** + * Update auth config. + * @description Update an authentication configuration for the API. + */ + put: operations["update_api_auth_config"]; + /** + * Delete auth config. + * @description Delete an authentication configuration for the API. 
+ */ + delete: operations["delete_api_auth_config"]; + }; + "/signup": { + /** + * Sign Up + * @description This endpoint is used for users to register in the system. Once signup process is completed (email is verified, manual approval by admin may be required), + * user can set a password and login to the system. + * Email verification link is sent to the email provided by the user. + * + * Optionally, it allows for logged in user to be called. In this case, email verification will be skipped, and new org is created immediately. + */ + post: operations["self_sign_up"]; + }; + "/signup/verify_email": { + /** + * Verify Email + * @description This endpoint is used to verify the email address of the user. + * The user will be able to set a password and login to the system after the email is verified (unless manual admin approval is required). + */ + get: operations["verify_email"]; + }; + "/self_signup_requests": { + /** + * List Self Sign Up Requests + * @description Returns a list of self sign up requests for an admin. + */ + get: operations["get_self_signup_requests"]; + }; + "/self_signup_requests/{request_id}/approve": { + /** + * Approve Self Sign Up Request + * @description This endpoint is used to approve a self sign up request. System admin access required. + * The user will be able to set a password and login to the system after the request is approved. + */ + put: operations["approve_self_sign_up_request"]; + }; + "/self_signup_blocked_domains": { + /** + * List self-sign-up blocked domains for admins. + * @description Returns a list of domains that are blocked for self-sign-up. Requires admin access. + */ + get: operations["get_self_signup_blocked_domains"]; + /** + * Add self-sign-up blocked domain for admins. + * @description Adds a domain to the list of domains that are blocked for self-sign-up. Requires admin access. 
+ */ + post: operations["add_self_signup_blocked_domain"]; + }; + "/self_signup_blocked_domains/{domain_id}": { + /** + * Update self-sign-up blocked domain for admins. + * @description Updates a domain in the list of domains that are blocked for self-sign-up. Requires admin access. + */ + put: operations["update_self_signup_blocked_domain"]; + /** + * Delete self-sign-up blocked domain for admins. + * @description Deletes a domain from the list of domains that are blocked for self-sign-up. Requires admin access. + */ + delete: operations["delete_self_signup_blocked_domain"]; + }; + "/orgs/{org_id}/auth_settings": { + /** + * Get auth settings for org. + * @description Get the authentication settings for the given org. This allows enabling or disabling specific auth configs for the org. + */ + get: operations["get_api_auth_settings"]; + }; + "/orgs/{org_id}/auth_settings/{auth_setting_id}": { + /** + * Update auth config (enable/disable). + * @description Update an authentication configuration for the API. + */ + put: operations["update_api_auth_config"]; + }; + "/async_tasks": { + /** + * Get async operations list for current user. + * @description Get a list of async operations for current user. Returns type, arguments, status, results etc. + */ + get: operations["get_async_tasks"]; + /** + * Create an async operation. + * @description Create an async operation. Returns the task id and other related data. Checks if the user has permission to create the task with all entities, and other preconditions. + */ + post: operations["create_async_task"]; + }; + "/async_tasks/of_type/{task_type}": { + /** + * Get async operations list for current user of a specific type. + * @description Get a list of async operations for current user of a specific type. Returns type, arguments, status, results etc. 
+ */ + get: operations["get_async_tasks_of_type"]; + }; + "/async_tasks/by_status/{status}": { + /** + * Get async operations list for current user by status + * @description Get a list of async operations for current user of a specific type. Returns type, arguments, status, results etc. + */ + get: operations["get_async_tasks_by_status"]; + }; + "/async_tasks/types": { + /** + * Get async operation types + * @description Get a list of async operation types. Returns type, arguments, status, results etc. + */ + get: operations["get_async_task_types"]; + }; + "/async_tasks/explain_arguments/{task_type}": { + /** + * Get async operation arguments for a specific type with descriptions + * @description Get a list of async operation arguments for a specific type with descriptions. + */ + get: operations["get_async_tasks_explain_arguments"]; + }; + "/async_tasks/{task_id}": { + /** + * Get async operation by ID + * @description Get an async operation by ID. Returns type, arguments, status, results and other fields. + */ + get: operations["get_async_task"]; + /** + * Delete async operation by ID + * @description Delete an async operation by ID. Returns the task id and other related data. + */ + delete: operations["delete_async_task"]; + }; + "/async_tasks/{task_id}/rerun": { + /** + * Rerun async operation + * @description Rerun an async operation. This is used to re-run an async operation. The task will be re-created and executed with the same arguments. + */ + post: operations["rerun_async_task"]; + }; + "/async_tasks/{task_id}/result": { + /** + * Get async operation result + * @description Get the result of an async operation. + */ + get: operations["get_async_task_result"]; + }; + "/async_tasks/{task_id}/download_link": { + /** + * Get download link for async operation result + * @description Get a download link for the result of an async operation. 
+ */ + get: operations["get_async_task_download_link"]; + }; + "/async_tasks/{task_id}/acknowledge": { + /** + * Acknowledge async operation + * @description Acknowledge an async operation. This is used to confirm that the user has seen the results of the async operation. After that, if tasks has stored results, they will be deleted. + */ + post: operations["acknowledge_async_task"]; + }; + "/runtimes": { + /** + * Get all Custom Runtimes + * @description Retrieves a list of all custom runtimes defined for the organization. + */ + get: operations["get_runtimes"]; + /** + * Create a Custom Runtime + * @description Creates a custom runtime with the specified configuration. + */ + post: operations["create_runtime"]; + }; + "/runtimes/{runtime_id}": { + /** + * Get a custom runtime by ID + * @description Retrieves a custom runtime + */ + get: operations["get_runtime"]; + /** + * Update a Custom Runtime + * @description Updates a custom runtime. + */ + put: operations["update_runtime"]; + /** + * Delete a Custom Runtime + * @description Creates a custom runtime with the specified configuration. + */ + delete: operations["delete_runtime"]; + }; + "/runtimes/{runtime_id}/activate": { + /** + * Activate a Custom Runtime + * @description Activates a custom runtime with the specified ID. + */ + put: operations["activate_runtime"]; + }; + "/runtimes/{runtime_id}/pause": { + /** + * Pause a Custom Runtime + * @description Pause a custom runtime with the specified ID. + */ + put: operations["pause_runtime"]; + }; + "/gen_ai_integration_configs": { + /** + * Get all GenAI configs in org + * @description Retrieves all GenAI configurations accessible to the authenticated user. + */ + get: operations["get_gen_ai_configs"]; + /** + * Create a GenAI config + * @description Creates a GenAI configuration. 
+ */ + post: operations["create_gen_ai_config"]; + }; + "/gen_ai_integration_configs/{gen_ai_config_id}": { + /** + * Get GenAI Integration Config + * @description Retrieves a GenAI integration configuration by ID. + */ + get: operations["get_gen_ai_integration_config"]; + /** + * Update GenAI Integration Config + * @description Updates a GenAI integration configuration by ID. + */ + put: operations["update_gen_ai_integration_config"]; + /** + * Delete GenAI Integration Config + * @description Deletes a GenAI integration configuration by ID. + */ + delete: operations["delete_gen_ai_integration_config"]; + }; + "/gen_ai_org_settings": { + /** + * Get all bindings of GenAI configs of the org for specified usages. + * @description Retrieves all activated GenAI configurations for the org. + */ + get: operations["get_gen_ai_org_settings"]; + /** + * Create a binding of GenAI config for the org for specific usage. + * @description Activates a GenAI configuration for specific usage. All other bindings for the same usage will be deactivated. + */ + post: operations["create_gen_ai_org_setting"]; + }; + "/gen_ai_org_settings/{gen_ai_org_setting_id}": { + /** + * Get Org GenAI binding + * @description Retrieves a GenAI configuration binding by ID. + */ + get: operations["get_gen_ai_org_setting"]; + /** + * Delete GenAI Config binding for org. + * @description Delete GenAI config binding for an org (disables a GenAI configuration usage). + */ + delete: operations["delete_gen_ai_org_setting"]; + }; + "/gen_ai_org_settings/active_config": { + /** + * Shows active GenAI Configuration for specific usage + * @description Shows active GenAI Configuration for specific usage + */ + get: operations["gen_ai_org_settings_show_active"]; + }; +} + +export interface webhooks { + "send_one_record": { + /** Send one record to Webhook */ + post: operations["send_one_record"]; + }; + "send_many_records": { + /** + * Send many records to Webhook + * @description Send an array of JSON objects. 
Nexla will treat each object as a unique record for the webhook. + */ + post: operations["send_many_records"]; + }; +} + +export interface components { + schemas: { + owner: { + id?: number; + full_name?: string; + /** Format: email */ + email?: string; + }; + org: { + id?: number; + name?: string; + email_domain?: string; + email?: null; + client_identifier?: null; + }; + /** + * @description This property reflects all the permissions the user/team/organization has to this resource. + * + * 1. `collaborator`: The user/team/organization can view the resource but not make any modifications to it. + * 2. `operator`: The user/team/organization can view the resource and can activate/pause it, but not make any other modifications to it. + * 3. `administrator`: The user/team/organization has complete administrative rights to this resource. + * 4. `owner`: This user created the resource and so has complete administrative rights to it. + */ + AccessRoles: ("owner" | "collaborator" | "operator" | "admin")[]; + data_credential: { + name?: string; + description?: string; + credentials_type?: string; + }; + database_data_credential: { + credentials?: { + /** + * @description __Requires SSH Tunnel for access?__: If your database is not publicly accessible, Nexla enables connecting via an SSH tunnel. When using this method, Nexla will connect to a bastion host via SSH, and the database connection will occur from the SSH host. + * + * Default value: `false` + */ + has_ssh_tunnel?: boolean; + /** + * @description __SSH Tunnel Host__: To connect via an SSH tunnel, you need to use a bastion host running an SSH tunnel server with access to your database. Enter the SSH tunnel hostname or IP address of the bastion host. + * + * __Required__ if `has_ssh_tunnel` is `true` + */ + "tunnel.bastion.host"?: string; + /** + * @description __SSH Tunnel Port__: Enter the port of the tunnel bastion host to which Nexla can connect. 
+ * + * __Required__ if `has_ssh_tunnel` is `true` + * + * Default Value: `22` + */ + "tunnel.bastion.port"?: number; + /** + * @description __Username for tunnel__: As part of setting up the bastion host, you also need to create an SSH user for Nexla. Enter that username here. Usually, this is set to `nexla`. + * + * __Required__ if `has_ssh_tunnel` is `true` + */ + "tunnel.bastion.user"?: string; + }; + }; + as400_data_credential: { + credentials_type: "as400"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format __company.domain.com__. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + aws_athena_data_credential: { + credentials_type: "aws_athena"; + } & Omit & { + credentials?: { + /** + * @description __Authentication Method__: Credentials can be configured to grant access via various AWS permission mechanisms. Please select the authentication method you wish to use. + * + * @default Access Key + * @enum {string} + */ + "aws.auth.type": "Access Key"; + /** @description __AWS Access Key__: AWS Access Key ID for Athena database access. */ + "access.key.id": string; + /** @description __AWS Secret Key__: AWS Secret Key ID for Athena database access. */ + "secret.key": string; + /** + * @description __AWS Region__: AWS region of the Athena database. 
+ * + * @default us-east-1 + */ + "aws.region": string; + /** + * @description __Schema Name__: Specify the schema or database containing the tables you wish to query. + * + * @default default + */ + schema_name: string; + /** @description __S3 Path For Query Results__: Specify an S3 path in the current region to save query results. Use the format `s3://bucket-name/path/to/object/`. */ + "s3.output.location": string; + }; + } & components["schemas"]["database_data_credential"]; + file_data_credential: { + credentials?: { + /** + * @description __Handle File Encryption/Decryption?__: The platform can be configured to process encrypted files such that a Source will decrypt files before scanning, and a Destination will encrypt the generated files before uploading them to your storage. + * + * Default value: `false` + */ + file_encryption_enabled?: boolean; + /** + * @description __File Encryption Protocol__: The type of file encryption protocol that should be used be used for encrypting/decrypting files. + * + * __Required__ if `file_encryption_enabled` is `true` + * + * Default value: `pgp` + * + * @enum {string} + */ + "encrypt.standard"?: "pgp"; + /** + * @description __External User ID__: The ID of the user whose public key is to be used for encryption/decryption. + * + * __Required__ if `file_encryption_enabled` is `true` and `encrypt.standard` is `pgp` + */ + "external.user.id"?: string; + /** + * @description __External User's Public Key__: The external user's public key that will be used for the encryption/decryption of files. + * + * __Required__ if `file_encryption_enabled` is `true` and `encrypt.standard` is `pgp` + */ + "external.public.key"?: string; + /** + * @description __Your User ID for Private Key__: Set this to the user ID used to generate the PGP private key. 
+ * + * __Required__ if `file_encryption_enabled` is `true` and `encrypt.standard` is `pgp` + */ + "encrypt.user.id"?: string; + /** + * @description __Your Password for Private Key__: Set this to the password of the user ID used to generate the PGP private key. + * + * __Required__ if `file_encryption_enabled` is `true` and `encrypt.standard` is `pgp` + */ + "encrypt.private.password"?: string; + /** + * @description __Your Private Key__: Enter your PGP private key. This private key (and the associated user id and password) will be used for encrypting/decrypting files. + * + * __Required__ if `file_encryption_enabled` is `true` and `encrypt.standard` is `pgp` + */ + "encrypt.private.key"?: string; + }; + }; + azure_blb_data_credential: { + credentials_type: "azure_blb"; + } & Omit & ({ + credentials?: { + /** + * @description __Azure Storage Account Name__: The name of the storage account that you wish to access. + * + * __Applicable and required__ if `azure_blb.auth.type` is `SAS Token` or `Storage Account Key` + */ + "storage.account.name"?: string; + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow Azure access through different authentication mechanisms. Select the type of Authentication method you wish to use. + * + * * `SAS Token` for Shared Access Signature Token authentication. + * + * * `Connection String` for Key Connection String authentication. + * + * * `Storage Account Key` for Storage Account Key authentication. + * + * @enum {string} + */ + "azure_blb.auth.type": "SAS Token" | "Connection String" | "Storage Account Key"; + /** + * @description __Shared Access Signature Token__: Enter the shared access signature token for the Azure storage that you wish to access. + * + * __Required__ if `azure_blb.auth.type` is `SAS Token` + */ + "sas.token"?: string; + /** + * @description __Key Connection String__: Enter the Azure connection string for the Azure storage that you wish to access. 
+ * + * __Required__ if `azure_blb.auth.type` is `Connection String` + */ + "key.connection.string"?: string; + /** + * @description __Storage Account Key__: Enter the Azure storage account key for the Azure storage that you wish to access. + * + * __Required__ if `azure_blb.auth.type` is `Storage Account Key` + */ + "storage.account.key"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + azure_data_lake_data_credential: { + credentials_type: "azure_data_lake"; + } & Omit & ({ + credentials?: { + /** @description __Azure Storage Account Name__: The name of the storage account that you wish to access. */ + "storage.account.name": string; + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow Azure access through different authentication mechanisms. Select the type of Authentication method you wish to use. + * + * * `SAS Token` for Shared Access Signature Token authentication. + * + * * `Connection String` for Key Connection String authentication. + * + * * `Storage Account Key` for Storage Account Key authentication. + * + * @enum {string} + */ + "azure_dl.auth.type": "SAS Token" | "Connection String" | "Storage Account Key"; + /** + * @description __Shared Access Signature Token__: Enter the shared access signature token for the Azure storage that you wish to access. + * + * __Required__ if `azure_dl.auth.type` is `SAS Token` + */ + "sas.token"?: string; + /** + * @description __Key Connection String__: Enter the Azure connection string for the Azure storage that you wish to access. + * + * __Required__ if `azure_dl.auth.type` is `Connection String` + */ + "key.connection.string"?: string; + /** + * @description __Storage Account Key__: Enter the Azure storage account key for the Azure storage that you wish to access. 
+ * + * __Required__ if `azure_dl.auth.type` is `Storage Account Key` + */ + "storage.account.key"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + azure_synapse_data_credential: { + credentials_type: "azure_synapse"; + } & Omit & ({ + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format __company.domain.com__. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + /** @description __Schema Name__: Enter the schema name for the database to which you wish to connect. */ + schema_name?: string; + /** + * @description __Connection Mode / Application Intent__: You can restrict database connectivity capabilities by specifying the connection mode. Select __Read Only__ to connect to the database in __readonly__ mode. + * + * * `readonly`: Connect in read-only mode. + * + * * `readwrite`: Connect in read-write mode. + * + * Default value: `readwrite` + * + * @enum {string} + */ + "database.field.applicationIntent"?: "readonly" | "readwrite"; + }; + }) & components["schemas"]["database_data_credential"]; + bigquery_data_credential: { + credentials_type: "bigquery"; + } & Omit & { + credentials?: { + /** + * @description __Is Service Account Authentication?__ We support multiple ways of authenticating your BigQuery account. We recommend using the Google Service Account (System User Authentication) method, as it is best suited for accessing your data and is tied to the service account instead of the individual user account. 
+ * * Set to `true` if you wish to choose Google Service Account (System User Authentication). + * * Set this to `false` if you wish to authenticate using 3-legged OAuth. + * Default Value: `false` + */ + is_service_account: boolean; + /** @description __Project ID__: The Project ID to which the database you wish to access belongs. */ + project_id: string; + /** + * @description __Service Account JSON credentials content__: This is the content of the service account credentials JSON file generated by Google Cloud IAM, which is added to the payload as a JSON object. + * + * __Required__ if `is_service_account` is `true` + */ + json_creds?: Record; + /** + * @description __End User Authentication Type__: This must be set to `OAUTH2` if you are choosing the 3-legged OAuth authentication mechanism. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "auth.type"?: "OAUTH2"; + /** + * @description __Client ID__: The Client ID of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.id"?: string; + /** + * @description __Client Secret__: The Client Secret of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.secret"?: string; + /** + * @description __Access Token URL__: The OAuth 2.0 Token URL used for fetching the token from the API token server. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.token.type.override"?: "Bearer"; + /** + * @description __Access Token URL method__: The request method used for the OAuth 2.0 Token URL. 
This must be `POST`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.method"?: "POST"; + /** + * @description __Nexla OAuth 2.0 connector codename__: Unique codename for the Nexla token refresh mechanism, which is used to identify this connector. This must be `bigquery`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.name"?: "bigquery"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.token_type"?: "Bearer"; + /** + * @description __Authentication Payload Mode__: Set how the OAuth 2.0 Client ID and Client Secret should be sent with the Token URL. This must be set to `header`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.auth_scheme"?: "header"; + /** + * @description __API Scopes__: Scopes that should be added to the OAuth token calls. This should be `https://www.googleapis.com/auth/bigquery https://www.googleapis.com/auth/devstorage.read_write`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.scopes"?: "https://www.googleapis.com/auth/bigquery https://www.googleapis.com/auth/devstorage.read_write"; + /** + * @description __Refresh Token URL__: The OAuth 2.0 Token Refresh URL that is used for fetching a new access token using the current access token and refresh token. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.refresh_url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Requires Auto-Refresh of Token__: Access tokens are short-lived, and the platform should automatically continuously refresh the token to retain a valid access token. 
This must be `true`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {boolean} + */ + "vendor.has_token_expiration_ts"?: true; + /** + * @description __Access token expiration time__: Set this to the expiration time (in seconds) of the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. This is usually `3599` for a 3-legged OAuth workflow. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.token_expires_in"?: number; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + access_token?: string; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the the 3-legged OAuth 2.0 token fetching sequence. Note that this is the same as `access_token` above but is required in the payload to allow Nexla to automatically continuously refresh tokens. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.access_token"?: string; + /** + * @description __Refresh Token__: Set this to the refresh token received when you made the first token request as part of the the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.refresh_token"?: string; + /** + * @description __GCS Location for staging data__: Before loading data from/to Bigquery, we might need to stage the data in a temporary GCS location. This temporary location is created by the platform automatically if the user has permission to create a bucket/path. + * + * But you can override bucket creation and instead name a specific GCS location where temporary staging files will be created. 
+ */ + "gcs.location"?: string; + /** + * @description __Temporary Dataset__: Before loading data from/to BigQuery we might need to stage the data in a temporary dataset. This temporary dataset is created by the platform automatically if the user has permission to create a dataset. + * + * However, you can override dataset creation and instead name a specific existing dataset as the temporary dataset to be used. + */ + "temp.table.dataset"?: string; + }; + } & components["schemas"]["file_data_credential"]; + box_data_credential: { + credentials_type: "box"; + } & Omit & { + credentials?: { + /** + * @description __End User Authentication Type__: This must be set to `OAUTH2` if you are choosing the 3-legged OAuth authentication mechanism. + * + * @enum {string} + */ + "auth.type": "OAUTH2"; + /** @description __Client ID__: The Client ID of your OAuth 2.0 client. */ + "oauth2.client.id": string; + /** @description __Client Secret__: The Client Secret of your OAuth 2.0 client. */ + "oauth2.client.secret": string; + /** + * @description __Access Token URL__: The OAuth 2.0 Token URL used for fetching the token from the API token server. This must be `https://api.box.com/oauth2/token`. + * + * @enum {string} + */ + "oauth2.access.token.url": "https://api.box.com/oauth2/token"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * @enum {string} + */ + "oauth2.token.type.override": "Bearer"; + /** + * @description __Access Token URL method__: The request method used for the OAuth 2.0 Token URL. This must be `POST`. + * + * @enum {string} + */ + "oauth2.access.token.method": "POST"; + /** + * @description __Nexla OAuth 2.0 connector codename__: Unique codename for the Nexla token refresh mechanism, which is used to identify this connector. This must be `box`. 
+ * + * @enum {string} + */ + "vendor.name": "box"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * @enum {string} + */ + "vendor.token_type": "Bearer"; + /** + * @description __Authentication Payload Mode__: Set how the OAuth 2.0 Client ID and Client Secret should be sent with the Token URL. This must be set to `query`. + * + * @enum {string} + */ + "vendor.auth_scheme": "query"; + /** + * @description __Refresh Token URL__: The OAuth 2.0 Token Refresh URL that is used for fetching a new access token using the current access token and refresh token. This must be `https://api.box.com/oauth2/token`. + * + * @enum {string} + */ + "vendor.refresh_url": "https://api.box.com/oauth2/token"; + /** + * @description __Requires Auto-Refresh of Token__: Access tokens are short-lived, and the platform should automatically continuously refresh the token to retain a valid access token. This must be `true`. + * + * @enum {boolean} + */ + "vendor.has_token_expiration_ts": true; + /** @description __Access token expiration time__: Set this to the expiration time (in seconds) of the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. This is usually `3599` for a 3-legged OAuth workflow. */ + "vendor.token_expires_in": number; + /** @description __Access Token__: Set this to the access token received when you made the first token request as part of the the 3-legged OAuth 2.0 token fetching sequence. */ + access_token: string; + /** @description __Access Token__: Set this to the access token received when you made the first token request as part of the the 3-legged OAuth 2.0 token fetching sequence. Note that this is the same as `access_token` above but is required in the payload to allow Nexla to automatically continuously refresh tokens. 
*/ + "vendor.access_token": string; + /** @description __Refresh Token__: Set this to the refresh token received when you made the first token request as part of the the 3-legged OAuth 2.0 token fetching sequence. */ + "vendor.refresh_token": string; + }; + } & components["schemas"]["file_data_credential"]; + cloudsql_mysql_data_credential: { + credentials_type: "cloudsql_mysql"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + cloudsql_postgres_data_credential: { + credentials_type: "cloudsql_postgres"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. 
*/ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + cloudsql_sqlserver_data_credential: { + credentials_type: "cloudsql_sqlserver"; + } & Omit & ({ + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format __company.domain.com__. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + /** @description __Schema Name__: Enter the schema name for the database to which you wish to connect. */ + schema_name?: string; + /** + * @description __Connection Mode / Application Intent__: You can restrict database connectivity capabilities by specifying the connection mode. Select __Read Only__ to connect to the database in __readonly__ mode. + * + * * `readonly`: Connect in read-only mode. + * + * * `readwrite`: Connect in read-write mode. + * + * Default value: `readwrite` + * + * @enum {string} + */ + "database.field.applicationIntent"?: "readonly" | "readwrite"; + }; + }) & components["schemas"]["database_data_credential"]; + stream_data_credential: { + credentials?: { + /** + * @description __Requires SSH Tunnel for access?__: If your server is not publicly accessible, Nexla enables connecting via an SSH tunnel. When using this method, Nexla will connect to a bastion host via SSH, and the server connection will occur from the SSH host. 
+ * + * Default value: `false` + */ + has_ssh_tunnel?: boolean; + /** + * @description __SSH Tunnel Host__: To connect via an SSH tunnel, you need to use a bastion host running an SSH tunnel server with access to your database. Enter the SSH tunnel hostname or IP address of the bastion host. + * + * __Required__ if `has_ssh_tunnel` is `true` + */ + "tunnel.bastion.host"?: string; + /** + * @description __SSH Tunnel Port__: Enter the port of the tunnel bastion host to which Nexla can connect. + * + * __Required__ if `has_ssh_tunnel` is `true` + * + * Default Value: `22` + */ + "tunnel.bastion.port"?: number; + /** + * @description __Username for tunnel__: As part of setting up the bastion host, you also need to create an SSH user for Nexla. Enter that username here. Usually, this is set to `nexla`. + * + * __Required__ if `has_ssh_tunnel` is `true` + */ + "tunnel.bastion.user"?: string; + }; + }; + confluent_kafka_data_credential: { + credentials_type: "confluent_kafka"; + } & Omit & { + credentials?: { + /** @description __Kafka Bootstrap Servers__: Enter the addresses of the Kafka brokers. These are usually in the form of Virtual IP addresses. */ + "target.bootstrap.servers": string; + /** + * @description __Security Protocol__: Choose the appropriate security protocol for your Kafka channels. + * + * * `SASL_SSL`: SASL-authenticated SSL channel + * + * @enum {string} + */ + "security.protocol": "SASL_SSL"; + /** @description __Confluent Cloud API Key__: Enter the API Key for the Confluent Cloud cluster. */ + "sasl.jaas.username": string; + /** @description __Confluent Cloud API Secret__: Enter the API Secret required to connect to the Confluent Cloud cluster. */ + "sasl.jaas.password": string; + /** + * @description __Consumer Group ID__: Enter the Group ID of the group to which Nexla consumers will belong. + * + * This is usually set to `nexla-consumer`. 
+ */ + "group.id.prefix"?: string; + }; + } & components["schemas"]["stream_data_credential"]; + databricks_data_credential: { + credentials_type: "databricks"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Type__: Select the authentication type for your databricks instance. + * + * * `token` - Token Based Authentication: Use this authentication method if you are using a personal access token to authenticate with Databricks. See https://docs.databricks.com/aws/en/dev-tools/auth/pat#pat-user for more information. + * + * * `oidc` - OAuth Authentication: Use this authentication method if you are using OAuth2.0 to authenticate with Databricks. This method is recommended for production environments. + * + * @enum {string} + */ + databricks_auth_type: "token" | "oidc"; + /** + * @description __URL Format__: You can set the Databricks authentication information as a JDBC URL or as parts that will be combined by Nexla to create the connection string. + * + * * `jdbc_url` - JDBC URL: Set the JDBC URL to your databricks location. This should be in the form `jdbc:spark:/...` or `jdbc:databricks:/...`. + * + * * `http_path_parts` - HTTP Path Parts: Set the HTTP Path for the SQL Endpoint. This is usually in the form `sql/protocolv1/o//0916-102516-naves603` and available in __JDBC settings__ in your databricks console. + * + * @enum {string} + */ + "ui.url_format": "jdbc_url" | "http_path_parts"; + /** + * @description __JDBC URL__: Enter the JDBC URL to your databricks location. This should be in the form `jdbc:databricks:/...` or `jdbc:spark:/...`. + * + * **Note**: For **OAuth Authentication** please ensure that the URL does not include the `AuthMech` and `Auth_Flow` parameters. The Databricks Console for copying connection strings often includes these parameters, so you may need to remove them before using the connection string in Nexla. + * + * __Required__ if `ui.url_format` is `jdbc_url`. 
+ */ + url?: string; + /** + * @description __Host__: Enter the hostname for your database. This is in the format `company.domain.com`. Do not include the connection protocol. + * __Required__ if `ui.url_format` is `http_path_parts`. + */ + host?: string; + /** + * @description __Port__: Enter the port used to access your database. + * __Required__ if `ui.url_format` is `http_path_parts`. + */ + port?: number; + /** + * @description __HTTP Path__: Enter the HTTP Path of the SQL Endpoint. + * This is usually in the form `sql/protocolv1/o//0916-102516-naves603` and is available in __JDBC settings__ in your Databricks console. + * __Required__ if `ui.url_format` is `http_path_parts`. + */ + http_path?: string; + /** + * @description __Username__: Enter the username used to access your database. + * __Required__ if `ui.url_format` is `http_path_parts`. + */ + username?: string; + /** + * @description __Password__: Enter the password used to access your database. + * __Required__ if `databricks_auth_type` is `token` and `ui.url_format` is `http_path_parts`. + */ + password?: string; + /** + * @description __OAuth2 Authentication Mechanism__: Set the authentication mechanism for your databricks instance. + * __Required__ if `databricks_auth_type` is `oidc`. + * __Applicable__ only if `databricks_auth_type` is `oidc`. + * + * @enum {string} + */ + "databricks.field.AuthMech"?: 11; + /** + * @description __OAuth2 Authentication Flow__: Set the authentication flow for your databricks instance. + * __Required__ if `databricks_auth_type` is `oidc`. + * __Applicable__ only if `databricks_auth_type` is `oidc`. + * + * @enum {string} + */ + "databricks.field.Auth_Flow"?: 1; + /** + * @description __Service Principal UUID/Application ID__: Enter the Service Principal UUID/Application ID for the Databricks warehouse you wish to connect to. + * __Required__ if `databricks_auth_type` is `oidc`. + * __Applicable__ only if `databricks_auth_type` is `oidc`. 
+ */ + "databricks.field.OAuth2ClientId"?: string; + /** + * @description __Service Principal Secret__: Enter the Service Principal Secret for the Databricks you wish to connect to. + * __Required__ if `databricks_auth_type` is `oidc`. + * __Applicable__ only if `databricks_auth_type` is `oidc`. + */ + "databricks.field.OAuth2Secret"?: string; + /** + * @description __Database Name__: Enter the database name if you want to connect to a specific database. + * Usually, this is set to: `spark`. + */ + database_name?: string; + /** + * @description __Schema Name__: Enter the schema name for the database to which you wish to connect. + * Usually, this is set to `default`. + */ + schema_name?: string; + /** + * @description __Databricks Cloud Type__: Select the cloud environment type of your Databricks instance. + * Usually, this is the Databricks cloud environment, but we also support connecting to Databricks instances running in other cloud environments. + * + * @enum {string} + */ + "databricks.cloud.type": "AWS" | "Azure" | "databricks" | "GCP"; + /** + * @description __Nexla Credential ID for temporary data staging__: Before loading data to Databricks we need to stage the data temporarily. In the default configuration, the staging data is stored in a Nexla private location, but you can configure the platform to stage data in your custom Cloud storage location. + * Enter the __Nexla Credential ID__ of your S3 or Azure Blob storage account where you want Nexla to write temporary and final data. Leave this blank if you want Nexla to handle temporary data storage. + * __Applicable__ if `databricks.cloud.type` is `AWS` or `Azure`. + * Ignore this property if you do not want to use your custom data-staging area. + */ + "databricks.cloud.credentials.id"?: number; + /** + * @description __S3 Path for Temporary Data Staging__: If you are choosing to store temporary data in your own AWS S3 location, please configure the S3 path where the temporary data should be staged. 
Note that this path must be accessible to the Nexla Credential ID entered in the previous field. + * __Applicable and required__ only if `databricks.cloud.type` is `AWS` and you are setting a valid credential ID for `databricks.cloud.credentials.id`. + */ + "databricks.temp.s3.bucket"?: string; + /** + * @description __S3 Path for Delta Tables__: If you are choosing to store temporary data in your own AWS S3 location, please configure the S3 path where the delta tables should be staged. Note that this path must be to Nexla Credential ID provided earlier. + * Leave this blank for creating Delta tables in DBFS. + * __Applicable and required__ only if `databricks.cloud.type` is `AWS` and you are setting a valid credential ID for `databricks.cloud.credentials.id`. + */ + "databricks.destination.s3.bucket"?: string; + /** + * @description __Azure Path for Temporary Data Staging__: If you are choosing to store temporary data in your own Azure Blob location, please configure the path where the temporary data should be created. Note that this path must be accessible to the Nexla Credential ID entered in the previous field. + * __Applicable and required__ only if `databricks.cloud.type` is `Azure` and you are setting a valid credential ID for `databricks.cloud.credentials.id`. + */ + "databricks.temp.azure.bucket"?: string; + /** + * @description __Azure Path for Delta Tables__: If you are choosing to store temporary data in your own Azure Blob location, please configure the path where the delta tables should be created. Note that this path must be accessible to the Nexla Credential ID provided earlier. + * Leave this blank for creating Delta tables in DBFS. + * __Applicable and required__ only if `databricks.cloud.type` is `Azure` and you are setting a valid credential ID for `databricks.cloud.credentials.id`. 
+ */ + "databricks.destination.azure.bucket"?: string; + }; + }) & components["schemas"]["database_data_credential"]; + db2_data_credential: { + credentials_type: "db2"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + delta_lake_azure_blb_data_credential: { + credentials_type: "delta_lake_azure_blb"; + } & Omit & ({ + credentials?: { + /** + * @description __Azure Storage Account Name__: The name of the storage account that you wish to access. + * + * __Applicable and required__ if `azure_blb.auth.type` is `SAS Token` or `Storage Account Key` + */ + "storage.account.name"?: string; + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow Azure access through different authentication mechanisms. Select the type of Authentication method you wish to use. + * + * * `SAS Token` for Shared Access Signature Token authentication. + * + * * `Connection String` for Key Connection String authentication. + * + * * `Storage Account Key` for Storage Account Key authentication. + * + * @enum {string} + */ + "azure_blb.auth.type": "SAS Token" | "Connection String" | "Storage Account Key"; + /** + * @description __Shared Access Signature Token__: Enter the shared access signature token for the Azure storage that you wish to access. 
+ * + * __Required__ if `azure_blb.auth.type` is `SAS Token` + */ + "sas.token"?: string; + /** + * @description __Key Connection String__: Enter the Azure connection string for the Azure storage that you wish to access. + * + * __Required__ if `azure_blb.auth.type` is `Connection String` + */ + "key.connection.string"?: string; + /** + * @description __Storage Account Key__: Enter the Azure storage account key for the Azure storage that you wish to access. + * + * __Required__ if `azure_blb.auth.type` is `Storage Account Key` + */ + "storage.account.key"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + delta_lake_azure_data_lake_data_credential: { + credentials_type: "delta_lake_azure_data_lake"; + } & Omit & ({ + credentials?: { + /** @description __Azure Storage Account Name__: The name of the storage account that you wish to access. */ + "storage.account.name": string; + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow Azure access through different authentication mechanisms. Select the type of Authentication method you wish to use. + * + * * `SAS Token` for Shared Access Signature Token authentication. + * + * * `Connection String` for Key Connection String authentication. + * + * * `Storage Account Key` for Storage Account Key authentication. + * + * @enum {string} + */ + "azure_dl.auth.type": "SAS Token" | "Connection String" | "Storage Account Key"; + /** + * @description __Shared Access Signature Token__: Enter the shared access signature token for the Azure storage that you wish to access. + * + * __Required__ if `azure_dl.auth.type` is `SAS Token` + */ + "sas.token"?: string; + /** + * @description __Key Connection String__: Enter the Azure connection string for the Azure storage that you wish to access. 
+ * + * __Required__ if `azure_dl.auth.type` is `Connection String` + */ + "key.connection.string"?: string; + /** + * @description __Storage Account Key__: Enter the Azure storage account key for the Azure storage that you wish to access. + * + * __Required__ if `azure_dl.auth.type` is `Storage Account Key` + */ + "storage.account.key"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + delta_lake_s3_data_credential: { + credentials_type: "delta_lake_s3"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Mechanism__ Credentials can be configured to allow S3 access through different AWS permissions mechanisms. Select the type of Authentication method you wish to use. + * * `Access Key` - Select this option if you wish to use AWS Access and Secret keys for your S3 access. + * * `ARN` - Select this option if you wish to use IAM ARN for authentication. + * * `Instance Role` - Select this option if you wish to access S3 using an IAM Instance Role. + * @enum {string} + */ + s3_auth_type: "Access Key" | "ARN" | "Instance Role"; + /** + * @description __AWS Access Key__ + * + * __Required__ if `s3_auth_type` is `Access Key` + */ + access_key?: string; + /** + * @description __AWS Secret Key__ + * + * __Required__ if `s3_auth_type` is `Access Key` + */ + secret_key?: string; + /** + * @description __External Id__: The external ID if S3 has been configured for federated access. + * + * __Required__ if `s3_auth_type` is `ARN` + */ + external_id?: string; + /** + * @description Region for AWS S3 Account. + * + * Default value: `us-east-1` + */ + region?: string; + /** @description __IAM ARN__: The IAM Amazon Resource Name (ARN) for which these permissions are applicable. This should be entered in the format `arn:partition:service:region:account:resource`. 
*/ + arn?: string; + /** @description __S3 Path list access is limited to__: Set this property to `` or `/` if your AWS admin has restricted access to only a specific bucket or a path inside a bucket. */ + "test.path"?: string; + /** + * @description __Enable Client Side Encryption?__: The platform can be configured to encrypt/decrypt S3 objects that require client-side encryption using the AWS Key Management System (KMS). Set this option if KMS encryption is applicable. + * + * Default value: `false` + */ + has_client_encryption?: boolean; + /** + * @description __Client Side Encryption Mode__ : Select the type of KMS encryption mode that is applicable for this credential. + * + * __Required__ if `has_client_encryption` is `true` + * + * @enum {string} + */ + encryption_mode?: "EncryptionOnly" | "AuthenticatedEncryption" | "StrictAuthenticatedEncryption"; + /** + * @description __Amazon KMS Key for Encryption__: The KMS Key used for encrypting/decrypting objects. Please ensure that this user has appropriate KMS permissions. + * + * __Required__ if `has_client_encryption` is `true` + */ + kms_key?: string; + /** + * @description __Enable Server Side Encryption?__: The platform can be configured to encrypt/decrypt S3 objects that require server-side encryption. + * + * This can be achieved using either __Amazon S3-managed encryption keys (SSE-S3)__ or the __AWS Key Management Service (SSE-KMS)__. Set this option if server-side encryption is applicable. + * + * Default value: `false` + */ + "sse.enabled"?: boolean; + /** + * @description __Key ARN for SSE with KMS__: The Key ARN if you want server-side encryption to be performed via the AWS Key Management System. You can leave this blank if you want to use Amazon S3-managed encryption keys. 
+ * + * __Required__ if `sse.enabled` is `true` + */ + "sse.kms_key.arn"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + dropbox_data_credential: { + credentials_type: "dropbox"; + } & Omit & { + credentials?: { + /** @description __Access Token__: Access token used to connect to your Dropbox account. See the Dropbox developer documentation for instructions on how to generate an access token. */ + access_token: string; + }; + } & components["schemas"]["file_data_credential"]; + nosql_data_credential: { + credentials?: Record; + }; + dynamodb_data_credential: { + credentials_type: "dynamodb"; + } & Omit & { + credentials?: { + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow access through different AWS permissions mechanisms. Select the type of Authentication method you wish to use. + * + * - __Access Key__: Use your AWS Access Key and Secret Key to authenticate. + * + * @enum {string} + */ + auth_type: "Access Key"; + /** + * @description __DynamoDB Endpoint__: Endpoint for accessing your DynamoDB instance. Usually of the form `dynamodb.us-east-2.amazonaws.com`. + * + * __Applicable and required__ if `auth_type` is `Access Key`. + */ + "dynamodb.endpoint"?: string; + /** + * @description __AWS Access Key__: Your AWS Access Key. + * + * __Applicable and required__ if `auth_type` is `Access Key`. + */ + "access.key"?: string; + /** + * @description __AWS Secret Key__: Your AWS Secret Key. + * + * __Applicable and required__ if `auth_type` is `Access Key`. + */ + "secret.key"?: string; + /** + * @description __Region__: AWS Region for DynamoDB. Defaults to `us-east-1`. + * + * __Applicable__ if `auth_type` is `Access Key`. 
+ */ + "dynamodb.region"?: string; + }; + } & components["schemas"]["nosql_data_credential"]; + firebase_data_credential: { + credentials_type: "firebase"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Type__: We currently only support __System User Authentication__ method as it is best suited for accessing your own data and is tied to a service account instead of an individual user account. Please contact your Nexla support team if you need __End User Authentication__ mechanism. + * + * * `true`: __System User Authentication__ + * * `false`: __End User Authentication__ (Currently not supported) + * + * @enum {string} + */ + is_service_account?: true | false; + /** + * @description __Project ID__: Enter the Project ID that the Firebase store you wish to access belongs to. + * + * __Applicable and required__ if `is_service_account` is `false`. + */ + project_id?: string; + /** + * @description __Credentials File Content__: Enter the content of the JSON file that contains the Firebase service account credentials. This file is usually downloaded from the Firebase console. + * + * __Applicable and required__ if `is_service_account` is `true`. + */ + json_creds?: string; + }; + }) & components["schemas"]["nosql_data_credential"]; + firebolt_data_credential: { + credentials_type: "firebolt"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. 
*/ + database_name: string; + }; + } & components["schemas"]["database_data_credential"]; + ftp_data_credential: { + credentials_type: "ftp"; + } & Omit & ({ + credentials?: { + /** @description __Host__: Enter the FTP server URL. Do not include the connection protocol. */ + host: string; + /** + * @description __Port__: Enter the port used to access your FTP server. + * + * This is usually 21 for FTP and 22 for secure FTP. + */ + port: number; + /** + * @description __FTP Type__: Select the type of FTP protocol that is applicable for this connection. + * + * @enum {string} + */ + "ftp.type": "ftp" | "ftps" | "sftp"; + /** + * @description __Anonymous Access?__: Select this option if the connection should be made as an anonymous user. + * + * Only applicable if `ftp.type` is `ftp` or `ftps` + * + * Default value: `false` + */ + anonymous?: boolean; + /** + * @description __Username__: Enter the username used to access your FTP server. + * + * Only applicable if `ftp.type` is `ftp` or `ftps` or `anonymous` is `false`. + */ + username: string; + /** + * @description __Password__: Enter the password used to access your FTP server. + * + * Only applicable if `ftp.type` is `ftp` or `ftps` or `anonymous` is `false` and you are not using `private.key` for authentication + */ + password: string; + /** + * @description __Private Key__: You can use a private key for authentication instead of a password. + * + * Only applicable if you are not using `password` for authentication + */ + "private.key"?: string; + /** + * @description __Passphrase for Private Key__: Enter the passphrase for the private key file if it was generated with one. + * + * Only applicable if `ftp.type` is `sftp` and the `private.key` has a passphrase + */ + passphrase?: string; + /** + * @description __FTP mode__: Select whether the connection should be established in active or passive FTP connection mode. Usually, the default setting should be sufficient. 
+ * + * Default value: `passive.local` + * + * @enum {string} + */ + "ftp.mode"?: "active" | "passive.local" | "passive.remote"; + }; + }) & components["schemas"]["file_data_credential"]; + gcp_alloydb_data_credential: { + credentials_type: "gcp_alloydb"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. */ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + gcp_spanner_data_credential: { + credentials_type: "gcp_spanner"; + } & Omit & { + credentials?: { + /** + * @description __Is Service Account Authentication?__ We currently only support the __System User Authentication__ method, as it is best suited for accessing your data and is tied to the service account instead of the individual user account. Please contact your Nexla support team if you need the __End User Authentication__ mechanism. + * * Set to `true` if you wish to choose Google Service Account (System User Authentication). + */ + is_service_account: boolean; + /** @description __Project ID__: Enter the Project ID to which the GCP Spanner database that you wish to access belongs. 
*/ + project_id: string; + /** + * @description __Service Account JSON credentials content__: This is the content of the service account credentials JSON file generated by Google Cloud IAM, which is added to the payload as a JSON object. + * + * __Required__ if `is_service_account` is `true` + */ + json_creds: Record; + /** @description __Instance ID__: Enter the Instance ID to which the GCP Spanner database that you wish to access belongs. */ + instance_id: string; + /** @description Enter the Schema to which the GCP Spanner database that you wish to access belongs. Leave this field blank to use the default schema. */ + schema_name: string; + /** @description Enter the Database name of the GCP Spanner database that you wish to access. */ + database_name: string; + }; + } & components["schemas"]["database_data_credential"]; + gcs_data_credential: { + credentials_type: "gcs"; + } & Omit & { + credentials?: { + /** + * @description __Is Service Account Authentication?__ We support multiple ways of authenticating your GCS account. We recommend using the Google Service Account (System User Authentication) method, as it is best suited for accessing your data and is tied to the service account instead of the individual user account. + * * Set to `true` if you wish to choose Google Service Account (System User Authentication). + * * Set this to `false` if you wish to authenticate using 3-legged OAuth. + * Default Value: `false` + */ + is_service_account: boolean; + /** @description __Project ID__: The Project ID to which the GCS paths you wish to access belong. */ + project_id: string; + /** + * @description __Service Account JSON credentials content__: This is the content of the service account credentials JSON file generated by Google Cloud IAM, which is added to the payload as a JSON object. 
+ * + * __Required__ if `is_service_account` is `true` + */ + json_creds?: Record; + /** + * @description __End User Authentication Type__: This must be set to `OAUTH2` if you are choosing the 3-legged OAuth authentication mechanism. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "auth.type"?: "OAUTH2"; + /** + * @description __Client ID__: The Client ID of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.id"?: string; + /** + * @description __Client Secret__: The Client Secret of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.secret"?: string; + /** + * @description __Access Token URL__: The OAuth 2.0 Token URL used for fetching the token from the API token server. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.token.type.override"?: "Bearer"; + /** + * @description __Access Token URL method__: The request method used for the OAuth 2.0 Token URL. This must be `POST`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.method"?: "POST"; + /** + * @description __Nexla OAuth 2.0 connector codename__: Unique codename for the Nexla token refresh mechanism, which is used to identify this connector. This must be `gcs`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.name"?: "gcs"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. 
+ * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.token_type"?: "Bearer"; + /** + * @description __Authentication Payload Mode__: Set how the OAuth 2.0 Client ID and Client Secret should be sent with the Token URL. This must be set to `header`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.auth_scheme"?: "header"; + /** + * @description __API Scopes__: Scopes that should be added to the OAuth token calls. This should be `https://www.googleapis.com/auth/devstorage.read_write`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.scopes"?: "https://www.googleapis.com/auth/devstorage.read_write"; + /** + * @description __Refresh Token URL__: The OAuth 2.0 Token Refresh URL used for fetching a new access token using the current access token and refresh token. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.refresh_url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Requires Auto-Refresh of Token__: GCS Access tokens are short-lived, and the platform should automatically continuously refresh the token to retain a valid access token. This must be `true`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {boolean} + */ + "vendor.has_token_expiration_ts"?: true; + /** + * @description __Access token expiration time__: Set this to the expiration time (in seconds) of the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. This is usually `3599` for a GCS 3-legged OAuth workflow. 
+ * + * __Required__ if `is_service_account` is `false` + */ + "vendor.token_expires_in"?: number; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + access_token?: string; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. Note that this is the same as `access_token` above but is required in the payload to allow Nexla to automatically continuously refresh tokens. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.access_token"?: string; + /** + * @description __Refresh Token__: Set this to the refresh token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.refresh_token"?: string; + /** @description __GCS Path list access is limited to__ : Set this property to `` or `/` if your GCS admin has restricted access to only a specific bucket or a path inside a bucket. */ + "test.path"?: string; + }; + } & components["schemas"]["file_data_credential"]; + gdrive_data_credential: { + credentials_type: "gdrive"; + } & Omit & { + credentials?: { + /** + * @description __Is Service Account Authentication?__ We support multiple ways of authenticating your account. We recommend using the Google Service Account (System User Authentication) method, as it is best suited for accessing your data and is tied to the service account instead of the individual user account. + * * Set to `true` if you wish to choose Google Service Account (System User Authentication). + * * Set this to `false` if you wish to authenticate using 3-legged OAuth. 
+ * Default Value: `false` + */ + is_service_account: boolean; + /** @description __Project ID__: The Project ID to which the GCS paths that you wish to access belong. */ + project_id: string; + /** + * @description __Service Account JSON credentials content__: This is the content of the service account credentials JSON file generated by Google Cloud IAM, which is added to the payload as a JSON object. + * + * __Required__ if `is_service_account` is `true` + */ + json_creds?: Record; + /** + * @description __End User Authentication Type__: This must be set to `OAUTH2` if you are choosing the 3-legged OAuth authentication mechanism. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "auth.type"?: "OAUTH2"; + /** + * @description __Client ID__: The Client ID of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.id"?: string; + /** + * @description __Client Secret__: The Client Secret of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.secret"?: string; + /** + * @description __Access Token URL__: The OAuth 2.0 Token URL used for fetching the token from the API token server. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.token.type.override"?: "Bearer"; + /** + * @description __Access Token URL method__: The request method used for the OAuth 2.0 Token URL. This must be `POST`. 
+ * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.method"?: "POST"; + /** + * @description __Nexla OAuth 2.0 connector codename__: Unique codename for the Nexla token refresh mechanism, which is used to identify this connector. This must be `gdrive`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.name"?: "gdrive"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.token_type"?: "Bearer"; + /** + * @description __Authentication Payload Mode__: Set how the OAuth 2.0 Client ID and Client Secret should be sent with the Token URL. This must be set to `header`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.auth_scheme"?: "header"; + /** + * @description __API Scopes__: Scopes that should be added to the OAuth token calls. This should be `https://www.googleapis.com/auth/drive`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.scopes"?: "https://www.googleapis.com/auth/drive"; + /** + * @description __Refresh Token URL__: The OAuth 2.0 Token Refresh URL that is used for fetching a new access token using the current access token and refresh token. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.refresh_url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Requires Auto-Refresh of Token__: Access tokens are short-lived, and the platform should automatically continuously refresh the token to retain a valid access token. This must be `true`. 
+ * + * __Required__ if `is_service_account` is `false` + * + * @enum {boolean} + */ + "vendor.has_token_expiration_ts"?: true; + /** + * @description __Access token expiration time__: Set this to the expiration time (in seconds) of the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. This is usually `3599` for a 3-legged OAuth workflow. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.token_expires_in"?: number; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + access_token?: string; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. Note that this is the same as `access_token` above but is required in the payload to allow Nexla to automatically continuously refresh tokens. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.access_token"?: string; + /** + * @description __Refresh Token__: Set this to the refresh token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.refresh_token"?: string; + }; + } & components["schemas"]["file_data_credential"]; + google_pubsub_data_credential: { + credentials_type: "google_pubsub"; + } & Omit & { + credentials?: { + /** + * @description __Is Service Account Authentication?__ We support multiple ways of authenticating your account. We recommend using the Google Service Account (System User Authentication) method, as it is best suited for accessing your data and is tied to the service account instead of the individual user account. 
+ * * Set to `true` if you wish to choose Google Service Account (System User Authentication). + * * Set this to `false` if you wish to authenticate using 3-legged OAuth. + * Default Value: `false` + */ + is_service_account: boolean; + /** @description __Project ID__: The Project ID to which the resources that wish to access belong. */ + project_id: string; + /** + * @description __Service Account JSON credentials content__: This is the content of the service account credentials JSON file generated by Google Cloud IAM, which is added to the payload as a JSON object. + * + * __Required__ if `is_service_account` is `true` + */ + json_creds?: Record; + /** + * @description __End User Authentication Type__: This must be set to `OAUTH2` if you are choosing the 3-legged OAuth authentication mechanism. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "auth.type"?: "OAUTH2"; + /** + * @description __Client ID__: The Client ID of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.id"?: string; + /** + * @description __Client Secret__: The Client Secret of your OAuth 2.0 client. + * + * __Required__ if `is_service_account` is `false` + */ + "oauth2.client.secret"?: string; + /** + * @description __Access Token URL__: The OAuth 2.0 Token URL used for fetching the token from the API token server. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.token.type.override"?: "Bearer"; + /** + * @description __Access Token URL method__: The request method used for the OAuth 2.0 Token URL. 
This must be `POST`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "oauth2.access.token.method"?: "POST"; + /** + * @description __Nexla OAuth 2.0 connector codename__: The unique codename for the Nexla token refresh mechanism, which is used to identify this connector. This must be `google_pubsub`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.name"?: "google_pubsub"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. This must be `Bearer`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.token_type"?: "Bearer"; + /** + * @description __Authentication Payload Mode__: Set how the OAuth 2.0 Client ID and Client Secret should be sent with the Token URL. This must be set to `header`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.auth_scheme"?: "header"; + /** + * @description __API Scopes__: Scopes that should be added to the OAuth token calls. This should be `https://www.googleapis.com/auth/pubsub`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.scopes"?: "https://www.googleapis.com/auth/pubsub"; + /** + * @description __Refresh Token URL__: The OAuth 2.0 Token Refresh URL that is used for fetching a new access token using the current access token and refresh token. This must be `https://www.googleapis.com/oauth2/v4/token`. + * + * __Required__ if `is_service_account` is `false` + * + * @enum {string} + */ + "vendor.refresh_url"?: "https://www.googleapis.com/oauth2/v4/token"; + /** + * @description __Requires Auto-Refresh of Token__: Access tokens are short-lived, and the platform should automatically continuously refresh the token to retain a valid access token. This must be `true`. 
+ * + * __Required__ if `is_service_account` is `false` + * + * @enum {boolean} + */ + "vendor.has_token_expiration_ts"?: true; + /** + * @description __Access token expiration time__: Set this to the expiration time (in seconds) of the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. This is usually `3599` for a 3-legged OAuth workflow. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.token_expires_in"?: number; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + access_token?: string; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. Note that this is the same as `access_token` above but is required in the payload to allow Nexla to automatically continuously refresh tokens. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.access_token"?: string; + /** + * @description __Refresh Token__: Set this to the refresh token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Required__ if `is_service_account` is `false` + */ + "vendor.refresh_token"?: string; + }; + } & components["schemas"]["stream_data_credential"]; + hana_jdbc_data_credential: { + credentials_type: "hana_jdbc"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. 
*/ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + hive_data_credential: { + credentials_type: "hive"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** + * @description __Port__: Enter the port used to access your database. + * + * Default value: `10000` + */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Schema Name__: Enter the schema name for your database. */ + schema_name: string; + }; + } & components["schemas"]["database_data_credential"]; + jms_data_credential: { + credentials_type: "jms"; + } & Omit & ({ + credentials?: { + /** + * @description __JMS Vendor__: Select the type of vendor for this JMS connector. + * + * @enum {string} + */ + "jms.vendor": "tibco" | "activemq"; + /** @description __Server URL__: Enter the address of the JMS Server. This is usually a Virtual IP address. */ + url: string; + /** @description __Username__: Enter the username used to access this server. */ + username: string; + /** @description __Password__: Enter the password used to access this server. */ + password: string; + }; + }) & components["schemas"]["stream_data_credential"]; + kafka_data_credential: { + credentials_type: "kafka"; + } & Omit & ({ + credentials?: { + /** @description __Kafka Bootstrap Servers__: Enter the addresses of the Kafka brokers. These are usually in the form of Virtual IP addresses. 
*/ + "target.bootstrap.servers": string; + /** + * @description __Security Protocol__: Choose the appropriate security protocol for your Kafka channels. + * + * * `PLAINTEXT`: Un-authenticated, non-encrypted channel + * + * * `SASL_PLAINTEXT`: SASL-authenticated non-encrypted channel + * + * * `SASL_SSL`: SASL-authenticated SSL channel + * + * * `SSL`: SSL channel + * + * @enum {string} + */ + "security.protocol": "PLAINTEXT" | "SASL_PLAINTEXT" | "SASL_SSL" | "SSL"; + /** + * @description __Authentication Configuration (SASL JAAS format)__: Enter the Java Authentication and Authorization Service (JAAS) configuration to be used for SASL Authentication. This is usually in the format `org.apache.kafka.common.security.plain.PlainLoginModule required username= password=;`. + * + * __Required__ if `security.protocol` is `SASL_PLAINTEXT` or `SASL_SSL` + */ + "sasl.jaas.config"?: string; + /** + * @description __Consumer Group ID__: Enter the Group ID of the group to which Nexla consumers will belong. + * + * This is usually set to `nexla-consumer`. + */ + "group.id.prefix"?: string; + }; + }) & components["schemas"]["stream_data_credential"]; + min_io_s3_data_credential: { + credentials_type: "min_io_s3"; + } & Omit & ({ + credentials?: { + /** @description __MinIO Host__: The custom host URL for MinIO. */ + nx_external_host: string; + /** + * @description __Authentication Type__: The authentication mechanism for MinIO. + * + * * `min_io_s3`: Use this option to authenticate with MinIO S3. + * + * @enum {string} + */ + "nx_external.auth.type": "min_io_s3"; + /** @description __MinIO S3 Credentials__: Authentication credentials as a JSON string. Should include 'access_key_id' and 'secret_key'. */ + "nx_external.auth.props": string; + /** @description MinIO Access Key for accessing the MinIO S3 bucket. */ + access_key?: string; + /** @description MinIO Secret Key for accessing the MinIO S3 bucket. 
*/ + secret_key?: string; + /** @description __S3 Path list access is limited to__: Set this property to `` or `/` if your AWS admin has restricted access to only a specific bucket or a path inside a bucket. */ + "test.path"?: string; + /** + * @description __Enable Client Side Encryption?__: The platform can be configured to encrypt/decrypt S3 objects that require client-side encryption using the AWS Key Management System (KMS). Set this option if KMS encryption is applicable. + * + * Default value: `false` + */ + has_client_encryption?: boolean; + /** + * @description __Client Side Encryption Mode__: Select the type of KMS encryption mode that is applicable for this credential. + * + * __Required__ if `has_client_encryption` is `true` + * + * @enum {string} + */ + encryption_mode?: "EncryptionOnly" | "AuthenticatedEncryption" | "StrictAuthenticatedEncryption"; + /** + * @description __Amazon KMS Key for Encryption__: The KMS Key used for encrypting/decrypting objects. Please ensure that this user has appropriate KMS permissions. + * + * __Required__ if `has_client_encryption` is `true` + */ + kms_key?: string; + /** + * @description __Enable Server Side Encryption?__: The platform can be configured to encrypt/decrypt S3 objects that require server-side encryption. + * + * This can be done using either __Amazon S3-managed encryption keys (SSE-S3)__ or the __AWS Key Management Service (SSE-KMS)__. Set this option if server-side encryption is applicable. + * + * Default value: `false` + */ + "sse.enabled"?: boolean; + /** + * @description __Key ARN for SSE with KMS__: The Key ARN if you want server-side encryption to be performed via the AWS Key Management System. You can leave this field blank if you want to use Amazon S3-managed encryption keys. 
+ * + * __Required__ if `sse.enabled` is `true` + */ + "sse.kms_key.arn"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + mongo_data_credential: { + credentials_type: "mongo"; + } & Omit & ({ + credentials?: { + /** + * @description __URL Format__: You can enter the MongoDB authentication information as a MongoDB Java Connection URL (Version 3.4 or later) or as parts that will be combined by Nexla to create the connection string. + * + * * `connection_url`: __MongoDB Connection URL__ - Enter the MongoDB Java connection URL. For example, in MongoDB Atlas, please select connect your application → Driver: Java → version 3.4 or later. + * + * * `http_path_parts`: __HTTP Path Parts__ + * + * @enum {string} + */ + "ui.url_format"?: "connection_url" | "http_path_parts"; + /** + * @description __Connection URL__: Enter the MongoDB Connection URL of your MongoDB instance location. This should be in the form `mongodb://...`. + * + * For example, in MongoDB Atlas, please select connect your application → Driver: Java → version 3.4 or later. + * + * Note that currently, we only support connection URLs that direct to a specific database, so your connection string must be in the form `mongodb://:@.../?ssl=true...`. + * + * __Applicable and Required__ if `ui.url_format` is `connection_url`. + */ + url?: string; + /** + * @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format __company.domain.com__. Do not include the connection protocol. + * + * __Applicable and Required__ if `ui.url_format` is `http_path_parts`. + */ + host?: string; + /** + * @description __Port__: Enter the port number for your database. + * + * Default value: `27017`. + * + * __Applicable and Required__ if `ui.url_format` is `http_path_parts`. + */ + port?: number; + /** + * @description __Username__: Enter the username for your database. + * + * __Applicable and Required__ if `ui.url_format` is `http_path_parts`. 
+ */ + username?: string; + /** + * @description __Password__: Enter the password for your database. + * + * __Applicable and Required__ if `ui.url_format` is `http_path_parts`. + */ + password?: string; + /** + * @description __Database__: Limit access to a specific database. + * + * __Applicable__ if `ui.url_format` is `http_path_parts`. + */ + database?: string; + }; + }) & components["schemas"]["nosql_data_credential"]; + mysql_data_credential: { + credentials_type: "mysql"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + netsuite_jdbc_data_credential: { + credentials_type: "netsuite_jdbc"; + } & Omit & { + credentials?: { + /** @description __URL__: The database connection URL for Netsuite. */ + url: string; + /** + * @description __Access using Token-Based Authentication?__ Enable this checkbox to configure a token-based authentication mechanism. See https://docs.oracle.com/en/cloud/saas/netsuite/ns-online-help/article_163239884825.html for more information. + * + * Default value: `false` + */ + tba_enabled: boolean; + /** + * @description __Username__: Enter the username used to access your database. + * + * __Applicable and required__ if `tba_enabled` is `false` + */ + username?: string; + /** + * @description __Password__: Enter the password used to access your database. 
+ * + * __Applicable and required__ if `tba_enabled` is `false` + */ + password?: string; + /** + * @description __NetSuite account ID__: Your NetSuite account ID. You can find this value on the SuiteAnalytics Connect Driver Download Page under Your Configuration. + * + * __Applicable and required__ if `tba_enabled` is `true` + */ + account_id?: string; + /** + * @description __Consumer key__: The consumer key for the integration record. This string was created when you created the integration record. + * + * __Applicable and required__ if `tba_enabled` is `true` + */ + consumer_key?: string; + /** + * @description __Consumer Secret__: The consumer secret for the integration record. This string was created when you created the integration record. + * + * __Applicable and required__ if `tba_enabled` is `true` + */ + consumer_secret?: string; + /** + * @description __Token__: This is a string identifier or ID of a token that represents a unique combination of a user, a role, and an integration record. + * + * __Applicable and required__ if `tba_enabled` is `true` + */ + token?: string; + /** + * @description __Token Secret__: Token Secret generated when the Access Token was created. + * + * __Applicable and required__ if `tba_enabled` is `true` + */ + token_secret?: string; + /** + * @description __Server Zone ID__: A timestamp is needed to generate a token password. This timestamp must be within plus or minus five (+ or – 5) minutes of the server time. If your server is not in the UTC zone, please specify the correct server time zone. + * + * Default value: `UTC` + * + * __Applicable__ if `tba_enabled` is `true` + */ + server_zone_id?: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. 
*/ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + nexla_monitor_data_credential: { + credentials_type: "nexla_monitor"; + } & Omit; + oracle_data_credential: { + credentials_type: "oracle"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. */ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + oracle_autonomous_data_credential: { + credentials_type: "oracle_autonomous"; + } & Omit & { + credentials?: { + /** + * @description __Oracle Wallet__: Oracle Autonomous connections require a wallet that contains a collection of files, including key and other information required to connect to your database. + * + * Add the content of your Oracle wallet file downloaded from the database. This wallet is a zip file. Encode the zip file content as a Base64-encoded string in the request. + */ + oracle_wallet: string; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. 
*/ + database_name?: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. */ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + vector_db_data_credential: { + credentials?: Record; + }; + pinecone_data_credential: { + credentials_type: "pinecone"; + } & Omit & { + credentials?: { + /** @description __API Key__: Enter the API key for your Pinecone account. */ + api_key: string; + }; + } & components["schemas"]["vector_db_data_credential"]; + postgres_data_credential: { + credentials_type: "postgres"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. */ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + redshift_data_credential: { + credentials_type: "redshift"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually in the format `[name].[id].[region].redshift.amazonaws.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. 
*/ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the tables in which you are interested. */ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + rest_data_credential: { + credentials_type: "rest"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow API access through different rest connector authentication mechanisms supported by the API. Select the authentication mechanism that is best suited for your use case. + * + * __Tips__: + * + * 1. Select `NONE` if it is a public API. + * + * 2. Select `NONE` and set appropriate `request.headers` if the API requires hardcoded headers. + * + * 3. If the API requires API Key in header, while the option above will work, we recommend selecting `API_KEY` and setting appropriate options for how API Key is included in Header. + * + * 3. Select `OAUTH2` for 2-legged or 3-legged Oauth2 Authentication. + * + * 4. Choose appropriate settings if the API also requires `JWT` or `HMAC` signature verification. + * + * __Values__: + * * `NONE`: __No Auth Mode__ Use this mode if the API does not require any authentication, or if the authentication information is sent as custom request headers or url query parameters. + * + * * `BASIC`: __Basic Auth__ - Use this mode for any API that requires a verified username and password to be sent with every request. This username password is sent as `Basic ` on the Authorization request header. + * + * * `API_KEY`: __Api Key__ - Use this mode for any API that a key-value api key pair to be sent with every request, either as a Request Header or a Query parameter. 
+ * + * * `TOKEN`: __Token__ - Use this mode for any API that requires fetching a short-lived token before authenticated calls can be made for other endpoints. + * + * * `OAUTH1`: __OAuth 1.0__ - Use this mode for any API that requires multi-step OAuth1.0 based authentication workflow. Nexla supports 2-legged and 3-legged OAuth workflows and has ability to automatically refresh short-lived tokens. + * + * * `OAUTH2`: __OAuth 2.0__ - Use this mode for any API that requires multi-step OAuth2.0 based authentication workflow. Nexla supports 2-legged and 3-legged OAuth workflows and has ability to automatically refresh short-lived tokens. + * + * * `AWS_SIGNATURE`: __AWS Signature__ - Use this authorization workflow for Amazon Web Services requests. This is a special type of HMAC (Hash Message Authentication Code) authentication specifically for AWS. + * + * * `gcp_service_account`: __Google Service Account__ - Use this authorization workflow for Google Service Account authentication. + * + * @enum {string} + */ + "auth.type": "NONE" | "BASIC" | "API_KEY" | "TOKEN" | "OAUTH1" | "OAUTH2" | "AWS_SIGNATURE" | "gcp_service_account"; + /** + * @description __Username__: Enter the username field for basic authentication. Nexla will automatically combine this with the password and Base64-encode the pair. + * + * __Applicable and required__ if `auth.type` is `BASIC` or `TOKEN` + */ + "basic.username"?: string; + /** + * @description __Password__: Enter the password field for basic authentication. Nexla will automatically combine this with the username and Base64-encode the pair. + * + * __Applicable and required__ if `auth.type` is `BASIC` or `TOKEN` + */ + "basic.password"?: string; + /** + * @description __Where to add api key on API requests__: Configure how the api key should be included in subsequent requests. 
+ * + * __Applicable and required__ if `auth.type` is `API_KEY` + * + * @enum {string} + */ + "api.key.include.mode"?: "HEADER" | "URL_PARAMETER"; + /** + * @description __API Key Parameter Name__: If the API Key is sent as __URL_PARAMETER__: Enter the URL query parameter name that is used for sending the API Key as a URL parameter. + * + * If the API Key is sent as __Header__: Enter the request header name that should be used for sending the API Key. + * + * __Applicable and required__ if `auth.type` is `API_KEY` + */ + "api.key.auth.key"?: string; + /** + * @description __API Key Parameter Value__: If the API Key is sent as __URL_PARAMETER__: Enter the value that should be sent with the API Key Parameter Name query parameter. + * + * If the API Key is sent as __Header__: Enter the Header value that should be sent with the API Key Parameter Name request header. + * + * + * __Applicable and required__ if `auth.type` is `API_KEY` + */ + "api.key.auth.value"?: string; + /** + * @description __Fetch Token URL: URL to get token__: Enter the Token URL that should be accessed to fetch a token from the API token server. + * + * __Applicable and required__ if `auth.type` is `TOKEN` + */ + "token.auth.url"?: string; + /** + * @description __Fetch Token URL: HTTP method__: URL to get token__: Enter the request method for the Token URL. This is usually `POST`. + * + * __Applicable and required__ if `auth.type` is `TOKEN` + * + * @enum {string} + */ + "token.auth.method"?: "GET" | "POST" | "PUT"; + /** + * @description __Fetch Token URL: Header name for user/pwd__: Enter the name of the request header if the token URL requires authentication information like the username/password to be sent in a header when requesting a token. 
+ * + * __Applicable and required__ if `auth.type` is `TOKEN` + */ + "token.auth.token.header.name"?: string; + /** + * @description __Fetch Token URL: Request Body__: Enter the request body in JSON-object format if the token URL requires a payload to be sent when fetching a token. + * + * __Applicable and required__ if `auth.type` is `TOKEN` + */ + "token.auth.body"?: string; + /** + * @description __Fetch Token URL: Response format__: Select the response format of the Token URL request. + * + * This is usually `json` + * + * __Applicable and required__ if `auth.type` is `TOKEN` + * + * @enum {string} + */ + "token.auth.response.format"?: "xml" | "json" | "plain"; + /** + * @description __Fetch Token URL Response: Path to Token__: Enter the path of the token property in the response. + * + * This should be a valid __JSON Path__ or __XPath__ input depending on the __Fetch Token URL: Response format__ + * + * + * __Applicable and required__ if `auth.type` is `TOKEN` and `token.auth.response.format` is `xml` or `json` + */ + "token.auth.token.path"?: string; + /** + * @description __Where to add token on API requests__: Configure how the generated token should be included in subsequent requests. + * + * This is usually `HEADER` + * + * __Applicable and required__ if `auth.type` is `TOKEN` + * + * @enum {string} + */ + "token.auth.token.include.mode"?: "HEADER" | "URL_PARAMETER"; + /** + * @description __URL Parameter for Token in API Requests__: If the token is sent as a __URL_PARAMETER__, enter the URL parameter name used to send the token as a URL parameter. + * + * __Applicable and required__ if `auth.type` is `TOKEN` and `token.auth.token.include.mode` is `URL_PARAMETER` + */ + "token.auth.token.url.parameter"?: string; + /** + * @description __Header Name for Token in API Requests__: If the token is sent as a __Header__, enter the header name used to send the token. 
+ * + * __Applicable and required__ if `auth.type` is `TOKEN` and `token.auth.token.include.mode` is `HEADER` + * + * Usually, this is `Authorization` + */ + "token.auth.request.header.name"?: string; + /** + * @description __Send cookie in a header?__: If the `token.auth.token.include.mode` is set to `Header`, the platform can be configured to send a cookie together with a token in a request header. Set this option if the vendor requires a cookie for generating a token. + * + * __Applicable__ if `auth.type` is `TOKEN` and `token.auth.token.include.mode` is `HEADER`. + * + * Default: `false` + */ + "token.auth.token.include.cookie"?: boolean; + /** + * @description __Header Prefix for Token in API Requests__: If the token is sent as a __Header__, enter the prefix (if applicable) that should be prepended to the header value. + * + * __Applicable and required__ if `auth.type` is `TOKEN` and `token.auth.token.include.mode` is `HEADER`. + * + * Usually, this is `Bearer`. + */ + "token.auth.request.header.prefix"?: string; + /** + * @description __Type of OAuth1 Exchange__: Choose whether the OAuth token exchange should be 2-legged (i.e., server-server) or 3-legged (i.e., involving end-user authentication from the UI). + * + * __Note__: 3-legged OAuth clients require a Redirect URL when setting up the client. Make sure you have set `/oauth1Auth` as the redirect URL when setting up the client. Replace `` with the root URL of your Nexla site. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` + * + * @enum {string} + */ + "oauth1.token_exchange_type"?: "2-legged" | "3-legged"; + /** + * @description __URL for Request Token__: Enter the OAuth 1.0 Request URL used for fetching the request token. This is the first step of the 3-legged OAuth1.0 flow. 
+ * + * __Applicable and required__ if `auth.type` is `OAUTH1` and `oauth1.token_exchange_type` is `3-legged` + */ + "oauth1.request.url"?: string; + /** + * @description __(Optional) Request Token URL Parameters__: Enter any optional parameters that should be attached to the Request URL. + * + * This should be in the form `key1=value1&key2=value2`. You __do not__ need to include the `oauth_callback` and `oauth_consumer_key`. + * + * __Applicable__ if `auth.type` is `OAUTH1` and `oauth1.token_exchange_type` is `3-legged` + */ + "oauth1.request_url.req_url_params"?: string; + /** + * @description __Authorization URL__: Enter the OAuth 1.0 Authorization URL used to initiate user authorization. This is the second step of the 3-legged OAuth1.0 flow. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` and `oauth1.token_exchange_type` is `3-legged` + */ + "oauth1.auth.url"?: string; + /** + * @description __(Optional) Authorization URL Parameters__: Enter any optional parameters that should be attached to the Authorization URL. + * + * This should be in the form `key1=value1&key2=value2`. You __do not__ need to include properties that are autogenerated or received in previous steps. + * + * __Applicable__ if `auth.type` is `OAUTH1` and `oauth1.token_exchange_type` is `3-legged` + */ + "oauth1.auth_url.auth_url_params"?: string; + /** + * @description __Access Token URL__: Enter the OAuth 1.0 Token URL used to convert the request token into an access token. This is the final step of the 3-legged OAuth1.0 flow. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` and `oauth1.token_exchange_type` is `3-legged` + */ + "oauth1.token.url"?: string; + /** + * @description __(Optional) Access Token URL Parameters__: Enter any optional parameters that should be attached to the Token URL. + * + * This should be in the form `key1=value1&key2=value2`. You __do not__ need to include properties that are autogenerated or received in previous steps.
+ * + * __Applicable__ if `auth.type` is `OAUTH1` and `oauth1.token_exchange_type` is `3-legged` + */ + "oauth1.token_url.token_url_params"?: string; + /** + * @description __Signature Method__: Some API vendors require requests to be signed with a dynamic signature algorithm. Select the signature method that should be used to sign the requests. + * + * Default Value: `PLAINTEXT` + * + * __Applicable and required__ if `auth.type` is `OAUTH1` + * + * @enum {string} + */ + "oauth1.signature.method"?: "PLAINTEXT" | "HMAC-SHA1" | "HMAC-SHA256"; + /** + * @description __Consumer Key__: Enter the consumer key for your OAuth 1.0 client. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` + */ + "oauth1.consumer.key"?: string; + /** + * @description __Consumer Secret__: Enter the consumer secret for your OAuth 1.0 client. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` + */ + "oauth1.consumer.secret"?: string; + /** + * @description __Access Token__: Enter the access token for your OAuth 1.0 client. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` + */ + "oauth1.access.token"?: string; + /** + * @description __Access Token Secret__: Enter the access token secret for your OAuth 1.0 client. + * + * __Applicable and required__ if `auth.type` is `OAUTH1` + */ + "oauth1.access.token.secret"?: string; + /** + * @description __OAuth1 Advanced Configuration (JSON)__: Enter any other advanced configuration parameters like Realm, Version, etc. that might be required. This information should be entered as a valid JSON object. + * + * Default Value: `{}` + * + * __Applicable__ if `auth.type` is `OAUTH1` + */ + "oauth1.extra.parameters"?: string; + /** + * @description __Type of OAuth2 Exchange__: Choose whether the OAuth token exchange should be 2-legged (i.e., server-server) or 3-legged (i.e., involving end-user authentication from the UI). + * + * __Note__: 3-legged OAuth clients require a Redirect URL when setting up the client. 
Make sure you have set `<site-url>/oauth2Auth` as the redirect URL when setting up the client. Replace `<site-url>` with the root URL of this site. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + */ + "oauth2.token_exchange_type"?: string; + /** + * @description __Client ID__: Enter the Client ID for your OAuth 2.0 client. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + */ + "oauth2.client.id"?: string; + /** + * @description __Client Secret__: Enter the Client Secret for your OAuth 2.0 client. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + */ + "oauth2.client.secret"?: string; + /** + * @description __Authorization URL__: Enter the OAuth 2.0 Authorization URL used to initiate user authorization. This is the first step of the 3-legged OAuth 2.0 flow. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `oauth2.token_exchange_type` is `3-legged` + */ + "oauth2.auth.url"?: string; + /** + * @description __(Optional) Authorization Parameters__: Enter any optional parameters that should be attached to the Authorization URL. + * + * This should be in the form `key1=value1&key2=value2`. You __do not__ need to include properties like the __client_id__, __grant_type__, or __client_secret__. + * + * __Applicable__ if `auth.type` is `OAUTH2` and `oauth2.token_exchange_type` is `3-legged` + */ + "oauth2.auth_url.auth_url_params"?: string; + /** + * @description __Access Token URL__: Enter the OAuth 2.0 Token URL used for fetching tokens from the API token server. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + */ + "oauth2.access.token.url"?: string; + /** + * @description __(Optional) Access Token URL Parameters__: Enter any optional parameters that should be attached to the Token URL. + * + * This should be in the form `key1=value1&key2=value2`. You __do not__ need to include properties like the __client_id__, __grant_type__, or __client_secret__.
+ * + * __Applicable__ if `auth.type` is `OAUTH2` and `oauth2.token_exchange_type` is `3-legged` + */ + "oauth2.token_url.token_url_params"?: string; + /** + * @description __Token Type__: Enter the token type that should be used when attaching the OAuth2.0 token to the request. + * + * This is usually set to `Bearer` to indicate a Bearer token, although some API vendors might use a different name or case. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + */ + "oauth2.token.type.override"?: string; + /** + * @description __Access Token URL method__: Enter the request method used for the OAuth 2.0 Token URL. This is usually `POST`. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + * + * @enum {string} + */ + "oauth2.access.token.method"?: "GET" | "POST"; + /** + * @description __Authentication Payload Mode__: Select how the OAuth2 Client ID and Client Secret should be sent with the Token URL. + * + * * `header`: __Encoded Header__ - The OAuth client ID and secret information is attached to the Authentication header as a Base-64 encoded pair. + * + * * `form`: __Form Data__ - The OAuth client ID and secret information are attached in the form of data payload parameters. + * + * * `query`: __Query Parameters__ - The OAuth client ID and secret information are attached to the Token query as query parameters. + * + * __Applicable and required__ if `auth.type` is `OAUTH2`. Usually, this is `header`. + * + * @enum {string} + */ + "oauth2.client.auth.scheme"?: "header" | "form" | "query"; + /** + * @description __OAuth2: Token Request Body__: Nexla automatically includes standard OAuth2 token URL payload properties like __grant_type__, __client_id__ and __client_secret__. However, sometimes, OAuth servers require additional custom payload properties to be sent with the token URL request. + * + * Enter these __additional__ properties in the form of a JSON dictionary. 
+ * + * __Applicable__ if `auth.type` is `OAUTH2` and `oauth2.token_exchange_type` is `2-legged` + */ + "oauth2.auth.body"?: string; + /** + * @description __OAuth2: Token Request Headers__: Sometimes OAuth servers require additional custom headers to be sent with the token URL request. + * + * Add any additional request headers that must be sent as part of the token-obtaining request. Please input as comma-separated values, e.g., header1:value1,header2:value2. + * + * __Applicable__ if `auth.type` is `OAUTH2` and `oauth2.token_exchange_type` is `2-legged` + */ + "oauth2.auth.headers"?: string; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` + */ + "oauth2.access.token"?: string; + /** + * @description __Requires Auto-Refresh of Token__: Enable this option if the access tokens are short-lived and the platform should automatically continuously refresh the token to retain a valid access token. + * + * __Applicable__ if `auth.type` is `OAUTH2` + * + * Default value: `false` + */ + "vendor.has_token_expiration_ts"?: boolean; + /** + * @description __Nexla OAuth 2.0 connector codename__: Codename for the Nexla token refresh mechanism, which is used to identify this connector. Give any string value if this is a custom OAuth2.0 connector. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `vendor.has_token_expiration_ts` is `true` + */ + "vendor.name"?: string; + /** + * @description __Access token expiration time__: Set this to the expiration time (in seconds) of the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. This is usually `3599` for a GCS 3-legged OAuth workflow.
+ * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `vendor.has_token_expiration_ts` is `true` + */ + "vendor.token_expires_in"?: number; + /** + * @description __Token Type__: Enter the token type that should be used when attaching the OAuth2.0 token to the request. + * + * This is usually set to `Bearer` to indicate a Bearer token, although some API vendors might use a different name or case. + * + * __Applicable __ if `auth.type` is `OAUTH2` and `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.token_type"?: string; + /** @description __(Optional) API Scopes__: Enter any additional scopes that should be added to the OAuth token calls. These should be added as space-separated values (`scope1 scope2 scope3`). Nexla will automatically attach them to the request as the `scope` payload property. */ + "vendor.scopes"?: string; + /** + * @description __Refresh Token URL__: Enter the OAuth 2.0 Token Refresh URL used for fetching a new access token using the current access token and refresh token. This is usually the same as the OAuth2.0 Token URL. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `vendor.has_token_expiration_ts` is `true` + */ + "vendor.refresh_url"?: string; + /** + * @description __Authentication Payload Mode__: Select how the OAuth2 Client ID and Client Secret should be sent with the Token URL. + * + * * `header`: __Encoded Header__ - The OAuth client ID and secret information are attached to the Authentication header as a Base-64 encoded pair. + * + * * `form`: __Form Data__ - The OAuth client ID and secret information are attached in the form of data payload parameters. + * + * * `query`: __Query Parameters__ - The OAuth client ID and secret information are attached to the Token query as query parameters. 
+ * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `vendor.has_token_expiration_ts` is `true` + * + * @enum {string} + */ + "vendor.auth_scheme"?: "header" | "form" | "query"; + /** + * @description __Access Token__: Set this to the access token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. Note that this is the same as `access_token` above but is required in the payload to allow Nexla to automatically continuously refresh tokens. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `vendor.has_token_expiration_ts` is `true` + */ + "vendor.access_token"?: string; + /** + * @description __Refresh Token__: Set this to the refresh token received when you made the first token request as part of the 3-legged OAuth 2.0 token fetching sequence. + * + * __Applicable and required__ if `auth.type` is `OAUTH2` and `vendor.has_token_expiration_ts` is `true` + */ + "vendor.refresh_token"?: string; + /** + * @description __AWS Access Key__: The access key id of the Access Key pair that is used for verifying the identity of the requesting user. + * + * __Applicable and required__ if `auth.type` is `AWS_SIGNATURE`. + */ + "aws.access.key"?: string; + /** + * @description __AWS Secret Key__: The access secret of the Access Key pair that is used for verifying the identity of the requesting user. + * + * __Applicable and required__ if `auth.type` is `AWS_SIGNATURE`. + */ + "aws.secret.key"?: string; + /** + * @description __AWS Region__: The region that is receiving the request. + * + * __Applicable and required__ if `auth.type` is `AWS_SIGNATURE`. + */ + "aws.region"?: string; + /** + * @description __Service Name__: The service that is receiving the request. + * + * __Applicable and required__ if `auth.type` is `AWS_SIGNATURE`. + */ + "aws.service"?: string; + /** + * @description __Session Token__: Optional session token if the request requires temporary security credentials. 
This is added to the request as `x-amz-access-token` request header. + * + * __Applicable__ if `auth.type` is `AWS_SIGNATURE`. + */ + "aws.session.token"?: string; + /** + * @description __Google Service Account Credentials JSON__: Enter the content of the Service Account JSON file that was generated by Google Cloud IAM. This should be a valid JSON object. + * + * __Applicable and required__ if `auth.type` is `gcp_service_account`. + */ + "gcp.service.account.credentials.json"?: string; + /** + * @description __Google Service Account OAuth Scopes__: Enter the OAuth 2.0 Scopes for Google APIs when using GCP Service Account authentication. This should be a comma-separated string of one or more scopes. + * + * __Applicable__ if `auth.type` is `gcp_service_account`. + */ + "gcp.service.account.oauth.scopes"?: string; + /** + * @description __Skip Credential Validation__: Turn this option on to skip validation of the credential when the credential is being created. Note that credential validation is still done when the credential is used as part of a source/destination. However, sometimes some APIs don't have a good endpoint that can be used for just testing if the credential information is accurate or not. This optional property helps avoid setting `test.url` properties for such API vendors. + * + * __Default__: `false` + */ + "skip.validation"?: boolean; + /** + * @description __Credential Validation: URL__: Enter a URL from this API vendor that should be used to check whether or not the authentication mechanism works. + * + * 1. This does not need to be the same URL as the final endpoint that you wish to use. + * + * 2. We recommend using an endpoint that does not incur a noticeable response delay from the API server. For example, fetch only one item if accessing an endpoint that sends an array of items. + * + * 3. Use any public endpoint if you want to bypass credential validation. + * + * __Applicable and required__ if `skip.validation` is `false` or not specified.
+ */ + "test.url": string; + /** + * @description __Credential Validation: API Method__: Enter the API method used to execute the __Credential Validation: URL__. Usually, you should set this to GET for record fetching. + * + * __Applicable and required__ if `skip.validation` is `false` or not specified. + * + * @enum {string} + */ + "test.method": "GET" | "POST"; + /** + * @description __Credential Validation: Request Body__: Enter the request body in JSON-object format if the __Credential Validation: URL__ requires a payload. + * + * __Applicable and required__ if `skip.validation` is `false` or not specified. + */ + "test.body"?: string; + /** + * @description Credential Validation: Content type + * + * Usually, this is `application/json`. + * + * __Applicable and required__ if `skip.validation` is `false` or not specified. + * + * @enum {string} + */ + "test.content.type": "application/atom+xml" | "application/x-www-form-urlencoded" | "application/json" | "application/json;charset=UTF-8" | "application/octet-stream" | "application/pdf" | "application/problem+json" | "application/problem+json;charset=UTF-8" | "application/problem+xml" | "application/rss+xml" | "application/stream+json" | "application/xhtml+xml" | "application/xml"; + /** @description __Additional Request Headers__: Add any additional request headers that must be sent as part of every request. Please input these headers as comma-separated values. E.g., `header1:value1,header2:value2`. */ + "request.headers"?: string; + /** + * @description __Ignore SSL certificate validation?__: Enable this option if you need to allow an insecure SSL server connection by bypassing the SSL certificate validation of the API endpoint. + * + * This is only needed if the API vendor's SSL Certificate has expired. 
+ * + * Default value: `false` + */ + "ignore.ssl.cert.validation"?: boolean; + /** + * @description __Enable JWT Authentication?__: Sometimes, API vendors require JWT-based authorization as an added security layer in addition to the normal authentication methods. For example, vendors requiring __OAuth authentication__ might use the additional __JWT assertion__ part of the OAuth specification. + * + * Select this option and configure the associated properties only if the API vendor requires this authentication mechanism. + * + * Default value: `false` + */ + "jwt.enabled"?: boolean; + /** + * @description __JWT: Token URL__: Enter the token URL that must be called to initiate JWT authentication. + * + * __Required__ if `jwt.enabled` is `true` + */ + "jwt.auth.url"?: string; + /** + * @description __JWT: Token URL Request Body__: Enter the request body for the JWT token URL. This must be entered as a valid JSON object. + * + * You can use the `{token}` macro to dynamically substitute the JWT value. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.auth.body"?: string; + /** + * @description __JWT: Token URL Content type__: Enter the content type for the JWT Token URL. + * + * Usually, this is `application/x-www-form-urlencoded`. + * + * __Applicable__ if `jwt.enabled` is `true` + * + * @enum {string} + */ + "jwt.auth.content.type"?: "application/atom+xml" | "application/x-www-form-urlencoded" | "application/json" | "application/json;charset=UTF-8" | "application/octet-stream" | "application/pdf" | "application/problem+json" | "application/problem+json;charset=UTF-8" | "application/problem+xml" | "application/rss+xml" | "application/stream+json" | "application/xhtml+xml" | "application/xml"; + /** + * @description __JWT: Token Response Format__: Enter the response format of the JWT Token URL request. + * + * This is usually `json` or `xml`. 
+ * + * __Applicable__ if `jwt.enabled` is `true` + * + * @enum {string} + */ + "jwt.auth.response.format"?: "xml" | "json" | "plain"; + /** + * @description __JWT: Path to Token__: Enter the path of the token property in the response. + * + * This should be a valid __JSON Path__ or __XPath__ input depending on the __JWT: Token Response Format__. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.auth.token.path"?: string; + /** + * @description __JWT: Path to Token Type__: Enter the path of the token-type property in the response. + * + * This should be a valid __JSON Path__ or __XPath__ input depending on the __JWT: Token Response Format__. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.auth.token.type.path"?: string; + /** + * @description __JWT: Token Include Mode__: Configure how the generated JWT token should be included in subsequent requests. + * + * __Applicable__ if `jwt.enabled` is `true` + * + * @enum {string} + */ + "jwt.token.include.mode"?: "HEADER" | "URL_PARAMETER"; + /** + * @description __JWT: Token URL parameter__: If the __Token Include Mode__ is __URL_PARAMETER__, enter the URL parameter name used to send the JWT token as a URL parameter. + * + * __Applicable__ if `jwt.enabled` is `true` and `jwt.token.include.mode` is `URL_PARAMETER` + */ + "jwt.token.url.parameter"?: string; + /** + * @description __JWT: Token Header Name__: If the __Token Include Mode__ is __Header__, enter the request header name used to send the JWT token as a header. + * + * __Applicable__ if `jwt.enabled` is `true` and `jwt.token.include.mode` is `HEADER` + */ + "jwt.token.header"?: string; + /** @description __JWT: Token Header Type Prefix__: If the __Token Include Mode__ is __Header__, enter the prefix (if applicable) that should be prepended to the header value. */ + "jwt.token.header.type"?: string; + /** + * @description __JWT: Token Expiration Duration(sec)__: Enter the duration in seconds over which the JWT token is valid. 
+ * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.claim.expiration.sec"?: number; + /** + * @description __JWT: Token Secret__: Enter the secret key used to sign the JWT request and payload. + * + * This is not always required. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.token.secret"?: string; + /** + * @description __JWT: Signature Algorithm__: Select the algorithm used to generate the JWT token signature. + * + * This is usually `HS256`. + * + * __Applicable__ if `jwt.enabled` is `true` + * + * @enum {string} + */ + "jwt.token.algorithm"?: "NONE" | "HS256" | "HS384" | "HS512" | "RS256" | "RS384" | "RS512" | "ES256" | "ES384" | "ES512" | "PS256" | "PS384" | "PS512"; + /** + * @description __JWT: Scope Claim__: Enter the value of the scope claim used for token generation. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.claim.scope"?: string; + /** + * @description __JWT: Audience Claim__: Enter the value of the audience claim used for token generation. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.claim.audience"?: string; + /** + * @description __JWT: Issuer Claim__: Enter the value of the issuer claim used for token generation. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.claim.issuer"?: string; + /** + * @description __JWT: Extra Claims__: In addition to the standard claims of __audience__, __scope__ and __issuer__, you can also configure extra claims that must be verified. + * + * Enter any extra claims here. The input should be entered in the form of a valid JSON object. + * + * __Applicable__ if `jwt.enabled` is `true` + */ + "jwt.claim.extra.json"?: string; + /** + * @description __Enable HMAC Signature Based Authentication?__: Select this mode if the API vendor requires requests to be signed with a Signature generated using a custom HMAC algorithm. + * + * Provide details about the signature-generation algorithm in the associated input fields. 
+ * + * Default value: `false` + */ + "hmac.enabled"?: boolean; + /** + * @description __HMAC: API Key__: Enter the API key that will be used to sign requests. This is provided by the API vendor. + * + * __Applicable__ if `hmac.enabled` is `true` + */ + "hmac.api.key"?: string; + /** + * @description __HMAC: API Secret__: API vendors often require an API secret to also be used for HMAC signature authentication. Check the vendor's API documentation for more information. + * + * __Applicable__ if `hmac.enabled` is `true` + */ + "hmac.api.secret"?: string; + /** + * @description __HMAC: Signature Generator Function__: Enter the signature generator function that will generate the appropriate signature for each request. + * + * We currently support the __Base64-Encoded Scala function__ for generating HMAC signatures. + * + * Please contact your Nexla Account Manager for assistance with this input. + * + * __Applicable__ if `hmac.enabled` is `true` + */ + "hmac.func"?: string; + /** + * @description __Sign Requests with Certificates?__: Select this mode if the API vendor requires requests to be signed with a shared certificate. + * + * Provide details about the certificate in the associated input fields. + * + * Default value: `false` + */ + "ui.cert_signed"?: boolean; + /** + * @description __Client Certificate in P12 format__: Enter the content of the P12-formatted certificate file that will be used to sign the API requests. + * + * The P12 file typically consists of the private key and certificate chain. You can open the file in any text editor and copy its content here. + * + * + * __Applicable and required__ if `ui.cert_signed` is `true` + */ + "client.p12"?: string; + /** + * @description __P12 PassPhrase__: Enter the passphrase associated with the certificate file. 
+ * + * __Applicable__ if `ui.cert_signed` is `true` + */ + "client.p12.password"?: string; + }; + }); + s3_data_credential: { + credentials_type: "s3"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Mechanism__ Credentials can be configured to allow S3 access through different AWS permissions mechanisms. Select the type of Authentication method you wish to use. + * * `Access Key` - Select this option if you wish to use AWS Access and Secret keys for your S3. + * * `ARN` - Select this option if you wish to use IAM ARN for authentication. + * * `Instance Role` - Select this option if you wish to access S3 using an IAM Instance Role. + * @enum {string} + */ + s3_auth_type: "Access Key" | "ARN" | "Instance Role"; + /** + * @description __AWS Access Key__ + * + * __Required__ if `s3_auth_type` is `Access Key` + */ + access_key?: string; + /** + * @description __AWS Secret Key__ + * + * __Required__ if `s3_auth_type` is `Access Key` + */ + secret_key?: string; + /** + * @description __External Id__: The external ID if S3 has been configured for federated access. + * + * __Required__ if `s3_auth_type` is `ARN` + */ + external_id?: string; + /** + * @description Region for AWS S3 Account. + * + * Default value: `us-east-1` + */ + region?: string; + /** @description __IAM ARN__: The IAM Amazon Resource Name (ARN) for which these permissions are applicable. This should be entered in the format `arn:partition:service:region:account:resource`. */ + arn?: string; + /** @description __S3 Path list access is limited to__: Set this property to `` or `/` if your AWS admin has restricted access to only a specific bucket or a path inside a bucket. */ + "test.path"?: string; + /** + * @description __Enable Client Side Encryption?__: The platform can be configured to encrypt/decrypt S3 objects that require client-side encryption using the AWS Key Management System (KMS). Set this option if KMS encryption is applicable. 
+ * + * Default value: `false` + */ + has_client_encryption?: boolean; + /** + * @description __Client Side Encryption Mode__: Select the type of KMS encryption mode that is applicable for this credential. + * + * __Required__ if `has_client_encryption` is `true` + * + * @enum {string} + */ + encryption_mode?: "EncryptionOnly" | "AuthenticatedEncryption" | "StrictAuthenticatedEncryption"; + /** + * @description __Amazon KMS Key for Encryption__: The KMS Key used for encrypting/decrypting objects. Please ensure that this user has appropriate KMS permissions. + * + * __Required__ if `has_client_encryption` is `true` + */ + kms_key?: string; + /** + * @description __Enable Server Side Encryption?__: The platform can be configured to encrypt/decrypt S3 objects that require server-side encryption. + * + * This can be done using either __Amazon S3-managed encryption keys (SSE-S3)__ or the __AWS Key Management Service (SSE-KMS)__. Set this option if server-side encryption is applicable. + * + * Default value: `false` + */ + "sse.enabled"?: boolean; + /** + * @description __Key ARN for SSE with KMS__: The Key ARN if you want server-side encryption to be performed via the AWS Key Management System. You can leave this field blank if you want to use Amazon S3-managed encryption keys. + * + * __Required__ if `sse.enabled` is `true` + */ + "sse.kms_key.arn"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + s3_iceberg_data_credential: { + credentials_type: "s3_iceberg"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Mechanism__ + * Credentials can be configured to allow S3 access through different AWS permissions mechanisms. Select the type of Authentication method you wish to use. + * + * * `Access Key` - Select this option if you wish to use AWS Access and Secret keys for your S3. + * * `ARN` - Select this option if you wish to use IAM ARN for authentication. 
+ * * `Instance Role` - Select this option if you wish to access S3 using an IAM Instance Role. + * + * @enum {string} + */ + s3_auth_type: "Access Key" | "ARN" | "Instance Role"; + /** + * @description __AWS Access Key__ + * __Required__ if `s3_auth_type` is `Access Key` + */ + access_key_id?: string; + /** + * @description __AWS Secret Key__ + * __Required__ if `s3_auth_type` is `Access Key` + */ + secret_key?: string; + /** @description __IAM ARN__: The IAM Amazon Resource Name (ARN) for which these permissions are applicable. This should be entered in the format `arn:partition:service:region:account:resource`. */ + arn?: string; + /** + * @description __External Id__: The external ID if S3 has been configured for federated access. + * __Required__ if `s3_auth_type` is `ARN` + */ + external_id?: string; + /** + * @description Region for AWS S3 Account. + * Default value: `us-east-1` + */ + region?: string; + /** @description __S3 Path list access is limited to__: Set this property to `` or `/` if your AWS admin has restricted access to only a specific bucket or a path inside a bucket. */ + "test.path"?: string; + }; + }); + sharepoint_data_credential: { + credentials_type: "sharepoint"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Type__: Select the type of OAuth2 authentication mechanism you wish to use. + * + * * `3-legged`: Use 3-legged OAuth for authentication. + * * `2-legged`: Use 2-legged OAuth for authentication. + * + * @enum {string} + */ + "oauth2.token_exchange_type": "3-legged" | "2-legged"; + /** @description __Tenant ID__: The Tenant ID for your SharePoint account. */ + tenant_id: string; + /** + * @description __Access Token__: Click the button above to fetch a new token. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + access_token?: string; + /** + * @description __API Vendor__: The API vendor for SharePoint. 
+ * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + * + * @enum {string} + */ + "vendor.name"?: "sharepoint"; + /** + * @description __Vendor Token Type__: The token type for the vendor. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.token_type"?: string; + /** + * @description __Vendor Access Token__: The access token for the vendor. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.access_token"?: string; + /** + * @description __Vendor Refresh Token__: The refresh token for the vendor. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.refresh_token"?: string; + /** + * @description __Vendor Refresh Time UTC__: The last refresh time for the vendor token. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.last_refreshed_at"?: string; + /** + * @description __Vendor info contains expiration__: Indicates whether the vendor token contains expiration information. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.has_token_expiration_ts"?: boolean; + /** + * @description __Vendor Token Expiration Time__: The expiration time for the vendor token. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.token_expires_in"?: number; + /** + * @description __Other Vendor Info__: Additional information for the vendor. + * + * __Required__ if `oauth2.token_exchange_type` is `3-legged` + */ + "vendor.other_info"?: string; + /** + * @description __Client ID__: The Client ID of your OAuth 2.0 client. + * + * __Required__ if `oauth2.token_exchange_type` is `2-legged` + */ + "oauth2.client.id"?: string; + /** + * @description __Client Secret__: The Client Secret of your OAuth 2.0 client. + * + * __Required__ if `oauth2.token_exchange_type` is `2-legged` + */ + "oauth2.client.secret"?: string; + /** + * @description __Authentication Type__: The type of authentication used. 
+ * + * Default Value: `OAUTH2` + * + * @enum {string} + */ + "auth.type"?: "OAUTH2"; + /** + * @description __Access Token URL method__: The request method used for the OAuth 2.0 Token URL. + * + * Default Value: `POST` + * + * @enum {string} + */ + "oauth2.access.token.method"?: "POST"; + /** + * @description __Client Authentication Scheme__: The authentication scheme used for the OAuth 2.0 client. + * + * Default Value: `form` + * + * @enum {string} + */ + "oauth2.client.auth.scheme"?: "form"; + /** + * @description __Token Type__: The token type that should be used when attaching the OAuth2.0 token to the request. + * + * Default Value: `Bearer` + * + * @enum {string} + */ + "oauth2.token.type.override"?: "Bearer"; + /** + * @description __Skip Validation__: Indicates whether to skip validation. + * + * Default Value: `false` + */ + "skip.validation"?: boolean; + /** + * @description __Test Content Type__: The content type used for testing. + * + * Default Value: `application/json` + */ + "test.content.type"?: string; + /** + * @description __Test Method__: The request method used for testing. + * + * Default Value: `GET` + */ + "test.method"?: string; + /** + * @description __Test URL__: The URL used for testing. + * + * Recommended Value: `https://graph.microsoft.com/v1.0/sites` + */ + "test.url"?: string; + /** + * @description __API Scopes__: The scopes that should be added to the OAuth token calls. + * + * Recommended Value: `https://graph.microsoft.com/.default` + */ + "vendor.scopes"?: string; + }; + }) & components["schemas"]["file_data_credential"]; + snowflake_data_credential: { + credentials_type: "snowflake"; + } & Omit & ({ + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually in the format `.snowflakecomputing.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. 
*/ + port: number; + /** + * @description __Snowflake Authentication Type__: Credentials can be configured to allow Snowflake access through different Snowflake permissions mechanisms. Select the type of authentication method you wish to use. + * + * * `basic`: __Basic Authentication__: Select this option if you wish to use the Username and Password for Snowflake access. + * + * * `key_pair`: __Key Pair Authentication__: Select this option if you wish to use the Key Pair authentication mechanism to connect to your Snowflake account. + * + * @enum {string} + */ + "snowflake.auth.type": "basic" | "key_pair"; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** + * @description __Password__: Enter the password used to access your database. + * + * __Applicable and required__ if `snowflake.auth.type` is `basic` + */ + password?: string; + /** + * @description __Private Key__: Enter the file content of the private key file as a `base64` encoded string. To retrieve the file content, open the PEM file in any text editor, and copy over its content as the payload. Then `base64`-encode that content before adding it to the payload. + * + * __Applicable and required__ if `snowflake.auth.type` is `key_pair` + */ + "snowflake.privateKey"?: string; + /** + * @description __Passphrase for Private Key__: If the private key file was generated with a passphrase, please enter the passphrase here. + * + * __Applicable__ if `snowflake.auth.type` is `key_pair` + */ + "snowflake.privateKey.passphrase"?: string; + /** @description __Warehouse__: The Snowflake warehouse to which you wish to connect. */ + warehouse_name: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the database to which you wish to connect. 
*/ + schema_name: string; + /** + * @description __Access Control Roles__: Snowflake uses roles to control access to objects in the system. Roles are granted access privileges for objects in the system. + * + * Usually, you can leave this field blank. However, if your Snowflake configuration requires a special, non-default role to be applied to access the objects in which you are interested, you can set the Role here. + */ + "database.field.role"?: string; + /** @description __Query Tag__: Snowflake query tags allow users to associate arbitrary metadata with each query. This can be used to identify the query in the query history for enhanced observability. */ + "database.field.QUERY_TAG": string; + /** @description __Additional Connection Parameters__: Enter any additional connection parameters you need to pass to the warehouse while establishing the connection. The format should be `key1:value1,key2:value2` */ + "jdbc.parameters"?: string; + }; + }) & components["schemas"]["database_data_credential"]; + snowflake_dcr_data_credential: { + credentials_type: "snowflake_dcr"; + } & Omit & ({ + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually in the format `.snowflakecomputing.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** + * @description __Snowflake Authentication Type__: Credentials can be configured to allow Snowflake access through different Snowflake permissions mechanisms. Select the type of authentication method you wish to use. + * + * * `basic`: __Basic Authentication__: Select this option if you wish to use the Username and Password for Snowflake access. + * + * * `key_pair`: __Key Pair Authentication__: Select this option if you wish to use the Key Pair authentication mechanism to connect to your Snowflake account. 
+ * + * @enum {string} + */ + "snowflake.auth.type": "basic" | "key_pair"; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** + * @description __Password__: Enter the password used to access your database. + * + * __Applicable and required__ if `snowflake.auth.type` is `basic` + */ + password?: string; + /** + * @description __Private Key__: Enter the file content of the private key file as a `base64` encoded string. To retrieve the file content, open the PEM file in any text editor, and copy over its content as the payload. Then `base64`-encode that content before adding it to the payload. + * + * __Applicable and required__ if `snowflake.auth.type` is `key_pair` + */ + "snowflake.privateKey"?: string; + /** + * @description __Passphrase for Private Key__: If the private key file was generated with a passphrase, please enter the passphrase here. + * + * __Applicable__ if `snowflake.auth.type` is `key_pair` + */ + "snowflake.privateKey.passphrase"?: string; + /** @description __Warehouse__: The Snowflake warehouse to which you wish to connect. */ + warehouse_name: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** @description __Schema Name__: Enter the schema name for the database to which you wish to connect. */ + schema_name: string; + /** + * @description __Access Control Role__: Snowflake uses roles to control access to objects in the system. Roles are granted access privileges for objects in the system. + * + * Usually, you can leave this field blank. However, if your Snowflake configuration requires a special, non-default role to be applied to access the objects in which you are interested, you can set the Role here. 
+ */ + "database.field.role"?: string; + }; + }) & components["schemas"]["database_data_credential"]; + soap_data_credential: { + credentials_type: "soap"; + } & Omit & ({ + credentials?: { + /** + * @description __Authentication Mechanism__: Credentials can be configured to allow API access through different authentication mechanisms supported by the API. + * + * @enum {string} + */ + "auth.type": "NONE" | "BASIC"; + /** + * @description __Username__: Enter the username field for basic authentication. Nexla will automatically combine this with the password and Base64-encode the pair. + * + * __Required__ if `auth.type` is `BASIC` + */ + "basic.username"?: string; + /** + * @description __Password__: Enter the password field for basic authentication. Nexla will automatically combine this with the username and Base64-encode the pair. + * + * __Required__ if `auth.type` is `BASIC` + */ + "basic.password"?: string; + }; + }); + sqlserver_data_credential: { + credentials_type: "sqlserver"; + } & Omit & ({ + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format __company.domain.com__. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name?: string; + /** @description __Schema Name__: Enter the schema name for the database to which you wish to connect. */ + schema_name?: string; + /** + * @description __Connection Mode / Application Intent__: You can restrict database connectivity capabilities by specifying the connection mode. 
Select __Read Only__ to connect to the database in __readonly__ mode. + * + * * `readonly`: Connect in read-only mode. + * + * * `readwrite`: Connect in read-write mode. + * + * Default value: `readwrite` + * + * @enum {string} + */ + "database.field.applicationIntent"?: "readonly" | "readwrite"; + }; + }) & components["schemas"]["database_data_credential"]; + sybase_data_credential: { + credentials_type: "sybase"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is in the format `company.domain.com`. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. */ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + /** + * @description __Schema Name__: Enter the schema name for the tables you are interested in. + * + * Leave this input empty `` if you wish to access tables that do not belong to any schema. + */ + schema_name?: string; + }; + } & components["schemas"]["database_data_credential"]; + teradata_data_credential: { + credentials_type: "teradata"; + } & Omit & { + credentials?: { + /** @description __Host__: Enter the hostname for your database. This is usually an IP address or text in the format __company.domain.com__. Do not include the connection protocol. */ + host: string; + /** @description __Port__: Enter the port used to access your database. */ + port: number; + /** @description __Username__: Enter the username used to access your database. */ + username: string; + /** @description __Password__: Enter the password used to access your database. 
*/ + password: string; + /** @description __Database Name__: Enter the database name if you want to connect to a specific database. */ + database_name: string; + }; + } & components["schemas"]["database_data_credential"]; + tibco_data_credential: { + credentials_type: "tibco"; + } & Omit & ({ + credentials?: { + /** + * @description __JMS Vendor__: Select the type of vendor for this JMS connector. + * + * @enum {string} + */ + "jms.vendor": "tibco" | "activemq"; + /** @description __Server URL__: Enter the address of the JMS Server. This is usually a Virtual IP address. */ + url: string; + /** @description __Username__: Enter the username used to access this server. */ + username: string; + /** @description __Password__: Enter the password used to access this server. */ + password: string; + }; + }) & components["schemas"]["stream_data_credential"]; + webdav_data_credential: { + credentials_type: "webdav"; + } & Omit & { + credentials?: { + /** @description __WebDAV Server URL__: Enter the server URL for accessing the WebDAV server. This should be a fully formatted URL that includes the connection protocol. */ + url: string; + /** + * @description __Anonymous access?__: Set this to `true` if the connection should be made as an anonymous user. + * + * Default value: `false` + */ + anonymous?: boolean; + /** + * @description __Username__: Enter the username used to access your database. + * + * __Applicable and required__ if `anonymous` is `false` + */ + username?: string; + /** + * @description __Password__: Enter the password used to access your database. + * + * __Applicable and required__ if `anonymous` is `false` + */ + password?: string; + }; + } & components["schemas"]["file_data_credential"]; + /** @description This object represents the response of an asynchronous operation. The response can be a dictionary or an array of dictionaries. The format of the response depends on the operation that was executed. 
*/ + AsyncResponse: { + /** @description The unique ID of the request that was executed. This ID can be used to track the status of the request. */ + request_id?: number; + /** @enum {string} */ + status?: "pending" | "running" | "completed" | "failed" | "cancelled"; + /** + * Format: date-time + * @description The date and time when the request was started. + */ + request_started_at?: string; + /** @description The progress of the request (value from 0 to 100). This value is a percentage of the total progress of the request. Applicable only to certain types of async operations. */ + progress?: number; + /** @description The result of the async operation. Format depends on the task type. */ + result?: { + [key: string]: unknown; + }; + /** + * Format: date-time + * @description The date and time when the request was stopped. This field is only present if the request has been stopped (due to cancellation or error). + */ + request_stopped_at?: string; + /** + * Format: date-time + * @description The date and time when the request was completed. This field is only present if the request has been completed successfully. + */ + request_completed_at?: string; + }; + probe_response_with_async_results: OneOf<[components["schemas"]["AsyncResponse"], { + /** @description The output of the probe operation. */ + output?: { + [key: string]: unknown; + }; + }]>; + probe_tree_common: { + /** + * @description Response status code from the third-party storage system to which this credential connects. `Ok` indicates that the request resulted in a valid response from the storage system. + * + * @enum {string} + */ + status?: "ok"; + /** + * @description Message string indicating response validity from the underlying storage system. `Ok` indicates that the request resulted in a valid response from the storage system. + * Errors are returned as relevant message strings indicating the failure reasons. 
+ * + * @enum {string} + */ + message?: "Ok"; + /** @description __Connector Type__: Connector codename. */ + connection_type?: string; + }; + file_probe_tree: { + connection_type: "azure_blb"; + } & Omit & { + /** + * @description This object reports the folder-file structure in a nested JSON format for visualizing the file storage hierarchy tree. + * + * The unique name/identifier of any element in the storage system is set as the key of the relevant node. Each node entry contains the following: + * 1. a `type` property that defines whether the element is a `folder`, `file`, or `meta` (Nexla probe response meta-data). + * 2. the top-level node has a special `meta` key that contains response metadata (of the type `meta`) and a key that indicates the name/unique ID of the root folder. + * - for storage systems like `s3` that have a bucket concept, the top-level keys are all bucket names to which this credential has access. + * - for storage systems like `gdrive` that have a unique ID and synthetic display names, the keys represent unique IDs instead of folder names. + * - for storage systems like `ftp`, `dropbox`, and `box` that do not have buckets, the top-level entry is `/` to signify the credential root folder. + * 2. a `value` property that defines the children of that node. + * - For a folder, the `value` is an object, with each subfolder and file as an entry. + * - For a file, the `value` is an empty object `{}` because files do not have any children. + * 3. `file`-type nodes have `created_at`, `updated_at`, and `size` properties that indicate the corresponding values for that file. + * 4. For Google Drive-type systems, wherein the storage assigns a unique identifier instead of constraining the display name to be unique, the `value` object contains a `display_name` property to indicate the name of that entry. 
+ * + * __Example__ Sample value of `output` in the probe tree response: + * ``` + * { + * "/": { + * "type": "folder", + * "value": { + * "subfolder_1": { + * "type": "folder", + * "value": { + * "subfolder_1a": { + * "type": "folder" + * "value": {} + * } + * }, + * "subfolder_2": { + * "type": "folder", + * "value": { + * "subfolder_2a": { + * "type": "folder" + * "value": {} + * }, + * "file_1": { + * "type": "file", + * "created_at": 1664998398000, + * "updated_at": 1664998398000, + * "size": 152, + * "value": {} + * } + * }, + * } + * } + * ``` + */ + object?: { + /** @description Name or unique identifier of the folder/subfolder. This value might be `/` at the root of the hierarchy for some storage types, like `dropbox` and `ftp`. */ + folder_name?: { + /** + * @description The type `folder` indicates that this property is the name of a folder. + * + * @enum {string} + */ + type?: "folder"; + /** @description This value indicates the display name of this folder and is only present for storage systems like `gdrive`, in which display names are not unique. */ + display_name?: string; + /** @description List of subfolders and files in this folder. */ + value?: { + /** @description Name of the file. */ + file_name?: { + /** @enum {string} */ + type?: "file"; + /** @description File creation time in epoch time (milliseconds). */ + created_at?: number; + /** @description File modification time (most recent) in epoch time (milliseconds). */ + updated_at?: number; + /** @description File size in bytes. */ + size?: number; + /** @description This value indicates the display name of the file and is only present for storage systems like `gdrive`, in which display names are not unique. */ + display_name?: string; + }; + }; + }; + /** @description A special metadata object that indicates the relevant response Nexla metadata for probe responses. This is only present at the root level. 
*/ + meta?: { + /** @enum {string} */ + type?: "meta"; + value?: Record; + }; + }; + }; + database_probe_tree: { + connection_type: "as400"; + } & Omit & ({ + /** + * @description This object reports the database-table-column structure in a nested JSON format for visualizing the database hierarchy tree. + * + * The unique name/identifier of any element in the storage system is set as the key of the relevant node. Each node entry contains the following: + * 1. a `type` property that defines whether the element is a `database`, `table`, or `column`. + * 2. a `value` property that defines the children of that node. + * - For a database, the `value` is an object, with each table included as a property of the object. + * - For a table, the `value` is an array of objects, with each object matching a column of the database. + * - Column objects do not have any `value` property because columns do not have any children. + * 3. `column`-type nodes have `name`, `primaryKey` and `defaultValue` properties that define the column properties. + * + * __Example__ Sample value of `output` in the probe tree response. + * ``` + * { + * "DEMO_DB": { + * "type": "database", + * "value": { + * "DEMO_TABLE": { + * "type": "table", + * "value": [ + * { + * "name": "CREATIONDATE", + * "primaryKey": false, + * "type": "VARCHAR", + * "defaultValue": null + * }, + * { + * "name": "PROJECTNAME", + * "primaryKey": false, + * "type": "VARCHAR", + * "defaultValue": null + * }, + * { + * "name": "PROJECTYEAR", + * "primaryKey": false, + * "type": "VARCHAR", + * "defaultValue": null + * } + * ] + * } + * } + * ``` + */ + object?: { + /** @description Name of database. */ + db_name_1?: { + /** + * @description The type `database` indicates that this property is the name of a database. + * + * @enum {string} + */ + type?: "database"; + /** @description List of tables in the database. */ + value?: { + /** @description Name of the table. 
*/ + table_name_1?: { + /** @enum {string} */ + type?: "table"; + value?: ({ + /** @enum {string} */ + type?: "column"; + /** @description Name of the column. */ + name?: string; + /** @description Whether or not this column is a primary key for the table. */ + primaryKey?: boolean; + /** @description Whether or not the column has a default value. */ + defaultValue?: null | string | number; + })[]; + }; + }; + }; + }; + }); + nosql_probe_tree: { + connection_type: "dynamodb"; + } & Omit & { + /** + * @description This object reports the database-collection structure in nested JSON format for visualizing the database hierarchy tree. + * + * The unique name/identifier of any element in the storage is set as the key of the relevant node. Each node entry contains the following: + * 1. a `type` property that defines whether the element is a `database` or `collection`. + * 2. a `value` property that defines the children of that node. + * - For a database, the `value` is an object, with each collection as a property of the object. + * - For a collection, this is an empty object (`{}`), as the platform does not list documents inside the collection as part of this response. Use the `probe/sample` endpoint to obtain sample documents from the collection. + * + * __Example__ Sample value of `output` in the probe tree response: + * ``` + * { + * "DEMO_DB": { + * "type": "database", + * "value": { + * "demo collection_1": { + * "type": "collection", + * "value": {} + * }, + * "demo collection_2": { + * "type": "collection", + * "value": {} + * } + * + * } + * ``` + */ + object?: { + /** @description Name of the database. */ + db_name_1?: { + /** + * @description The type `database` indicates that this property is the name of a database, + * + * @enum {string} + */ + type?: "database"; + /** @description List of collections in the database. */ + value?: { + /** @description Name of the collection. 
*/ + collection_name_1?: { + /** @enum {string} */ + type?: "collection"; + /** @enum {object} */ + value?: unknown; + }; + }; + }; + }; + }; + probe_tree_with_async: { + connection_type: "probe_tree_with_async"; + } & (Omit | components["schemas"]["AsyncResponse"]); + /** @enum {string} */ + ConnectorTypeFile: "azure_blb" | "azure_data_lake" | "box" | "delta_lake_azure_blb" | "delta_lake_azure_data_lake" | "delta_lake_s3" | "dropbox" | "ftp" | "gcs" | "gdrive" | "min_io_s3" | "s3" | "s3_iceberg" | "sharepoint" | "webdav"; + /** @enum {string} */ + ConnectorTypeDatabase: "as400" | "aws_athena" | "azure_synapse" | "bigquery" | "cloudsql_mysql" | "cloudsql_postgres" | "cloudsql_sqlserver" | "databricks" | "db2" | "firebolt" | "gcp_alloydb" | "gcp_spanner" | "hana_jdbc" | "hive" | "mysql" | "netsuite_jdbc" | "oracle" | "oracle_autonomous" | "postgres" | "redshift" | "snowflake" | "snowflake_dcr" | "sqlserver" | "sybase" | "teradata"; + /** @enum {string} */ + ConnectorTypeNoSql: "dynamodb" | "firebase" | "mongo"; + /** @enum {string} */ + ConnectorTypeKafka: "confluent_kafka" | "google_pubsub" | "jms" | "kafka" | "tibco"; + /** @enum {string} */ + ConnectorTypeVectorDB: "pinecone"; + probe_sample: { + /** + * @description Response status code from the third-party storage system to which this credential connects. `Ok` indicates that the request resulted in a valid response from the storage system. + * + * @enum {string} + */ + status?: "ok"; + /** + * @description Message string indicating response validity from the underlying storage system. `Ok` indicates that the request resulted in a valid response from the storage system. + * Errors are returned as relevant message strings indicating the failure reasons. 
+ * + * @enum {string} + */ + message?: "Ok"; + connection_type?: components["schemas"]["ConnectorTypeFile"] & components["schemas"]["ConnectorTypeDatabase"] & components["schemas"]["ConnectorTypeNoSql"] & components["schemas"]["ConnectorTypeKafka"] & components["schemas"]["ConnectorTypeVectorDB"] & "rest" & "soap"; + output?: OneOf<[{ + /** @description File content type. Usually, this is `application/json`, `text/plain`, or `application/binary`. */ + contentType?: string; + /** @description Storage system status code for the sample request. Usually, this is 200 for a successful sample attempt and other status codes for unsuccessful sample attempts. */ + statusCode?: number; + /** @description Sample lines from the file. */ + response?: string; + }, { + /** @description Response content type. Usually, this is `application/json`. */ + contentType?: string; + /** @description Storage system status code for the sample request. Usually, this is 200 for a successful sample attempt and other status codes for unsuccessful sample attempts. */ + statusCode?: number; + response?: { + /** @description An array of column names in the sample response. */ + columns?: string[]; + /** @description An array of data rows in the sample response. */ + data?: ({ + [key: string]: number | string | Record; + })[]; + }; + }, { + /** + * @description Response content type. Usually, this is `application/json`. + * + * @enum {string} + */ + contentType?: "application/json"; + /** @description Storage system status code for the sample request. Usually, this is 200 for a successful sample attempt and other status codes for unsuccessful sample attempts. */ + statusCode?: number; + /** @description JSON string in which each key is a document name and the value of each key is the content of the corresponding document. */ + response?: string; + }, { + /** @description Connector response content type. Usually, this is `application/json`, `text/plain`, or `application/binary`. 
*/ + contentType?: string; + /** @description Storage system status code for the sample request. Usually, this is 200 for a successful sample attempt and other status codes for unsuccessful sample attempts. */ + statusCode?: number; + /** @description API response from the 3rd party (connector) API. */ + response?: string; + }]>; + }; + probe_sample_with_async: components["schemas"]["AsyncResponse"] | components["schemas"]["probe_sample"]; + FlowChildNode: { + id?: number; + /** @description The flow id of the flow node this node is a direct descendant of. */ + parent_node_id?: number; + /** @description The flow id of the flow node at the root of this flow chain. */ + origin_node_id?: number; + /** + * Format: nullable + * @description The ID of the data source this flow node is linked to if this is a flow node for a data source. + */ + data_source_id?: number; + /** + * Format: nullable + * @description The ID of the Nexset this flow node is linked to if this is flow node for a Nexset. + */ + data_set_id?: number; + /** + * Format: nullable + * @description The ID of the data sink this flow node is linked to if this is a flow node for a data sink. + */ + data_sink_id?: number; + /** @description Each element is a flow node that is directly linked to this node. */ + children?: unknown[]; + }; + FlowOriginNode: { + id?: number; + /** + * Format: nullable + * @description Flow id of the parent flow node if this node is not the root node in the flow chain. This will usually be `null` as most flow definitions will originate in the node for `data_source`. + */ + parent_node_id?: number; + /** @description Flow id of the root node in the flow chain. */ + origin_node_id?: number; + /** + * Format: nullable + * @description The ID of the data source this flow node is linked to if this is a flow node for a data source. 
+ */ + data_source_id?: number; + /** + * Format: nullable + * @description The ID of the Nexset this flow node is linked to if this is flow node for a Nexset. + */ + data_set_id?: number; + /** + * Format: nullable + * @description The ID of the data sink this flow node is linked to if this is a flow node for a data sink. + */ + data_sink_id?: number; + /** Format: nullable */ + shared_origin_node_id?: number; + status?: string; + /** Format: nullable */ + project_id?: number; + flow_type?: string; + ingestion_mode?: string; + name?: string; + description?: string; + /** @description Each element of this array is a flow node that is directly linked to this flow node. */ + children?: components["schemas"]["FlowChildNode"][]; + }; + FlowNodes: components["schemas"]["FlowOriginNode"][]; + FlowElements: { + /** @description All code containers that are linked to flow nodes in this response. */ + code_containers?: { + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: null; + data_credentials_id?: null; + public?: boolean; + managed?: boolean; + reusable?: boolean; + resource_type?: string; + output_type?: string; + code_type?: string; + code_encoding?: string; + access_roles?: unknown[]; + tags?: unknown[]; + copied_from_id?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }[]; + /** @description All data sources that are linked to flow nodes in this response. 
*/ + data_sources?: ({ + id?: number; + owner_id?: number; + org_id?: number; + flow_node_id?: number; + origin_node_id?: number; + name?: string; + description?: null; + status?: string; + data_credentials_id?: number | null; + data_sink_id?: number | null; + auto_generated?: boolean; + managed?: boolean; + source_type?: string; + connector_type?: string; + connection_type?: string; + template_config?: Record; + vendor?: null; + access_roles?: unknown[]; + tags?: unknown[]; + copied_from_id?: number | null; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + })[]; + /** @description All Nexsets that are linked to flow nodes in this response. */ + data_sets?: ({ + id?: number; + owner_id?: number; + org_id?: number; + flow_node_id?: number; + origin_node_id?: number; + name?: string; + description?: string; + status?: string; + data_source_id?: number | null; + parent_data_set_id?: number | null; + code_container_id?: number | null; + data_sink_ids?: unknown[]; + public?: boolean; + managed?: boolean; + access_roles?: unknown[]; + tags?: unknown[]; + copied_from_id?: number | null; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + })[]; + /** @description All data sinks that are linked to flow nodes in this response. 
*/ + data_sinks?: ({ + id?: number; + owner_id?: number; + org_id?: number; + flow_node_id?: number; + origin_node_id?: number; + name?: string; + description?: null; + status?: string; + data_credentials_id?: number; + data_set_id?: number; + data_source_id?: number | null; + managed?: boolean; + sink_type?: string; + connector_type?: string; + connection_type?: string; + template_config?: Record; + vendor?: null; + access_roles?: unknown[]; + tags?: unknown[]; + copied_from_id?: null; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + })[]; + /** @description All credentials that are referenced by flow nodes in this response. */ + data_credentials?: { + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: null; + credentials_type?: string; + verified_status?: string; + managed?: boolean; + template_config?: Record; + vendor?: null; + access_roles?: unknown[]; + tags?: unknown[]; + copied_from_id?: null; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }[]; + /** @description Metadata about the parent Nexset any relevant origin flow node is a descendant of. This is only relevant for flow nodes that originate in a shared Nexset instead of a data source. */ + shared_data_sets?: unknown[]; + orgs?: components["schemas"]["org"][]; + users?: components["schemas"]["owner"][]; + /** @description All projects that any of the flows in this flow response are linked to. 
*/ + projects?: { + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: string; + access_roles?: unknown[]; + }[]; + }; + flow_one_with_async: OneOf<[{ + flows?: components["schemas"]["FlowOriginNode"][]; + } & components["schemas"]["FlowElements"], components["schemas"]["AsyncResponse"]]>; + data_source: { + name?: string; + description?: string; + /** + * @description __Credential ID__: Nexla data credential that contains all authentication information for this source. + * + * Note that this is not applicable for `file_upload`, `nexla_rest` and `email` sources but is required for all other connectors. + */ + data_credentials_id?: number; + /** @description Field only used for ELT Vendor Endpoints */ + stream_config?: Record; + /** @description __Connector Type__: Connector codename. */ + source_type?: string; + /** @description ID of code container to attach to this source. */ + code_container_id?: number; + /** + * @description Code Container details that will be created and attached to this source. Note: you can't pass both `code_container` and `code_container_id` together, + * code_container_id will take priority. + */ + code_container?: { + code?: string | Record | unknown[]; + name?: string; + description?: string; + /** @enum {string} */ + resource_type?: "source" | "source_custom"; + /** @enum {string} */ + code_type?: "jolt_standard" | "jolt_custom" | "python" | "python3" | "javascript" | "flink_sql" | "spark_sql"; + /** @enum {string} */ + code_encoding?: "none" | "base64"; + code_config?: Record; + custom_config?: Record; + /** @enum {string} */ + repo_type?: "embedded" | "github"; + repo_config?: Record; + }; + }; + WithoutCDCSupport: { + source_config?: { + /** + * @description __Scheduling Frequency__: The interval at which Nexla should scan this source for new data. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 
2022 + */ + "start.cron": string; + /** + * @description __Database Fetch Mode__: Database connectors are designed to support self-serve capabilities for use cases ranging from simple table ingestion to ingestion based on complex custom queries. + * + * * `Default`: Equivalent to running a simple (but optimized) SELECT clause on any database table, along with some additional customizations for filtering rows. + * + * * `Query`: Execute data fetching based on a custom database query using the syntax and convention supported by the underlying database/warehouse. + * + * @enum {string} + */ + db_query_mode: "Default" | "Query"; + /** + * @description __Query__: Set the Query to be executed during each ingestion cycle. + * + * __Required__ if `db_query_mode` is `Query` + */ + query?: string; + /** + * @description __Database__: Database whose table needs to be scanned for data. + * + * __Required__ if `db_query_mode` is `Default` + */ + database?: string; + /** + * @description __Table__: Table that needs to be scanned for data. + * + * __Required__ if `db_query_mode` is `Default` + */ + table?: string; + /** + * @description __Table Scan Mode__: The default configuration of Table Mode is set to read all data in a table in each ingestion cycle, which is equivalent to a running `SELECT` clause on the table. However, when the table contains much more historical data than you wish to scan, you can instruct the platform to begin loading from a specific __id__ (stored in a numeric column), __timestamp__ (stored in a date-time column) or __both id and timestamp__. + * + * You can use the relevant Table Scan Mode to address this use case of partial loading from a table. + * + * Note that this can also be achieved by writing a properly structured query in Query Mode. 
+ * + * Only applicable if `db_query_mode` is `Default` + * Default Value: `none` + * + * * `none`: Read the whole table + * + * * `incrementing`: Start reading from a specific ID + * + * * `timestamp`: Start reading from a specific timestamp + * + * * `incrementing,timestamp`: Start reading from a specific ID and timestamp + * + * @enum {string} + */ + mode?: "none" | "incrementing" | "timestamp" | "incrementing,timestamp"; + /** + * @description __ID Column__: The ID column that should be used for executing partial data loading from the selected table. This must be a numeric column. + * + * Only applicable if `db_query_mode` is `Default` and `mode` is `incrementing` or `incrementing,timestamp` + */ + "incrementing.column.name"?: string; + /** + * @description __Starting ID__: The starting ID value of __ID Column__ from which the platform should start ingesting data. + * + * Only applicable if `db_query_mode` is `Default` and `mode` is `incrementing` or `incrementing,timestamp` + */ + "incrementing.load.from"?: string; + /** + * @description __Timestamp Column__: The timestamp column that should be used for executing partial data loading from the selected table. This must be a date-time column. + * + * Only applicable if `db_query_mode` is `Default` and `mode` is `timestamp` or `incrementing,timestamp` + */ + "timestamp.column.name"?: string; + /** + * @description __Starting Timestamp__: The timestamp value of the __Timestamp Column__ from which the platform should start ingesting data. This must be a UNIX epoch- (milliseconds) or ISO-formatted date value (e.g., 2016-01-01T12:13:14). 
+ * + * Only applicable if `db_query_mode` is `Default` and `mode` is `timestamp` or `incrementing,timestamp` + */ + "timestamp.load.from"?: number | string; + /** + * Format: boolean + * @description __Perform Database Commit after Read?__: You can instruct the platform to execute a database commit if the query includes statements that should also be committed to the database after ingestion. This is typically not the case, so most of the time, the value should be left as false. + * + * Only applicable if `db_query_mode` is `Query` + * + * Default Value: `"false"` + * + * @enum {string} + */ + "commit.on.read"?: "false" | "true"; + }; + }; + as400_data_source: { + source_type: "as400"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + aws_athena_data_source: { + source_type: "aws_athena"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + file_data_source: { + source_config?: { + /** + * @description __Scheduling Frequency__: The interval at which Nexla should scan this source for new files. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + /** @description __Path to Scan__: The path of the folder to be scanned for new data. */ + path: string; + /** + * @description __Choose a pre-built File Processor__: Select a file processor that best matches the format of your files. The platform offers several built-in options to handle various file types and formats. You can also combine these with custom file parsing code for more advanced use cases. + * + * The platform's default configurations of automatically detecting Nexsets from file content are primarily driven by parsing appropriately based on file extensions. + * + * + * However, you can override the default parsing and further customize how the files are processed by selecting appropriate File Content Format choices. + * + * Some common scenarios are - + * + * 1. 
Files without extensions to indicate what type of parser should be applied + * + * 2. Compressed Zip or Tar files. + * Select Custom Text Format, and the platform will automatically un-compress and parse the un-compressed files based on those settings. + * 3. Text files with fixed-width or custom column delimiters + * 4. CSV files without a header row. The platform will automatically assign attribute names as __attribute1__, __attribute2__, etc. + * 5. Structured files in which some lines have to be skipped before processing rows + * 6. Files in which the name extension does not match the desired parser (e.g., JSON content in a .dat file) + * 7. Files for which some customization has to be applied to designate how the default extension parser treats data. + * + * * `Auto Detect`: (Automatic) Automatically detect how to parse each file. Select this option for most use cases, even if the source contains files in more than one format. The platform will automatically detect the file format ( AVRO, CSV, EDI, Excel, JSON, ORC, Parquet, TSV, XML, etc.) based on the file extensions. + * * `Custom Text Format`: (Custom Text Parser) Force all files to be parsed as custom text files. + * * `XML`: (XML Parser) Force all files to be parsed as XML-formatted files. + * * `JSON`: (JSON Parser) Force all files to be parsed as JSON-formatted files. + * * `EDI`: (EDI Parser) Force all files to be parsed as EDI-formatted files. + * * `Log File`: (Log File Parser) Force all files to be parsed as Log File-formatted files. + * * `ORC`: (ORC Parser) Force all files to be parsed as ORC-formatted files. + * * `AVRO`: (AVRO Parser) Force all files to be parsed as AVRO files. + * * `Parquet`: (Parquet Parser) Force all files to be parsed as Parquet files. + * * `Excel`: (Excel Parser) Force all files to be parsed as Excel files. + * * `Fixed Width`: (Fixed Width File Parser) Force all files to be parsed as fixed-width files. + * * `PDF`: (PDF Parser) Force all files to be parsed as PDF files. 
+ * * `Unstructured`: (Unstructured.io Parser) This file parsing options leverages Unstructured.io's AI to parse unstructured data. Choose this option if you want customizations to how the files are parsed and Automatic parsing is not sufficient. + * * `SWIFT`: (SWIFT File Parser) This file parser is useful when all the files are SWIFT file. Choose this option if you want customizations to how the files are parsed and Automatic parsing is not sufficient. + * * `Raw File Data`: (Override with Custom File Processor) Use this option if you want to bypass Nexla's native parsers and only want to use custom file processors for parsing file content. + * + * @enum {string} + */ + advanced_settings?: "Auto Detect" | "Custom Text Format" | "XML" | "JSON" | "EDI" | "Log File" | "ORC" | "AVRO" | "Parquet" | "Excel" | "Fixed Width" | "PDF" | "Unstructured" | "SWIFT" | "Raw File Data"; + /** + * @description __Force Files to detect to:__ Configure this setting to make the platform ignore file extensions and instead parse files as if they had a different extension. + * + * For example, if the files have a `.dat` extension but contain JSON content, set this to `*:json`, and the platform will process the files as if they were JSON files. 
+ * + * __Required__ if `advanced_settings` is anything other than `Auto Detect` + * + * * `*:txt` if `advanced_settings` is `Custom Text Format` + * * `*:xml` if `advanced_settings` is `XML` + * * `*:json` if `advanced_settings` is `JSON` + * * `*:edi` if `advanced_settings` is `EDI` + * * `*:log` if `advanced_settings` is `Log File` + * * `*:orc` if `advanced_settings` is `ORC` + * * `*:avro` if `advanced_settings` is `AVRO` + * * `*:parquet` if `advanced_settings` is `Parquet` + * * `*:xlsx` if `advanced_settings` is `Excel` + * * `*:fw` if `advanced_settings` is `Fixed Width` + * * `*:pdf` if `advanced_settings` is `PDF` + * * `*:unstructured` if `advanced_settings` is `Unstructured` + * * `*:swift` if `advanced_settings` is `SWIFT` + * + * @enum {string} + */ + "overriden.extensions"?: "*:txt" | "*:xml" | "*:json" | "*:edi" | "*:log" | "*:orc" | "*:avro" | "*:parquet" | "*:xlsx" | "*:fw" | "*:pdf" | "*:unstructured" | "*:swift"; + /** + * @description __Text Delimiter__: Delimiters are used to automatically split each row of data into multiple attributes. Select one of these common delimiters, or enter the character that you wish to use as a delimiter. + * + * Only applicable if `advanced_settings` is `Custom Text Format` + * Default Value: `,` + * + * Common Values: + * * `,`: Comma + * * `\t`: Tab + * * `\\n`: Newline + * * `;`: Semicolon + * * `|`: Pipe + * * `^`: Caret + */ + "csv.delimiter"?: string; + /** + * @description __Text Qualifier Character__: Usually, text in one row is separated into multiple attributes based on the delimiter. A qualifier character is used to wrap together the text that should be treated as one attribute even if it includes occurrences of the delimiter.\\n\\n Leave this field blank if you wish to use the default qualifier `"`. For example, most CSV file generators use `"` as the qualifier; thus, `"one,two"` can be one attribute. 
+ * + * Only applicable if `advanced_settings` is `Custom Text Format` + * Default Value: `"` + */ + "csv.quote.char"?: string; + /** + * @description __Escape Character__: When reading CSV files, delimiters (like commas) separate fields and text qualifiers (like quotes) wrap fields. But sometimes these special characters need to appear within the field values themselves. The escape character is used to indicate that the character following it should be treated as regular text, not as a delimiter or qualifier. + * + * By default, backslash (`\\`) is used as the escape character. If your data contains backslashes that should be treated as regular text (not as escape characters), you should leave this field empty. + * If your CSV parsing is failing, try leaving this field empty, especially if your data uses doubled quotes (`\"`) to escape quotes or if you want backslashes to be treated as regular characters. + * + * Only applicable if `advanced_settings` is `Custom Text Format` + * Default Value: `\\` + */ + "csv.escape.char"?: string; + /** + * @description __Schema Attribute Detection Mode__: Configure this option to control how the platform should set Nexset schema attribute names for each column of data. Set this option to __generated__ if your file does not have a row that should be treated as the header row, and the platform will assign attribute names like `attribute1` and `attribute2`. + * + * Only applicable if `advanced_settings` is `Custom Text Format` + * Default Value: `header` + * + * * `header`: Select this option if the file contains a header row. Detected Nexset attribute names will be derived from the column names in the header row. + * + * * `generated`: Select this option if the file does not contain a header row. Nexla will automatically assign attribute names to each column of data. 
+ */ + "csv.schema.detection"?: string; + /** + * @description __Skip Lines at the Head__: The platform can be configured to skip the first few lines of each file before it starts ingesting data into records. Set this option if a fixed number of rows in each file should be ignored. + * + * Only applicable if `advanced_settings` is `Custom Text Format` + * Default Value: `0` + */ + "csv.skip.first.lines"?: number; + /** + * @description __Schema Attribute Detection Mode__: Configure this option to control how the platform should set Nexset schema attribute names for each column of data. Set this option to __generated__ if your file does not contain a row that should be treated as the header row, and the platform will assign attribute names like `attribute1` and `attribute2`. + * + * Only applicable if `advanced_settings` is `Excel` + * Default Value: `header` + * + * * `header`: Select this option if the file contains a header row. Detected Nexset attribute names will be derived from the column names in the header row. + * + * * `generated`: Select this option if the file does not contain a header. Nexla will automatically assign attribute names to each column of data. + * + * @enum {string} + */ + "excel.schema.detection"?: "header" | "generated"; + /** + * @description __(Optional) Data Records Cell Range__: Configure the relevant cell range if you want only specific cells in your sheets to be scanned. + * + * This is an optional field. Leave the field blank if you want all cells to be ingested. You can specify multiple ranges by using comma-separated values. + * + * For example, `sheet1!A1:B5,sheet2!C2:D5` will ingest cells `A1:B5` from `sheet 1` and cells `C2:D5` from `sheet2`. + * + * Only applicable if `advanced_settings` is set to `Excel` + */ + sheets?: string; + /** + * @description __(Optional) Metadata Cell Range__: By setting the `Data Records Cell Range` above you have configured the part of file that should be ingested by Nexla as individual records. 
+ * + * But sometimes, you might want some common data in the file outside those cells that should be included in each record. Set this field to the appropriate cells path format and Nexla will include those attributes in each ingested record. + * + * 1. General format is `_:|...|:` + * + * 2. Split attribute cell from value cell by using `:` delimiter (`:`) + * + * 3. Split different key-value pairs by using `|` delimiter (`:|:`) + * + * 4. Specify sheetname following with `_` to read from the specific sheet. + * + * 5. Do not specify sheetname if you want to read from first single sheet by default. + * + * Only applicable if `advanced_settings` is set to `Excel` + */ + "excel.range.additional"?: string; + /** + * @description __Skip Merged cells?__: The platform can be configured to skip merged cells when it is ingesting data into records. Select this option if you want merged cells to be skipped. + * + * Only applicable if `advanced_settings` is set to `Excel` + * Default Value: `false` + */ + "excel.skip.merged.cells"?: boolean; + /** + * @description __JSON Ingestion Mode__: JSON text files generated by data processing systems are often saved in JSON Line format, in which each row in the file is a valid JSON object instead of the entire file being a valid JSON object. + * + * Only applicable if `advanced_settings` is set to `JSON` + * * `row`: Each row is a separate JSON object. + * * `entire.file`: The entire file is a valid JSON object. + * + * @enum {string} + */ + "json.mode"?: "row" | "entire.file"; + /** + * @description __JSON Path To Data (when Ingestion Mode = entire.file)__: When JSON Ingestion Mode is set to `entire.file`, you can configure the platform to only ingest part of the JSON object. + * + * Set this path in JSON Path format to configure the area of the file from this Source that should be processed. 
+ * + * Only applicable if `advanced_settings` is set to `JSON` and `json.mode` is `entire.file` + */ + "json.path"?: string; + /** + * @description __Path to Additional JSON data__: Setting the `JSON Path to Data` above configures the part of the data file that should be ingested by Nexla as individual records. + * + * However, sometimes, you might want some common data in the file that is located outside of the entered JSON Path to be included in each record. Set this field to the appropriate file content in JSON path format, and Nexla will include that attribute in each ingested record. + * + * Only applicable if `advanced_settings` is set to `JSON` and `json.mode` is `entire.file` + */ + "json.path.additional"?: string; + /** + * @description __XML Ingestion Mode__: XML files generated by data processing systems are often saved such that each row of the file is a valid XML object instead of the entire file being a valid XML object. + * + * Only applicable if `advanced_settings` is set to `XML` + * + * * `row`: Each row is a separate XML object. + * * `entire.file`: Entire file is a valid XML object. + * + * @enum {string} + */ + "xml.mode"?: "row" | "entire.file"; + /** + * @description __XPath to Data (when Ingestion Mode = entire.file)__: When XML Ingestion Mode is set to `entire.file` you can configure the platform to only ingest part of the XML object. + * + * Set this path in XPath format to configure the area of the file from this Source that should be processed. + * + * Only applicable if `advanced_settings` is set to `XML` and `xml.mode` is set to `entire.file` + */ + "xml.xpath"?: string; + /** + * @description __Path to Additional XML data__: Setting the `XPath to Data` above configures the part of the data file that should be ingested by Nexla as individual records. + * + * However, sometimes, you might want some common data in the file that is located outside of the entered XPath to be included in each record. 
Set this field to the appropriate file content in XPath format, and Nexla will include that attribute in each ingested record. + * + * Only applicable if `advanced_settings` is set to `XML` and `xml.mode` is set to `entire.file` + */ + "xml.xpath.additional"?: string; + /** + * @description __EDI XPath__: Set the path (in XPath format) to the area of the file that should be processed as EDI content. Note that this option is necessary when the entire file isn't a valid EDI file and only a portion of the file content is valid EDI content + * + * Only applicable if `advanced_settings` is set to `EDI` + */ + "edi.xpath"?: string; + /** + * @description __Grok Pattern__: Each line of a log file is parsed into multiple attributes by applying a Grok Pattern. + * + * Grok works by combining text patterns into something that matches your logs. Select one of these predefined patterns, or write your own custom Grok Pattern that should be applied when parsing log files. + * **Predefined Patterns**: Depending on your log file content, one of these predefined patterns might be best suited for your use case. Alternately you can also type in your own custom Grok Pattern. + * - `%{SYSLOGBASE}` + * - `%{COMBINEDAPACHELOG}` + * - `%{NAGIOSLOGLINE}` + * - `%{POSTGRESQL}` + * - `%{REDISLOG}` + * - `%{JAVASTACKTRACEPART}` + * + * Only applicable if `advanced_settings` is set to `Log File` + * + * @enum {string} + */ + "grok.pattern"?: "%{SYSLOGBASE}" | "%{COMBINEDAPACHELOG}" | "%{NAGIOSLOGLINE}" | "%{POSTGRESQL}" | "%{REDISLOG}" | "%{JAVASTACKTRACEPART}"; + /** + * @description __Length of Each Field in File__: Enter the length of each field in the file. This should be entered as a comma-separated list of numbers, for ex: `10,12,8,15,20`. + * + * __Required__ and __applicable__ only if `advanced_settings` is set to `Fixed Width`. 
+ */ + "field.lengths"?: string; + /** + * @description __Schema Attribute Detection Mode__: Configure this option to control how the platform should set Nexset schema attribute names for each column of data. + * + * * `header`: Select this option if the file contains a header row. Detected Nexset attribute names will be derived from the column names in the header row. + * + * * `generated`: Select this option if the file does not contain a header row. Nexla will automatically assign attribute names to each column of data. + * + * __Applicable__ only if `advanced_settings` is set to `Fixed Width`. + * + * @enum {string} + */ + "fixed.width.schema.detection.mode"?: "header" | "generated"; + /** + * @description __Padding Character__: Choose or type the character that acts as the separator between fields. For ex: `_`. + * + * Only applicable if `advanced_settings` is `Fixed Width` + * Default: single space ` ` + * + * Common Values: + * * ` `: Single Space + * * `` : No Padding + * * `,`: Comma + * * `\\t`: Tab + * * `\\n`: Newline + * * `;`: Semicolon + * * `|`: Pipe + * * `^`: Caret + */ + "padding.character"?: string; + /** + * @description __Auto detect line separators?__: Set this option on if you wish the platform to automatically detect line separators in the document. + * + * Only applicable if `advanced_settings` is `Fixed Width` + * Default: true + */ + "line.separator.detection.enabled"?: boolean; + /** + * @description __Line Separation Character__: Specify the character that should be recognized by the platform as an indicator of a new line. + * + * Only applicable if `advanced_settings` is `Fixed Width` and `line.separator.detection.enabled` is set to `true`. + * + * Default: `\\n` + * + * Common Values: + * * `\\t`: Tab + * * `\\n`: Newline + */ + "line.separator"?: string; + /** + * @description __Remove Quote Characters?__: Set this option to true if you wish the platform to force remove quotes from strings where it is safe to do so. 
+ * + * Only applicable if `advanced_settings` is `Fixed Width`. + * Default: false + */ + "remove.quotes.forced"?: boolean; + /** + * @description __Quote Character__: Set the character to be treated as a quote character. This is relevant if the Remove Quotes Character setting is enabled. + * + * Leave this field blank if you wish to use the default qualifier `"` + * + * Only applicable if `advanced_settings` is `Fixed Width` and `remove.quotes.forced` is set to `true`. + */ + "quote.char"?: string; + /** + * @description __Force Scalar Coercion of String Values?__: Set this option on if you wish to force coercion of string values to scalar types. + * + * Only applicable if `advanced_settings` is `Fixed Width` + * Default: true + */ + "scalar.coercion"?: boolean; + /** + * @description __Skip Lines at the Head__: The platform can be configured to skip the first few lines of each file before it starts ingesting data into records. Set this option if a fixed number of rows in each file should be ignored. + * + * Only applicable if `advanced_settings` is `Fixed Width` + * Default Value: `0` + */ + "fixed.width.skip.first.lines"?: number; + /** + * @description __Skip Lines at the Tail__: The platform can be configured to skip the last few lines of each file before it starts ingesting data into records. Set this option if a fixed number of rows in each file should be ignored. + * + * Only applicable if `advanced_settings` is `Fixed Width` + * Default Value: `0` + */ + "fixed.width.skip.last.lines"?: number; + /** + * @description __Parsing mode__: Choose the mode for extracting text from the PDF Files. + * + * The `text` strategy is used to extract the textual layer from the PDF file. It will emit one record per page. The `semi-auto` strategy uses hints to extract structured data from the PDF file. + * + * * `text`: This strategy is used to extract the textual layer from the PDF file. 
It will emit one record per page with two attributes: `type` with value `text`, and `text` with value equal to extracted content of entire page. + * + * * `semi-auto`: This strategy uses hints to extract structured data from the PDF file. + * + * * `auto-1`: (auto-textract) This strategy uses Textract for parsing PDF file. + * + * * `auto-2`: (auto-tesseract) This strategy uses Tesseract for parsing PDF file. + * + * * `image`: (image) This strategy exports each page as a Base64-encoded PNG image. + * + * __Applicable__ only if `advanced_settings` is set to `PDF`. + * + * @enum {string} + */ + "pdf.parsing.strategy"?: "text" | "semi-auto" | "auto-1" | "auto-2" | "image"; + /** + * @description __Page Metadata Inclusion Mode__: Choose the mode for including page metadata in the generated content. This is relevant if you are generating embeddings from the source and wish to include information like page number in the embedded content. + * + * * `default`: (Default: Exclude From Data) This strategy is used to extract the textual layer from the PDF file. It will emit one record per page with two attributes: `type` with value `text`, and `text` with value equal to extracted content of entire page. + * + * * `embedded`:(Embedded: Include With Embeddings) Choose this option if you wish to include page metadata in embeddings. This is only relevant if you have opted into generating embeddings. + * + * * `extended`: (Extended: Include Without Embeddings) Choose this option if you wish to include page metadata in the generated content when embeddings are not generated. + * + * * `table.as.message`: (Message Per Table) Choose this option if you wish to emit one message per table in the document. This option is useful if your processing logic requires table-level granularity. + * + * * `document.as.message`: (File Content as Message) Choose this option if you wish to emit one message for entire file. 
This option is useful if your processing logic requires file-level granularity. + * + * __Applicable__ only if `advanced_settings` is set to `PDF`. + * + * @default default + * @enum {string} + */ + "pdf.rendering.strategy"?: "default" | "embedded" | "extended" | "table.as.message" | "document.as.message"; + /** + * @description __Extract Text Blocks?__: When parsing a PDF file only table blocks are extracted. This option allows you to extract additional text blocks between table blocks as record. + * + * __Applicable__ only if `advanced_settings` is set to `PDF` and `pdf.parsing.strategy` is `semi-auto`. + * + * Default: true + */ + "pdf.parsing.emitTextBlocks"?: boolean; + /** + * @description __(Optional) Document Password__: Enter the password for opening and processing the PDF files if they are password protected. + * + * __Applicable__ only if `advanced_settings` is set to `PDF`. + */ + "pdf.document.password"?: string; + /** + * @description __Placeholder Text for empty values__: Set the text placeholder to be used if a cell in the parsed table is empty. + * + * __Applicable__ only if `advanced_settings` is set to `PDF`. + * + * Default: `(blank)` + */ + "pdf.parsing.emptyValuePlaceholder"?: string; + /** + * @description __Configuration Settings for Extracting Tables__: Set this property with all configuration settings required for extracting Nexla records from different slices of structured data in the file. This property must be a valid JSON object, with some or all of the keys listed below. + * + * 1. `columns`: This must be an array of column names in the table. This is the only **required** property in this object. Column names should be listed from left to right or top to bottom. There are two special values : `(blank)` for a column without a name in the table and `(column)` or `(column=Desired Name)` for empty columns. + * + * 2. `forceBoldHeaders`: A boolean that defaults to false. 
If true, this property only treats headers that are in bold as attribute names. + * + * 3. `forceFullWidth`: A boolean that defaults to true. If true, the parser uses entire page width as table width, else the parser tries to treat least possible width as table width. + * + * 4. `header`: Configures whether columns are defined horizontally (value `ROW`) or vertically (value `COLUMN`). + * + * 5. `tupleNumber`: An integer value if the header mode is set to COLUMN and the table does not take up entire page width. + * + * 6. `spacing`: A double data type used for tuning for complex documents. Defaults to 2.5 + * + * + * __Applicable and required__ if `advanced_settings` is set to `PDF` and `pdf.parsing.strategy` is `semi-auto`. + * + * Example: `[{\n\t\"columns\": [ ]\n}]` + */ + "pdf.parsing.tables"?: string; + /** + * @description __Extract Text Blocks?__: Set this option to true if you wish to extract text blocks from the PDF file. + * + * __Applicable__ only if `advanced_settings` is set to `PDF` and `pdf.parsing.strategy` is `auto-1`. + * + * Default Value: `true` + */ + "pdf.parsing.auto-1.outputText"?: boolean; + /** + * @description __Extract Table Blocks?__: Set this option to true if you wish to extract table blocks from the PDF file. + * + * __Applicable__ only if `advanced_settings` is set to `PDF` and `pdf.parsing.strategy` is `auto-1`. + * + * Default Value: `true` + */ + "pdf.parsing.auto-1.outputTables"?: boolean; + /** + * @description __Image Scale__: Set the scale factor for the image. + * + * __Applicable__ only if `advanced_settings` is set to `PDF` and `pdf.parsing.strategy` is `image`. + * + * Default Value: `1.25` + */ + "pdf.parsing.image.scale"?: string; + /** + * @description __Content Type for Compressed Files__: In case the files in your file system are compressed files, set this to the expected content type of the files after they have been uncompressed. + * + * * `application/pdf`: Set this if the files are compressed PDF files. 
+ * + * __Applicable__ only if `advanced_settings` is set to `Unstructured`. + * + * @enum {string} + */ + "unstructured.gz.uncompressed.content.type"?: "application/pdf"; + /** + * @description __Content Encoding Method__: Set this to the content encoding method that should be used for decoding text input. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured`. + */ + "unstructured.encoding"?: string; + /** + * @description __Include Page Breaks?__: Set this option to true if you wish to include page breaks in the extracted content. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured`. + * + * @default false + */ + "unstructured.include.page.breaks"?: boolean; + /** + * @description __Infer Table Structure In PDFs?__: Set this to true if you wish to include 'text_as_html' metadata for table elements within PDFs. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured`. + * + * @default false + */ + "unstructured.pdf.infer.table.structure"?: boolean; + /** + * @description __Chunking Strategy__: Set this to the strategy for chunking the returned elements. Leave it blank for default strategy. + * + * * ``: Default strategy. + * * `by_title`: Chunk by title. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured`. + * + * @enum {string} + */ + "unstructured.chunking.strategy"?: "" | "by_title"; + /** + * @description __Allow Sections to Span Multiple Pages__: Set this to true for allowing sections to span multiple pages. This is only applicable if chunking strategy has been set. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured` and `unstructured.chunking.strategy` is set to `by_title`. + * + * @default true + */ + "unstructured.multipage.sections"?: boolean; + /** + * @description __Minimum # of characters for combing elements__: Set this to minimum number of characters in a section until which elements should be combined. 
+ * + * __Applicable__ only if `advanced_settings` is set to `Unstructured` and `unstructured.chunking.strategy` is set to `by_title`. + * + * @default 500 + */ + "unstructured.combine.under.n.chars"?: number; + /** + * @description __(Soft) Maximum # of characters in a section__: Set this to (soft) maximum number of characters that can be present in a section. This is a soft maximum. A new section will be created soon after / before this number is hit. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured` and `unstructured.chunking.strategy` is set to `by_title`. + * + * @default 1500 + */ + "unstructured.new.after.n.chars"?: number; + /** + * @description __(Hard) Maximum # of characters in a section__: Set this to (hard) maximum number of characters that can be present in a section. + * + * __Applicable__ only if `advanced_settings` is set to `Unstructured` and `unstructured.chunking.strategy` is set to `by_title`. + * + * @default 1500 + */ + "unstructured.max.characters"?: number; + /** + * @description __Is lenient?__: Indicates whether the parser is permissive or not. Defaults to true, meaning the parser will do a best effort to read as much from the message content as possible regardless of the content and block boundaries being valid or not. + * + * __Applicable__ only if `advanced_settings` is set to `SWIFT`. + * + * Default Value: `true` + * + * @default true + */ + lenient?: boolean; + /** + * @description __Should Parse Text Block?__: Set this to true if you wish the text block (block 4) to be parsed. + * + * __Applicable__ only if `advanced_settings` is set to `SWIFT`. + * + * Default Value: `true` + * + * @default true + */ + "parse.text.block"?: boolean; + /** + * @description __Should Parse Trailer Block?__: Set this to true if you wish the trailer block (block 5) to be parsed. + * + * __Applicable__ only if `advanced_settings` is set to `SWIFT`. 
+ * + * Default Value: `true` + * + * @default true + */ + "parse.trailer.block"?: boolean; + /** + * @description __Should Parse User Block?__: Set this to true if you wish the user block to be parsed. + * + * __Applicable__ only if `advanced_settings` is set to `SWIFT`. + * + * Default Value: `true` + * + * @default true + */ + "parse.user.block"?: boolean; + /** + * @description __Runtime Data Credentials For LLM__: Set the Nexla Credential Id for the OpenAI API that should be used for generating embeddings. + * + * __Applicable and required__ only if `post.processor` is set to `true`. + */ + runtime_data_credentials_id?: number; + /** + * @description __Embedding Model__: Set the model that should be used for generating embeddings. + * + * __Applicable and required__ only if `post.processor` is set to `true`. + * + * Example: `text-embedding-ada-002` + */ + model?: string; + /** + * @description __Attribute for embeddings__: Set the data property that should be used for generating embeddings. For example, if your input data has a property called `text` that you wish to use for embeddings, set this value to `text`. + * + * __Applicable and required__ only if `post.processor` is set to `true`. + * + * Example: `text` + */ + property?: string; + /** + * @description __Chunking Limit__: Set the size segments that the parsed text should be broken down to. + * + * __Applicable__ only if `post.processor` is set to `true`. + * + * Default Value: `1000` + * + * @default 1000 + */ + chunk_threshold?: number; + /** + * @description __AI Function ID__: Set the Nexla AI function to be used for post-processing these files before the data is delivered to the Nexset. + * + * __Applicable__ only if `ai.function` is set to `true`. + */ + "ai.function.id"?: number; + /** + * Format: epoch + * @description __Only read files modified after:__ Set this property to indicate whether the platform should skip the ingestion of some files based on the file modification date. 
+ * + * Note that the platform always keeps track of whether or not a file has been processed. Therefore, changes to a previously ingested (or ignored) file that cause a change in its modification date do result in the file being processed again. + * + * Default value: '' + */ + "ignore.files.older.than.ms"?: number; + /** + * @description __Customize Paths to be Scanned/Ignored__: Once you have selected a root bucket/folder/path to be scanned for data, you can further indicate specific path patterns to be scanned or ignored within the selected pattern. + * + * Enable this setting and configure the relevant path patterns if you want the platform to scan (or ignore) all sub-folders or files that match a pattern. + * + * Default Value: `false` + */ + path_exclusions?: boolean; + /** + * @description __Paths to be scanned Eg. **\/Done/__: Configure a path pattern if you want to only scan sub-folders or files that match a pattern. For example, enter `**\/ABC/*` if you would like to only scan files in the subfolder ABC. + * + * **Note**: + * 1. Patterns must match the **Apache Ant Path Pattern**. See [Apache Ant Path Documentation](https://ant.apache.org/manual/dirtasks.html) for examples. + * 2. Patterns must start from the root of the location accessible to the credentials. + * 3. You will still need to select a base folder (root or a subfolder). Only pattern matches that are inside the selected base folder will be scanned. + * + * Only applicable if `path_exclusions` is `true` + */ + "whitelist.pathmatchers"?: string; + /** + * @description __Paths NOT to be scanned. E.g., **\/archives/__: Configure a path pattern if you do __NOT__ want to scan sub-folders or files that match a pattern. For example, enter `**\/ABC/*` if you would like to ignore files that are in the subfolder ABC. + * **Note** + * 1. Patterns must match the **Apache Ant Path Pattern**. See [Apache Ant Path Documentation](https://ant.apache.org/manual/dirtasks.html) for examples. + * 2. 
Patterns must start from the root of the location accessible to the credentials. + * 3. You will still need to select a base folder (root or a subfolder). Only pattern matches that are inside the selected base folder will be ignored. + * + * Only applicable if `path_exclusions` is `true` + */ + "blacklist.pathmatchers"?: string; + /** + * @description __Timezone for Path Format__ + * Only applicable if `path_exclusions` is `true` + */ + timezone?: string; + /** + * @description __Force a Single Schema__: The platform's automatic Nexset detection mechanism ensures that similar files are always processed as the same Nexset, even if the file extensions are different. + * + * However, sometimes, it might be beneficial to bypass Nexset detection and enforce a single schema. Use this setting to force the platform to only detect one Nexset for this Source. + * + * For example, you might want to force a single schema in the following scenarios: + * 1. Your business case requirement is time-sensitive to any processing latencies, and you know that the data will always be in the same structure. While the Schema Detector does not incur a large overhead per file, for high file volumes, bypassing this additional processing might result in noticeable improvement. + * 2. You know that files should always be processed as the same Nexset, but there is a high likelihood of sparse data. As the Detector might not find significant overlap in the case of sparse data, enforcing a single schema will ensure that the detector will only grow the Nexset, regardless of the presence or absence of overlapping. + * + * Default Value: `false` + */ + "schema.detection.once"?: boolean; + /** + * @description __Enable Grouping__: Set this option and associated settings if you have files in which rows of data need to be combined based on the value of a key. 
+ * + * For example, this is useful if you have CSV files with a column __order_number__ in addition to other columns of information about that order, and instead of including each row as a unique record, you want a Nexset in which all orders with the same number are concatenated as one record at the time of ingestion. To produce a Nexset with the record structure including the attribute __order_number__ and another attribute __order_details__, with an array of objects containing the values of the other columns, you can set __order_number__ as the __Grouping Key Attribute__ and __order_details__ as the __Grouped Field Name__. + * + * Default Value: `false` + */ + allowGrouping?: boolean; + /** + * @description __Grouping Key Attribute__: The name of the column/attribute based on which grouping needs to be performed. + * + * For example, if you have CSV files with a column __order_number__ in addition to other columns of information about that order and grouping needs to be performed based on the column __order_number__, set the value in this box to __order_number__. Data will now be grouped by the __order_number__. + * + * Required and only applicable if ` allowGrouping` is `true`. + */ + "group.by.keys"?: string; + /** + * @description __Grouped Field Name__: Enter the attribute name for the grouped object in the resulting Nexset. + * + * For example, if you have CSV files with a column __order_number__ in addition to other columns of information about that order, and grouping needs to be performed based on the __order_number__, set this input to __order_details__. The resulting Nexset will contain the attribute __order_details__, which will include an array of objects containing the content of each row of grouped data. + * + * Required and only applicable if ` allowGrouping` is `true`. 
+ */ + "group.field.name"?: string; + /** + * @description __Publish value of null key in grouping__: You can configure how the platform should process rows of data in which the Grouping Key Attribute is missing. + * + * Turn on this setting, and the platform will assign such rows to a Grouping Key Attribute with the value `null`. Turn off this setting, and the platform will ignore such rows. + * + * Only applicable if ` allowGrouping` is `true` + */ + "group.publish.null.key"?: string; + }; + }; + azure_blb_data_source: { + source_type: "azure_blb"; + } & Omit & components["schemas"]["file_data_source"]; + azure_data_lake_data_source: { + source_type: "azure_data_lake"; + } & Omit & components["schemas"]["file_data_source"]; + azure_synapse_data_source: { + source_type: "azure_synapse"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + bigquery_data_source: { + source_type: "bigquery"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + box_data_source: { + source_type: "box"; + } & Omit & components["schemas"]["file_data_source"]; + WithCDCSupport: { + source_config?: { + /** + * @description __Scheduling Frequency__: The interval at which Nexla should scan this source for new data. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + /** + * @description __Database Fetch Mode__: Database connectors are designed to support self-serve capabilities for use cases ranging from simple table ingestion to ingestion based on complex custom queries. + * + * * `Default`: Equivalent to running a simple (but optimized) SELECT clause on any database table, along with some additional customizations for filtering rows. + * + * * `Query`: Execute data fetching based on a custom database query using the syntax and convention supported by the underlying database/warehouse. 
+ * + * @enum {string} + */ + db_query_mode: "Default" | "Query"; + /** + * @description __Query__: Set the Query to be executed during each ingestion cycle. + * + * __Required__ if `db_query_mode` is `Query` + */ + query?: string; + /** + * @description __Database__: Database whose table needs to be scanned for data. + * + * __Required__ if `db_query_mode` is `Default` + */ + database?: string; + /** + * @description __Table__: Table that needs to be scanned for data. + * + * __Required__ if `db_query_mode` is `Default` + */ + table?: string; + /** + * @description __Table Scan Mode__: The default configuration of Table Mode is set to read all data in a table in each ingestion cycle, which is equivalent to a running `SELECT` clause on the table. However, when the table contains much more historical data than you wish to scan, you can instruct the platform to begin loading from a specific __id__ (stored in a numeric column), __timestamp__ (stored in a date-time column) or __both id and timestamp__. + * + * You can use the relevant Table Scan Mode to address this use case of partial loading from a table. + * + * Note that this can also be achieved by writing a properly structured query in Query Mode. + * + * Only applicable if `db_query_mode` is `Default` and `cdc.enabled` is `false` + * Default Value: `none` + * + * * `none`: Read the whole table + * + * * `incrementing`: Start reading from a specific ID + * + * * `timestamp`: Start reading from a specific timestamp + * + * * `incrementing,timestamp`: Start reading from a specific ID and timestamp + * + * @enum {string} + */ + mode?: "none" | "incrementing" | "timestamp" | "incrementing,timestamp"; + /** + * @description __ID Column__: The ID column that should be used for executing partial data loading from the selected table. This must be a numeric column. 
+ * + * Only applicable if `db_query_mode` is `Default` and `mode` is `incrementing` or `incrementing,timestamp` + */ + "incrementing.column.name"?: string; + /** + * @description __Starting ID__: The starting ID value of __ID Column__ from which the platform should start ingesting data. + * + * Only applicable if `db_query_mode` is `Default` and `mode` is `incrementing` or `incrementing,timestamp` + */ + "incrementing.load.from"?: string; + /** + * @description __Timestamp Column__: The timestamp column that should be used for executing partial data loading from the selected table. This must be a date-time column. + * + * Only applicable if `db_query_mode` is `Default` and `mode` is `timestamp` or `incrementing,timestamp` + */ + "timestamp.column.name"?: string; + /** + * @description __Starting Timestamp__: The timestamp value of the __Timestamp Column__ from which the platform should start ingesting data. This must be a UNIX epoch- (milliseconds) or ISO-formatted date value (e.g., 2016-01-01T12:13:14). + * + * Only applicable if `db_query_mode` is `Default` and `mode` is `timestamp` or `incrementing,timestamp` + */ + "timestamp.load.from"?: number | string; + /** + * Format: boolean + * @description __Perform Database Commit after Read?__: You can instruct the platform to execute a database commit if the query includes statements that should also be committed to the database after ingestion. This is typically not the case, so most of the time, the value should be left as false. + * + * Only applicable if `db_query_mode` is `Query` + * + * Default Value: `"false"` + * + * @enum {string} + */ + "commit.on.read"?: "false" | "true"; + /** + * @description __Enable Change Data Capture (CDC)__: Select this option if you would like the platform to monitor database transaction logs to determine data ingestion. + * + * Note that your DBA will need to grant necessary Change Data Capture permissions to allow Nexla to access transaction logs. 
Contact Nexla support for relevant instructions. + * + * If limitations that prevent you from enabling CDC controls on your database are in place, you can still set up this source for incremental ingestion by disabling this option and instead selecting incremental table ingestion rules. + * + * Default value: `false` + * + * __Applicable__ if `db_query_mode` is `Default` + */ + "cdc.enabled"?: boolean; + /** + * @description __CDC: Ingest Initial Snapshot__: Choose this option if you want to perform a one-time historical load of all data in the table during the first ingestion from the source. If this option is disabled, only events observed after the source is first activated will be processed. + * + * Default value: `false` + * + * __Applicable__ if `db_query_mode` is `Default` and `cdc.enabled` is `true` + */ + "cdc.snapshot.enabled"?: boolean; + /** + * @description __CDC: Track Deletions__: Choose this option if you also need to track deletions. + * + * You might want this option enabled if you intend to link this source to a destination that needs to be kept in sync with the source by tracking and removing any rows that have been deleted. + * + * With this option enabled, the detected Nexset will include a Nexset record for each deletion event. This record will always contain a `nexla_op` attribute with the value `DELETE`. Additionally, it will include either the primary key of the deleted row or, if the table contains no primary key columns, the content of the entire row. 
+ * + * Default value: `false` + * + * __Applicable__ if `db_query_mode` is `Default` and `cdc.enabled` is `true` + */ + "cdc.capture.delete"?: boolean; + }; + }; + cloudsql_mysql_data_source: { + source_type: "cloudsql_mysql"; + } & Omit & components["schemas"]["WithCDCSupport"]; + cloudsql_postgres_data_source: { + source_type: "cloudsql_postgres"; + } & Omit & components["schemas"]["WithCDCSupport"]; + cloudsql_sqlserver_data_source: { + source_type: "cloudsql_sqlserver"; + } & Omit & components["schemas"]["WithCDCSupport"]; + stream_data_source: { + source_config?: { + /** + * @description __Scheduling Frequency__: The interval at which Nexla should scan this source for new files. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + /** + * @description __Data Format__: Select the data format parser for the data in the topic. This is usually JSON. + * + * @enum {string} + */ + "parser.type": "json" | "csv" | "tsv" | "txt" | "xml"; + }; + }; + confluent_kafka_data_source: { + source_type: "confluent_kafka"; + } & Omit & components["schemas"]["stream_data_source"] & ({ + source_config?: { + /** @description __Topic__: Select the topic for your data. */ + topic: string; + /** + * @description __Offset Mode__: You can configure the default offset mode to be used for reading data from the topic. + * + * Usually set to `earliest` + * + * @enum {string} + */ + "offset.mode"?: "earliest" | "from_date" | "latest" | "manual"; + /** + * @description __Consume Data From Date__: You can choose to configure this source for reading data from a specific date. Enter the starting date for data ingestion. + * + * __Required__ if `offset.mode` is `from_date` + */ + "start.from.date"?: string; + /** + * @description __Consume Data From Offset__: You can choose to configure this source for reading data from a specific offset. Enter the starting offset for data ingestion. 
+ * + * __Required__ if `offset.mode` is `manual` + */ + "start.from.offsets"?: string; + /** + * @description __Data Format__: Select the data format parser for the data in the topic. This is usually JSON. + * + * @enum {string} + */ + "parser.type"?: "json" | "csv" | "tsv" | "txt" | "xml" | "avro"; + /** + * @description __Key Deserializer__: Enter the type of key deserializer that should be used to read keys from the Kafka topic. This is usually in the form `org.apache.kafka.common.serialization.StringDeserializer`. + * + * Recommended default value: + * - `org.apache.kafka.common.serialization.StringDeserializer` if `parser.type` is `csv`, `tsv`, `txt`, `xml`, or `json` + * - `org.apache.kafka.common.serialization.ByteArrayDeserializer` if `parser.type` is `avro` + */ + "key.deserializer"?: string; + /** + * @description __Value Deserializer__: Enter the type of value deserializer that should be used to read from the Kafka topic. This is usually in the form `org.apache.kafka.common.serialization.StringDeserializer`. + * + * Recommended default value: + * - `org.apache.kafka.common.serialization.StringDeserializer` if `parser.type` is `csv`, `tsv`, `txt`, `xml`, or `json` + * - `org.apache.kafka.common.serialization.ByteArrayDeserializer` if `parser.type` is `avro` + */ + "value.deserializer"?: string; + /** + * @description __Serialization Variant__: Set the variant of serializer for processing data. This is required for AVRO parser. Typically this should be set to `confluent`. 
+ * + * __Applicable__ if `parser.type` is `avro` + */ + "serialization.variant"?: string; + }; + }); + databricks_data_source: { + source_type: "databricks"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + db2_data_source: { + source_type: "db2"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + dropbox_data_source: { + source_type: "dropbox"; + } & Omit & components["schemas"]["file_data_source"]; + nosql_data_source: { + source_config?: { + /** + * @description __Ingestion Frequency__: The interval at which Nexla should scan this source for new data. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + /** + * @description __Database__: The database whose collection needs to be scanned for data. + * + * __Required__ if the `data_credentials` access for this destination is not limited to one database. + */ + database?: string; + /** @description __Collection__: The collection that needs to be scanned for data. */ + collection: string; + /** + * @description __Filter Collections__: Instead of reading all the documents in a collection in each ingestion cycle, you can instruct the platform to ingest only some documents. + * + * In some scenarios, when the collection contains much more historical data than you wish to scan, you might want to instruct the platform to begin loading from a specific __timestamp__. You can also write your filter queries to set rules based on which documents from the collection should be ingested. + * + * - `default`: Read all documents in the collection. + * - `timestamp`: Start reading from a specific timestamp. + * - `query`: Filter using Query. + * + * @default default + * @enum {string} + */ + mode: "default" | "timestamp" | "query"; + /** + * @description __Timestamp Attribute__: Set the document metadata attribute that should be used to execute partial data loading from the selected collection. 
This must be an attribute containing a date-time value, such as `created_at`. + * + * __Applicable and required__ if `mode` is `timestamp`. + */ + "timestamp.key"?: string; + /** + * @description __Starting Timestamp__: Set the timestamp value of the __Timestamp Key__ from which the platform should start ingesting data. This must be a UNIX epoch (milliseconds) or ISO-formatted date-time value (e.g., 2016-01-01T12:13:14). + * + * __Applicable and required__ if `mode` is `timestamp`. + */ + "timestamp.load.from"?: string; + /** + * @description __Ending Timestamp__: Set the timestamp value of the __Timestamp Key__ at which the platform should end ingesting data. This must be a UNIX epoch (milliseconds) or ISO-formatted date-time value (e.g., 2016-01-01T12:13:14). + * + * __Applicable__ if `mode` is `timestamp`. + */ + "timestamp.load.to"?: string; + }; + }; + dynamodb_data_source: { + source_type: "dynamodb"; + } & Omit & components["schemas"]["nosql_data_source"] & { + source_config?: { + /** + * @description __DynamoDB Filter Expression__: Enter a valid DynamoDB Query expression to filter collections. + * + * See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Query.html#Query.FilterExpression for information about the Filter Expression structure. + * + * Example: `#name = :name_value`. + * + * __Applicable and required__ if `mode` is `query`. + */ + "dynamodb.filter.expression"?: string; + /** + * @description __Filter Expression: Attribute Values__: Set attribute values for attributes that are used in your filter expression. + * + * See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Query.html#Query.FilterExpression for information about the Filter Expression structure. + * + * For example, for the above expression, this value is `{":name_value":{"S":"My Name"}}`. + * + * __Applicable__ if `mode` is `query`. 
+ */ + "dynamodb.filter.attributes.values"?: string; + /** + * @description __Filter Expression: Attribute Names__: Set attribute names for attributes that are used in your filter expression. + * + * See https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Query.html#Query.FilterExpression for information about the Filter Expression structure. + * + * For example, for the above expression, this value is `{"#name":"name_column"}`. + * + * __Applicable__ if `mode` is `query`. + */ + "dynamodb.filter.attributes.names"?: string; + }; + }; + file_upload_data_source: { + source_type: "file_upload"; + } & Omit & components["schemas"]["file_data_source"]; + firebase_data_source: { + source_type: "firebase"; + } & Omit & components["schemas"]["nosql_data_source"] & { + source_config?: { + /** + * @description __Query Expression__: Set the Query to be executed during each ingestion cycle. The query content should be a valid JSON object in Nexla Firebase query DSL. Please contact Nexla support for tips on writing valid queries. + * + * Example: `{"filters": [ { "fieldName": "name", "operator": "EQUAL", "fieldType": "string", "stringValue": "test" }]}` results in documents containing a `name` property with the value `test`. + * + * __Applicable and required__ if `mode` is `query`. + */ + "firebase.query"?: string; + }; + }; + firebolt_data_source: { + source_type: "firebolt"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + ftp_data_source: { + source_type: "ftp"; + } & Omit & components["schemas"]["file_data_source"]; + gcp_alloydb_data_source: { + source_type: "gcp_alloydb"; + } & Omit & components["schemas"]["WithCDCSupport"]; + gcp_spanner_data_source: { + source_type: "gcp_spanner"; + } & Omit & components["schemas"]["WithoutCDCSupport"] & { + source_config?: { + /** + * @description __Enable Spanner Data Boost?__: Set this option to True if you want to leverage Spanner Data Boost capability for this source. 
Note that the service account used in the credential must have `spanner.databases.useDataBoost` IAM permission for successfully running the query with Data Boost. + * + * Default: `false` + */ + "enable.spanner.data.boost"?: boolean; + }; + }; + gcs_data_source: { + source_type: "gcs"; + } & Omit & components["schemas"]["file_data_source"]; + gdrive_data_source: { + source_type: "gdrive"; + } & Omit & components["schemas"]["file_data_source"]; + google_pubsub_data_source: { + source_type: "google_pubsub"; + } & Omit & components["schemas"]["stream_data_source"] & ({ + source_config?: { + /** @description __Subscription__: Select the subscription for your data. */ + subscription: string; + /** + * @description __Offset Mode__: You can configure the default offset mode used to read data from the topic. + * + * Usually set to `earliest` + * + * @enum {string} + */ + "offset.mode"?: "earliest" | "from_date" | "latest" | "manual"; + /** + * @description __Consume Data From Date__: You can choose to configure this source for reading data from a specific date. Enter the starting date for data ingestion. + * + * __Required__ if `offset.mode` is `from_date` + */ + "start.from.date"?: string; + /** + * @description __Consume Data From Offset__: You can choose to configure this source for reading data from a specific offset. Enter the starting offset for data ingestion. + * + * __Required__ if `offset.mode` is `manual` + */ + "start.from.offsets"?: string; + /** + * @description __Key Deserializer__: Enter the type of key deserializer that should be used for reading keys from the topic. This is usually in the form `org.apache.kafka.common.serialization.StringDeserializer`. + * + * Default value: `org.apache.kafka.common.serialization.StringDeserializer` + */ + "key.deserializer"?: string; + /** + * @description __Value Deserializer__: Enter the type of value deserializer that should be used for reading from the topic. 
This is usually in the form `org.apache.kafka.common.serialization.StringDeserializer`. + * + * Default value: `org.apache.kafka.common.serialization.StringDeserializer` + */ + "value.deserializer"?: string; + }; + }); + hana_jdbc_data_source: { + source_type: "hana_jdbc"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + hive_data_source: { + source_type: "hive"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + jms_data_source: { + source_type: "jms"; + } & Omit & components["schemas"]["stream_data_source"] & ({ + source_config?: { + /** + * @description __Type of Source__: Select whether the source is a `topic` or `queue`. + * + * @enum {string} + */ + "source.type": "topic" | "queue"; + /** @description __Topic or Queue Name__: Configure the topic or queue from which data needs to be ingested. */ + "source.name": string; + }; + }); + kafka_data_source: { + source_type: "kafka"; + } & Omit & components["schemas"]["stream_data_source"] & ({ + source_config?: { + /** @description __Topic__: Select the topic for your data. */ + topic: string; + /** + * @description __Offset Mode__: You can configure the default offset mode to be used for reading data from the topic. + * + * Usually set to `earliest` + * + * @enum {string} + */ + "offset.mode"?: "earliest" | "from_date" | "latest" | "manual"; + /** + * @description __Consume Data From Date__: You can choose to configure this source for reading data from a specific date. Enter the starting date for data ingestion. + * + * __Required__ if `offset.mode` is `from_date` + */ + "start.from.date"?: string; + /** + * @description __Consume Data From Offset__: You can choose to configure this source for reading data from a specific offset. Enter the starting offset for data ingestion. + * + * __Required__ if `offset.mode` is `manual` + */ + "start.from.offsets"?: string; + /** + * @description __Key Deserializer__: Enter the type of key deserializer that should be used to read keys from the Kafka topic. 
This is usually in the form `org.apache.kafka.common.serialization.StringDeserializer`. + * + * Default value: `org.apache.kafka.common.serialization.StringDeserializer` + */ + "key.deserializer"?: string; + /** + * @description __Value Deserializer__: Enter the type of value deserializer that should be used to read from the Kafka topic. This is usually in the form `org.apache.kafka.common.serialization.StringDeserializer`. + * + * Default value: `org.apache.kafka.common.serialization.StringDeserializer` + */ + "value.deserializer"?: string; + }; + }); + min_io_s3_data_source: { + source_type: "min_io_s3"; + } & Omit & components["schemas"]["file_data_source"]; + mongo_data_source: { + source_type: "mongo"; + } & Omit & components["schemas"]["nosql_data_source"] & { + source_config?: { + /** + * @description __Query Expression__: Set the Mongo query expression to be executed during each ingestion cycle. See https://www.mongodb.com/docs/compass/current/query/filter/ for tips on constructing a valid query. + * + * Example: `{$query: {name: 'John'}, $orderby: {name: -1}}`. + * + * __Applicable and required__ if `mode` is `query`. + */ + "mongo.query.expression"?: string; + }; + }; + mysql_data_source: { + source_type: "mysql"; + } & Omit & components["schemas"]["WithCDCSupport"]; + netsuite_jdbc_data_source: { + source_type: "netsuite_jdbc"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + rest_data_source: { + source_type: "rest"; + } & Omit & ({ + source_config?: { + /** + * @description With Nexla you can configure chains of API calls to fetch data from APIs before records land in the Nexset. Each step in the chain can be configured to fetch multiple pages of data based on the iteration type. Each step is an object in this array. + * + * Typically, you want to fetch multiple pages of data from a REST API. This is done by configuring one object in this array and setting an appropriate `iteration.type` in that object. 
+ * + * But if you want to fetch data from multiple APIs in a chain, configure multiple objects in this array. Assign a unique `key` to each object, and then you can reference the results of each step in subsequent steps using the `.` syntax. + */ + "rest.iterations": ({ + /** @description __Step Key__: Unique key for this iteration step. This can be any string that is unique within all rest iterations steps for this source. The key is used for referencing results from a step in subsequent steps. We typically use `step_` as the key. */ + key: string; + /** + * @description __API URL__: The API URL to fetch data from. You can use Nexla macros in the URL: + * • Date/Time macros: Use `now` with optional offsets (e.g. `now-1`) + * • Response macros: Reference data from previous steps using `step_.` syntax + * • User-defined variables: Use `{variable_name=default_value}` syntax to define variables that can be set at runtime + * + * Examples: + * • `https://api.example.com/data?date=${now-1}` + * • `https://api.example.com/users?token=${step_1.token}` + * • `https://api.example.com/orders?status={status=pending}&limit={limit=100}` + */ + "url.template": string; + /** + * @description __HTTP Method__: HTTP method for the request. + * • GET: Retrieve data from the server + * • POST: Submit data to be processed + * • PUT: Update or replace existing data + * + * @enum {string} + */ + method: "GET" | "POST" | "PUT"; + /** + * @description __Date Format__: The format of the date to be used in the API URL. You can use Nexla macros in the date format: + * • Date/Time macros: Use `now` with optional offsets (e.g. 
`now-1`) + * • Response macros: Reference data from previous steps using `step_.` syntax + * • User-defined variables: Use `{variable_name=default_value}` syntax to define variables that can be set at runtime + * + * Examples: + * • `%Y-%m-%d` + * • `%Y-%m-%d %H:%M:%S` + * • `%Y-%m-%d %H:%M:%S %z` + * + * @enum {string} + */ + "date.format"?: "yyyy-MM-dd" | "MM/dd" | "yy" | "yyyy" | "MM" | "MMM" | "MMMM" | "hh" | "hh:mm" | "HH" | "HH:mm" | "DD" | "eeee" | "ssss" | "yyyy-MM-dd'T'HH:mm:ss'Z'"; + /** + * @description __Time Unit for Operations__: The unit of date or time for performing mathematical operations on Nexla date-time macro. + * + * @enum {string} + */ + "date.time.unit"?: "yyyy" | "MM" | "dd" | "HH" | "mm" | "ss"; + /** + * @description __Request Body__: Enter the payload for this request. + * + * **For REST APIs:** + * 1. Enter a valid JSON object + * 2. You can use Nexla macros in the payload + * + * **For GraphQL APIs:** + * 1. If using GraphQL pagination (page or cursor), enter your GraphQL query directly + * 2. For other GraphQL requests that you wish to send as a standard REST API POST request, wrap your query in a JSON object with `"query"` field + * + * **Example REST payload:** + * `{ "user_id": 123 }` + * + * **Example GraphQL with pagination:** + * `query {\n characters(page: $page) {\n results {\n name\n species\n }\n }\n}` + * + * **Example GraphQL without pagination:** + * `{ "query": "query { users { id name } }" }` + */ + "body.template"?: string; + /** + * @description __Response Format__: Select the format of the response from the API. This is used to parse the response from the API into a format that can be used by Nexla. + * + * @enum {string} + */ + "response.format"?: "xml" | "json"; + /** + * @description __Response Data Path__: Specify path to data in API response. + * You can choose which part of the API response should be treated as relevant data by Nexla. 
For ex: when pulling a list of items from a REST API endpoint, the API will usually return an array of records along with some metadata. You can configure this field such that Nexla can treat each element of that array as a record. + * For JSON API response, please specify the JSON Path to point to the object or array you want Nexla to consider as relevant data, and for XML API use XPATH. + * For ex: JSONPath could be $.data[*] if the response has an array named data at the top level. Select one of the smart tokens generated when you test the API, or try out JSON Path examples at https://jsonpath.com + */ + "response.data.path"?: string; + /** + * @description __Response Data Path Additional__: Specify path to metadata in response (Optional). + * By choosing Path to Data in Response above you had defined which part of the API response should be treated by Nexla as a record. But sometimes there can be API response data outside that path to data that you also wish to include in each record. Set this property if you wish to include that common metadata in each record. + * For JSON API response, please specify the JSON Path to point to the object or array you want Nexla to consider as relevant data, and for XML API use XPATH. Try out JSON Path examples at https://jsonpath.com + */ + "response.data.path.additional"?: string; + /** @description __Request Headers__: Add any optional request headers that must be sent as part of this request. You do not need to repeat headers already present in credentials. Please input as comma-separated values e.g `header1:value1,header2:value2`. */ + "request.headers"?: string; + /** @description __Lookup ID__: You can use values of a column in a Nexla lookup as a macro for API URL. Select the lookup you wish to use for generating API macros. */ + "map.id"?: string; + /** + * @description __Type of Iteration__: Type of iteration to use for fetching data. 
Set instructions for how the connector should iterate requests over multiple pages in this step before moving on to the next step. + * • static.url: No Iteration + * • paging.incrementing: Page Number + * • paging.incrementing.offset: Offset + * • paging.next.token: Next Token In Response + * • paging.next.url: Next URL in Response + * • link.header: Next URL in Response Header + * • response.id.number: ID (number) in Response + * • response.id.string: ID (String) In Response + * • graphql.page: GraphQL Page + * • graphql.cursor: GraphQL Cursor + * • data.map.key.queue: Data Map Iteration + * • async.poll: Asynchronous Job Status API + * • body.as.file: Process API Response as File + * + * @enum {string} + */ + "iteration.type": "paging.incrementing" | "paging.incrementing.offset" | "paging.next.token" | "paging.next.url" | "link.header" | "response.id.number" | "response.id.string" | "graphql.page" | "graphql.cursor" | "data.map.key.queue" | "async.poll" | "body.as.file" | "static.url"; + /** @description __URL parameter for ID / Token / Page__: Enter the URL parameter name that is used to set the id / page number / token for this type of iteration. */ + "param.id"?: string; + /** @description __Start From Page__: Set the starting value for the URL page number parameter. Nexla will automatically iterate through subsequent pages of data till there is no more data to be fetched. */ + "start.page.from"?: number; + /** @description __(Optional) Stop After Page No__: You can configure the platform to stop fetching after receiving a specific page number. Leave blank to fetch all relevant data. */ + "end.page.to"?: number; + /** + * @description __URL Param for Items per page__: Enter the URL parameter name that is used to set the the number of items that are returned in each page. 
+ * API developers usually set the default number of items returned to a low number, but for data ingestion we recommend setting the property to maximum allowed by the API you are trying to access. + */ + "param.page.size"?: string; + /** @description __Items Per Page__: Set the value for number of items to be fetched in page of data. */ + "page.expected.rows"?: number; + /** @description __Response Header containing Next Page URL__: Set the name of the response header that contains the next page URL. Often API developers send this data in the `Link` header. */ + link?: string; + /** @description __URL Param For Offset__: Enter the URL parameter name that is used to set the offset for this type of iteration. */ + "param.offset"?: string; + /** @description __Start From Offset__: Set the starting value of offset that should be used for the first request. */ + "start.offset.from"?: number; + /** @description __(Optional) Stop After Offset__: You can configure the platform to stop fetching after receiving a specific offset. Leave blank to fetch all relevant data. */ + "end.offset.to"?: number; + /** + * @description __Parallel Requests__: You can configure multiple requests to be executed in parallel by changing this number. + * __Recommendation__: A high parallelism count can result in being flagged for rate-limiting by the API vendor, so usually you should leave this to the default setting of 1. Modify this setting only if the source is taking longer to iterate through all pages than is acceptable for your use case. + */ + "request.parallelism.count"?: number; + /** @description __Path to next token__: Choose which part of the API response should be treated as relevant token for the next request. This must be a valid JSON Path (for JSON responses) or XPath (for XML responses). */ + "response.next.token.data.path"?: string; + /** @description __(Optional) Stop After Token__: You can configure the platform to stop fetching after receiving a specific token. 
Leave blank to fetch all relevant data. */ + "end.token.to"?: string; + /** @description __Path to next URL__: Choose which part of the API response should be treated as relevant urls for the next request. This must be a valid JSON Path (for JSON responses) or XPath (for XML responses). */ + "response.next.url.data.path"?: string; + /** @description __(Optional) Stop After URL__: You can configure the platform to stop fetching after receiving a specific URL. Leave blank to fetch all relevant data. */ + "end.url.to"?: string; + /** @description __Start ID from__: Set the starting value of ID that should be used for the first request. */ + "start.id.from"?: string; + /** @description __(Optional) Stop After ID__: You can configure the platform to stop fetching after receiving a specific ID. Leave blank to fetch all relevant data. */ + "end.id.to"?: number; + /** @description __ID Name__: Specify Path to ID in API response. Choose which part of the API response should be treated as relevant IDs for the next request. This must be a valid JSON Path (for JSON responses) or XPath (for XML responses). */ + "response.id.field.name"?: string; + /** @description __Next ID Inclusive?__: Set this to true if the highest ID in current response should be used as the starting ID for fetching next page of data. */ + "param.id.inclusive"?: boolean; + /** + * @description __Page Variable Name__: Enter the name of the variable used for pagination in your GraphQL query. This variable will be automatically incremented in subsequent requests until empty results are returned. + * __Applicable__ if `iteration.type` is `graphql.page`. + */ + "graphql.page.name"?: string; + /** + * @description __Starting Page Number__: Enter the initial page number to start fetching data from. This number will be used as the first value for your page variable and will be incremented in subsequent requests. + * __Applicable__ if `iteration.type` is `graphql.page`. 
+ */ + "graphql.page.start"?: string; + /** + * @description __Cursor Variable Name__: Enter the name of the variable used for cursor-based pagination in your GraphQL query. This variable will be updated with the cursor value from each response to fetch the next page of results. + * + * Example: If your query uses `$cursor`, enter `"cursor"` + * __Applicable__ if `iteration.type` is `graphql.cursor`. + */ + "graphql.cursor.name"?: string; + /** + * @description __Initial Cursor Source__: Advanced: Only needed for specific APIs like Monday.com where the initial cursor query has a different structure than subsequent queries. + * + * Most GraphQL APIs don't need this - only use if your API requires: + * 1. A different query structure to get the first cursor + * 2. A different query structure for subsequent pages + * + * Format: `stepKey.path` (e.g., `step1.cursor`) + * + * Example: Monday.com needs this because: + * - First query: `query { boards { items_page { cursor... } } }` + * - Later queries: `query { next_items_page(cursor: $cursor) { ... } }` + * + * __Applicable__ if `iteration.type` is `graphql.cursor`. + */ + "graphql.cursor.template"?: string; + /** + * @description __Path to Next Cursor__: Specify where to find the next cursor value in the GraphQL response. The cursor from this path will be used in the next request. Iteration stops when a blank cursor is returned. + * + * Example: For a response like `{ data: { products: { edges: [{ cursor: "abc" }] } } }`, use `$.data.products.edges[-1:].cursor` + * + * __Applicable__ if `iteration.type` is `graphql.cursor`. + */ + "response.graphql.cursor.path"?: string; + /** @description __Job Status Param__: Choose which part of the API response should be treated as relevant property for evaluating the status of the asynchronous job. This must be a valid JSON Path (for JSON responses) or XPath (for XML responses). 
*/ + "async.iteration.path"?: string; + /** @description __Job Completion Value__: Set the value of Job Status Param that would indicate completion of the asynchronous job. */ + "async.iteration.value"?: string; + /** + * @description __Data Format of Response File__: When an API endpoint returns a file in response (or response body is plain text that should be parsed as a file), the platform can be configured to parse the response content accordingly. + * + * @enum {string} + */ + "file.response.format"?: "csv" | "json" | "txt" | "xml"; + /** + * @description __Include Previous Step Results__: When enabled, this setting preserves results from all steps in a multi-step REST workflow, rather than only keeping the final step's results. + * + * For example, in a two-step workflow: + * Step 1 returns: `{ "data": 1, "cursor": "abc123" }` + * Step 2 returns: `{ "data": 2, "cursor": null }` + * + * With this enabled, the final output includes both: + * `[ { "data": 1, "cursor": "abc123" }, { "data": 2, "cursor": null } ]` + * + * Without this (default), only the last step's results are kept: + * `[ { "data": 2, "cursor": null } ]` + * + * This is particularly useful for: + * • API calls that require initial requests to get pagination tokens + * • Multi-step workflows where early responses contain important metadata + * • Cases where discarding intermediate results would lose valuable information + * + * __Applicable__ if `iteration.type` is `paging.incrementing`, `paging.incrementing.offset`, `paging.next.token`, `paging.next.url`, `link.header`, `response.id.number`, `response.id.string`, `graphql.page`, `graphql.cursor`, `data.map.key.queue`, `async.poll`, `body.as.file`, `static.url`. + * Default value: `false`. + */ + "results.pass.through"?: boolean; + })[]; + /** + * @description __Fetch Data__: The interval at which Nexla should scan this source for new data. This must be in the form of a cron expression. + * Example: `0 0 22 10 11 ? 2022`. 
+ */ + "start.cron"?: string; + }; + }); + nexla_monitor_data_source: components["schemas"]["rest_data_source"]; + nexla_rest_data_source: { + source_type: "nexla_rest"; + } & Omit & ({ + source_config?: { + /** + * @description __Webhook Authorization Type__: Webhooks are usually authorized via a Nexla API key. + * + * An additional layer of authorization can be enabled via Hash-Based Message Authentication Code(HMAC). Some third parties send a hash along with the payload. In such cases, Nexla can validate that the incoming call was generated by the authorized partner. Set the HMAC properties that will be used by Nexla for validation against the incoming hash. + * + * Default Value: `Nexla API Key` + * + * @enum {string} + */ + "webhook.auth_type": "Nexla API Key" | "Payload Hash"; + /** + * @description Set this to `true` if you want to enforce the detection of only one Nexset. + * + * Set this to `false` to instruct the platform to monitor each call to the webhook for changes in the Nexset schema. + * + * Default value: `true` + */ + "schema.detection.once": boolean; + /** + * @description Set to `true` to support incoming webhook authentication via the payload hash. + * + * Default value: `false` + */ + "hash.enabled": boolean; + /** + * @description __Hashing Algorithm__: The hash algorithm used to generate the hash during payload hashing. + * + * __Required__ if `hash.enabled` is `true` + * + * @enum {string} + */ + hash_algorithm?: "HmacSHA1" | "HmacSHA256" | "HmacSHA384" | "HmacSHA512" | "HmacMD5"; + /** + * @description Parameter/header name in the incoming request containing the hash. + * + * __Required__ if `hash.enabled` is `true` + */ + hash_request_param?: string; + /** + * @description The hash secret key for payload hashing. 
+ * + * __Required__ if `hash.enabled` is `true` + */ + hash_secret_key?: string; + }; + }); + oracle_data_source: { + source_type: "oracle"; + } & Omit & components["schemas"]["WithCDCSupport"]; + oracle_autonomous_data_source: { + source_type: "oracle_autonomous"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + vector_db_data_source: { + source_config?: { + /** + * @description __Scheduling Frequency__: The interval at which Nexla should scan this source for new data. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + }; + }; + pinecone_data_source: { + source_type: "pinecone"; + } & Omit & components["schemas"]["vector_db_data_source"] & ({ + source_config?: { + /** @description __Index__: Specify the name of the index to be queried within the selected Pinecone database. */ + database: string; + /** + * @description __Namespace__: Set the namespace to query within the selected Pinecone database. + * You can use the default namespace or specify a custom namespace. + * + * @default + */ + collection?: string; + /** + * @description __Query Type__: Define the type of operation to perform, such as fetching vectors, + * performing similarity search, or retrieving vectors by ID. + * - `fetch_vectors` (Fetch Vectors): Retrieve vectors from the Pinecone database. + * - `similarity_search` (Similarity Search): Retrieve similar vectors to a given vector. + * - `fetch_ids` (Fetch IDs): Retrieve vectors by their unique identifiers. + * + * @default similarity_search + * @enum {string} + */ + query_type: "fetch_vectors" | "similarity_search" | "fetch_ids"; + /** + * @description __Top K Similar Vectors__: Specify the number of top similar vectors to retrieve. + * Applicable when `query_type` is `similarity_search`. + * + * @default 20 + */ + topK?: string; + /** + * @description __Search Filter__: Specify a filter to refine which vectors are retrieved from the database. 
+ * Applicable when `query_type` is `similarity_search`. + * + * @default + */ + "pinecone.filter"?: string; + /** + * @description __Search By Criteria__: Specify the type of search to perform for similarity search. + * - `dense_vector` (Dense Vector): Provide a dense vector for similarity search. + * - `vector_id` (Vector Identifier): Provide the unique identifier of the vector to fetch. + * + * Applicable when `query_type` is `similarity_search`. + * + * @default dense_vector + * @enum {string} + */ + search_by?: "dense_vector" | "vector_id"; + /** + * @description __Dense Vector__: Provide the dense vector for similarity search. + * + * Required when `query_type` is `similarity_search` and `search_by` is `dense_vector`. + */ + dense_vector?: string; + /** + * @description __Sparse Vector Indices__: List the indices of non-zero values in the sparse vector for similarity search. + * + * Applicable when `query_type` is `similarity_search` and `search_by` is `dense_vector`. + */ + sparse_vector_indices?: string; + /** + * @description __Sparse Vector Values__: Provide the values corresponding to the indices of the sparse vector for similarity search. + * + * Applicable when `query_type` is `similarity_search` and `search_by` is `dense_vector`. + */ + sparse_vector_values?: string; + /** + * @description __Vector Identifier__: Unique identifier of the vector to fetch. + * + * This field is required when using the `similarity_search` query type and `search_by` is `vector_id`. + */ + vector_id?: string; + /** + * @description __Pinecone Prefix__: Specify a prefix to refine which vectors are retrieved from the database. + * + * Applicable when `query_type` is `similarity_search` or `fetch_vectors`. + */ + "pinecone.prefix"?: string; + /** + * @description __Include Values?__: Set this field to `true` to include the values of the vectors in the output. + * Applicable when `query_type` is `similarity_search`. 
+ * + * @default true + */ + "pinecone.includeValues"?: boolean; + /** + * @description __Include Metadata?__: Set this field to `true` to include the metadata of the vectors in the output. + * Applicable when `query_type` is `similarity_search`. + * + * @default true + */ + "pinecone.includeMetadata"?: boolean; + }; + }); + postgres_data_source: { + source_type: "postgres"; + } & Omit & components["schemas"]["WithCDCSupport"]; + redshift_data_source: { + source_type: "redshift"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + s3_data_source: { + source_type: "s3"; + } & Omit & components["schemas"]["file_data_source"]; + s3_iceberg_data_source: { + source_type: "s3_iceberg"; + } & Omit & ({ + source_config?: { + /** + * @description __Check for data__: The interval at which Nexla should scan this source for new data. This must be in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + /** + * @description __Warehouse Directory__: Path to the directory containing the Iceberg table. + * Nexla uses the path-based Hadoop catalog to discover tables in S3. So if your Iceberg table `sales` is in `s3://my-nexla-bucket/product/sales`, then set this property to `my-nexla-bucket`. + */ + "iceberg.warehouse.dir": string; + /** + * @description __Table Name__: Name of the table from which data will be read. + * Nexla uses the path-based Hadoop catalog to discover tables in S3. So if your Iceberg table `sales` is in `s3://my-nexla-bucket/product/sales`, then set this property to `product.sales` to read from the table. + */ + "iceberg.table.name": string; + /** + * @description __Table Scan Modes__: Select the mode to query the table using Apache Iceberg time travel features. + * The default value can be used to skip timetravel features. The other modes can be used to query table as-of a branch, tag, timestamp, snapshot or read appended data between two snapshots. 
+ * + * * `none` - (Do not use time travel features) Select this mode if you do not want to use any time travel features and want to read all data from the table. + * * `branch` - (Branch) Select this mode if you want to query the table as of a branch. + * * `tag` - (Tag) Select this mode if you want to query the table as of a tag. + * * `timestamp` - (Timestamp) Select this mode if you want to query the table as of a timestamp. + * * `snapshot` - (Snapshot) Select this mode if you want to query the table as of a snapshot. + * * `incremental` - (Incremental) Select this mode if you want to read appended data between two snapshots. + * + * @enum {string} + */ + "iceberg.timetravel.modes": "none" | "branch" | "tag" | "timestamp" | "snapshot" | "incremental"; + /** + * @description __Branch to Query__: Branch to query using Apache Iceberg time travel features. Not compatible with time travel by timestamp. + * + * __Applicable and required__ if `iceberg.timetravel.modes` is set to `branch`. + */ + "iceberg.timetravel.as.of.branch"?: string; + /** + * @description __Tag to Query__: Tag to query using Apache Iceberg time travel features. Not compatible with time travel by timestamp. + * + * __Applicable and required__ if `iceberg.timetravel.modes` is set to `tag`. + */ + "iceberg.timetravel.as.of.tag"?: string; + /** + * @description __Snapshot ID to Query__: Snapshot ID to query using Apache Iceberg time travel features. + * + * __Applicable and required__ if `iceberg.timetravel.modes` is set to `snapshot`. + */ + "iceberg.timetravel.as.of.snapshot"?: string; + /** + * @description __Timestamp to Query__: Timestamp to query using Apache Iceberg time travel features. Supported formats: `YYYY-MM-DD HH:MM:SS` or Unix timestamp in seconds. + * + * __Applicable and required__ if `iceberg.timetravel.modes` is set to `timestamp`. 
+ */ + "iceberg.timetravel.as.of.timestamp"?: string; + /** + * @description __Snapshot ID to start reading from__: Read appended data between two snapshots with the incremental read feature. This is the starting snapshot ID and is required for this feature. Incremental reads are not compatible with other time travel features nor with upserts and deletes. + * + * __Applicable and required__ if `iceberg.timetravel.modes` is set to `incremental`. + */ + "iceberg.incremental.start-snapshot-id"?: string; + /** + * @description __Snapshot ID to end reading at__: Read appended data between two snapshots with the incremental read feature. This is the ending snapshot ID and is optional, if the starting snapshot ID is specified but the ending snapshot ID is not provided will read all subsequent data. Incremental reads are not compatible with other time travel features nor with upserts and deletes. + * + * __Applicable__ if `iceberg.timetravel.modes` is set to `incremental`. + */ + "iceberg.incremental.end-snapshot-id"?: string; + }; + }); + sharepoint_data_source: { + source_type: "sharepoint"; + } & Omit & components["schemas"]["file_data_source"]; + snowflake_data_source: { + source_type: "snowflake"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + soap_data_source: { + source_type: "soap"; + } & Omit & { + source_config?: { + /** + * @description __Scheduling Frequency__: The interval at which Nexla should scan this source for new data. This must in the form of a cron expression. + * + * @example 0 0 22 10 11 ? 2022 + */ + "start.cron": string; + /** @description __WSDL URL__: Enter the WSDL URL of the Soap service to which you wish to connect. */ + "soap.wsdl.url": string; + /** @description __SOAP Service Name__: Enter the name of the SOAP Service that you wish to access. */ + "soap.service": string; + /** + * @description __SOAP Binding Name__: Enter the name of the SOAP binding element. 
+ * + * The SOAP binding element defines the message format and protocol details for each port. The binding element has two attributes: the name attribute and the type attribute. + */ + "soap.binding": string; + /** @description __SOAP Operation Name__: Select the operation for the binding that you wish to access. */ + "soap.operation": string; + /** + * @description __SOAP Binding Port Type__: Enter the port type of the SOAP binding element. + * + * The SOAP binding element defines the message format and protocol details for each port. The binding element has two attributes: the name attribute and the type attribute. + */ + "soap.port.type": string; + /** @description __SOAP Service Port__: Select the Service Port that you wish to access. */ + "soap.service.port": string; + /** + * @description __Operation Parameters and Values__: Enter a dictionary of operation parameters and relevant operation parameter values for this source. + * + * example: + * ``` + * { + * "TrackRequest/ClientDetail/AccountNumber": "12345", + * "TrackRequest/TransactionDetail/Localization/LanguageCode": "EN" + * } + * ``` + */ + "soap.params": string; + }; + }; + sqlserver_data_source: { + source_type: "sqlserver"; + } & Omit & components["schemas"]["WithCDCSupport"]; + sybase_data_source: { + source_type: "sybase"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + teradata_data_source: { + source_type: "teradata"; + } & Omit & components["schemas"]["WithoutCDCSupport"]; + tibco_data_source: { + source_type: "tibco"; + } & Omit & components["schemas"]["stream_data_source"] & ({ + source_config?: { + /** + * @description Select whether the source is a `topic` or `queue`. + * + * @enum {string} + */ + "source.type": "topic" | "queue"; + /** @description __Topic or Queue Name__: Configure the topic or queue from which data needs to be ingested. 
*/ + "source.name": string; + }; + }); + webdav_data_source: { + source_type: "webdav"; + } & Omit & components["schemas"]["file_data_source"]; + data_set_schema: { + properties?: { + [key: string]: string; + }; + /** Format: url */ + $schema?: string; + "$schema-id"?: number; + }; + UserSimplified: { + id?: number; + full_name?: string; + email?: string; + /** Format: date-time */ + email_verified_at?: string; + }; + OrgSimplified: { + id?: number; + name?: string; + email_domain?: string; + email?: string; + client_identifier?: string; + org_webhook_host?: string; + }; + DataSource: { + /** Format: int32 */ + id?: number; + /** Format: int32 */ + owner_id?: number; + /** Format: int32 */ + org_id?: number; + name?: string; + /** Format: nullable */ + description?: string; + status?: string; + source_type?: string; + connector?: { + /** Format: int32 */ + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + /** Format: nullable */ + vendor_id?: string; + }; + DataSetBrief: { + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: string; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }; + DataSinkSimplified: { + /** Format: int32 */ + id?: number; + /** Format: int32 */ + owner_id?: number; + /** Format: int32 */ + org_id?: number; + name?: string; + status?: string; + sink_type?: string; + }; + DataSet: { + /** Format: int32 */ + id?: number; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + name?: string; + description?: string; + status?: string; + /** @description Id of the data source this Nexset was detected from. This is only relevant for detected Nexsets. */ + data_source_id?: number | null; + /** @description Details about the data source this Nexset was detected from. This is only relevant for detected Nexsets. 
*/ + data_source?: components["schemas"]["DataSource"]; + /** @description Details about the Nexset this Nexset is derived from. This element is empty in case of detected Nexsets and has 1 item in case of derived Nexsets. */ + parent_data_sets?: components["schemas"]["DataSetBrief"][]; + /** @description Details about every data sink this Nexset is directly connected to. */ + data_sinks?: components["schemas"]["DataSinkSimplified"][]; + access_roles?: components["schemas"]["AccessRoles"]; + /** + * @description The ID of the Transform entity that has been used to create the output of this Nexset from the parent Nexset. + * + * This element is null for detected Nexsets. + */ + transform_id?: number | null; + /** @description JSON schema of this Nexset's output. */ + output_schema?: Record; + /** + * Format: nullable + * @description Reference ID of the Nexset that this Nexset was created as a copy of. This is only valid if the Nexset was created by issuing a request to the `copy` endpoint of another Nexset. + */ + copied_from_id?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + tags?: string[]; + /** @description The flow type of the origin node. */ + flow_type?: string; + }; + DataSetMutable: OneOf<[{ + /** + * @description Set this to `false` to indicate that the payload contains the `transform` code that should be used for creating the output schema. + * + * @enum {boolean} + */ + has_custom_transform?: false; + /** @description Transform code that should be applied on the parent Nexset. */ + transform?: Record; + }, { + /** + * @description Set this to `true` to indicate that the payload contains the `transform_id` of a transform that should be applied for creating the output schema. + * + * @enum {boolean} + */ + has_custom_transform?: true; + /** @description Id of a reusable record transform that should be applied on the parent Nexset. 
*/ + transform_id?: number; + }]> & ({ + name?: string; + description?: string; + /** @description Nexset ID of the parent Nexset on which all the rules should be applied for creating this Nexset. */ + parent_data_set_id?: number; + /** + * @description You can document details about important Nexset attributes by annotating them with short descriptions. + * + * Set descriptions for all such attributes by adding them to this object. Attributes should be added in standard JSON schema path format. You can refer to Nexset's output schema structure for guidance. + */ + output_schema_annotations?: { + properties?: { + [key: string]: { + /** @description Set a short description for this attribute. */ + description?: string; + }; + }; + }; + /** + * @description Set this to true and attach the relevant `output_validation_schema` if you want all records to be validated against some JSON schema validation rules. + * + * Records that fail validation will be routed to the Nexset error queue instead of the Nexset output. + */ + output_schema_validation_enabled?: boolean; + /** + * @description You can set JSON Schema validation rules that should be applied on every record of this Nexset. + * + * See https://json-schema.org/learn/getting-started-step-by-step.html for guidance on writing JSON schema validation rules. + */ + output_validation_schema?: { + [key: string]: Record | string; + }; + /** @description You can choose to attach one or more data sinks to this Nexset. Each data sink will receive the output of this Nexset when the Nexset generates output records. */ + data_sinks?: (number | Record)[]; + /** @description `Reserved for Nexla UI`: This field is used by Nexla UI for display instructions when displaying this Nexset on the UI. This entry has no impact on how the Nexla data plane processes data for this Nexset. 
*/ + custom_config?: Record; + tags?: string[]; + }); + DataSetCreate: components["schemas"]["DataSetMutable"]; + data_sink: { + name?: string; + description?: string; + /** @description __Credential ID__: Nexla data credential that contains all authentication information for this destination. */ + data_credentials_id?: number; + /** @description __Nexset ID__: Set the Nexset ID whose output records will be written out to this destination. */ + data_set_id?: number; + /** @description __Connector Type__: Connector codename. */ + sink_type?: string; + }; + database_data_sink: { + /** + * @description __Create Table In Destination__ If the desired table doesn't exist in your database, you can instruct Nexla to create a table when this destination is first activated. + * + * Default Value: `false` + */ + create_destination?: boolean; + sink_config?: { + /** @description __Database__: If the Destination credentials allow access to multiple databases, specify the database to which the destination table belongs. This is only needed if the `data_credentials` entry for this destination is not limited to one database. */ + database?: string; + /** @description __Table__: Set the table to which you wish to push Nexset records. */ + table: string; + /** + * @description __Table Update Mode__: Select whether records should be inserted or upserted into the database. + * + * @enum {string} + */ + "insert.mode": "INSERT" | "UPSERT"; + /** + * @description __Primary Key Columns__: Set all columns that should be set as primary keys of the table. For multiple columns, enter a comma-separated list of column names. + * + * __Required__ if `insert.mode` is set to `UPSERT` + */ + "primary.key"?: string; + /** @description __Mapping__: Set rules for how Nexset record attributes should be written into Database Columns. */ + mapping: { + /** + * @description Most databases require manual mapping of attributes into columns. 
With manual mapping, you can set a single attribute to be written into multiple columns. Additionally, you can specify the desired data format for each column. + * + * + * * `auto`: Automatically map attributes to database columns. Column data types will be inferred from record values, and the nesting of attributes will be preserved. __Only available for select warehouses and databases.__ + * + * * `manual`: Explicitly define attribute mapping and database columns. + * + * @enum {string} + */ + mode: "manual"; + /** + * @description __Attribute to Database Column Mapping__: Define how attributes should be mapped to database columns. + * + * __Required__ if `mapping.mode` is `manual` + * + * __Object Definition Rules__: + * + * 1. Each Nexset record attribute that needs to be written to one or more columns should be listed as a property of this `mapping` object. In the example, the properties are `nexset_attr_1` and `nexset_attr_2`. + * + * 2. Each database column to which the attribute needs to be written is a property of the attribute object above. In the example, `nexset_attr_1` is set to write to columns `db_col_1` and `db_col_2`. + * + * 3. Each database column property has a value that defines the desired data format allowed by the database. Here, data written to `db_col_1` will be written as `TEXT`. + * + * __Example__ + * ``` + * { + * "nexset_attr_1": + * { + * "db_col_1": "TEXT", + * "db_col_2": "TEXT" + * }, + * "nexset_attr_2": + * + * { + * "db_col_3": "FLOAT64" + * } + * } + * ``` + */ + mapping?: { + [key: string]: { + [key: string]: string; + }; + }; + /** + * @description __Tracker Mode__: Each record that flows through Nexla has an associated unique tracker ID. Set this to `RECORD` to configure the tracker ID to be written out to a database column along with the Nexset record. + * + * * `NONE`: The tracker ID won't be written to the database. 
+ * * `RECORD`: The short form tracker ID containing all required lineage information will be written out in the relevant column. + * + * @enum {string} + */ + tracker_mode: "NONE" | "RECORD"; + /** + * @description __Column Name for Nexla Tracker__: Name of the column used for the Nexla record tracker information. + * + * __Applicable and Required__ if `tracker.mode` is set to `RECORD` + */ + tracker_name?: string; + }; + /** + * @description __Allow column updates with nulls__: Set as false to allow partial upsert of a record with only non-null values. + * + * Only valid if `insert.mode` is `UPSERT` + * + * Default Value: `true` + */ + "upsert.nulls"?: boolean; + }; + }; + as400_data_sink: { + sink_type: "as400"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "TINYINT" | "SMALLINT" | "MEDIUMINT" | "INT" | "BIGINT" | "DECIMAL" | "FLOAT" | "DOUBLE" | "BIT" | "BOOLEAN" | "BOOL" | "CHAR" | "VARCHAR(4096)" | "BINARY" | "VARBINARY(65535)" | "TINYBLOB" | "BLOB" | "MEDIUMBLOB" | "LONGBLOB" | "TINYTEXT" | "TEXT" | "MEDIUMTEXT" | "LONGTEXT" | "ENUM" | "SET" | "DATE" | "DATETIME" | "TIMESTAMP" | "YEAR"; + }; + }; + }; + }; + }); + file_data_sink: { + sink_config?: { + /** @description __Path to Write__: Set the path to which you want to write all files. */ + path: string; + /** + * @description __Subfolder Path Format__: You can configure the platform to automatically create subfolders and partition files into those subfolders. + * + * Use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create date-time subfolders. You can also split folders by values of an attribute in the Nexset by using the macro `{record.}`. + * + * Default Value: `{YYYY}/{MM}/{dd}` + */ + "output.dir.name.pattern"?: string; + /** + * @description __Custom File Name Prefix__: Generated file names are in the format --. Set this property to define the prefix of each file. 
+ * + * You can use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create date-time patterns. + */ + "file.name.prefix"?: string; + /** + * @description __Maximum File Size (in MB)__: The maximum size (MB) of each generated file. Data will automatically be partitioned into multiple files. + * + * Default Value: `4096` + */ + "max.file.size.mb"?: number; + /** + * @description __File Format__: Set the output format of the generated files. + * + * * `csv`: Files where column values are separated by commas. + * * `tsv`: Files where column values are separated by tabs. + * * `json`: Files where each row is a valid JSON object. Note that this is a JSON-line file with a `.json` extension. + * * `xlsx`: Excel files. + * * `edi`: EDI files. + * * `avro`: AVRO files. + * * `parquet`: Parquet files. + * * `orc`: ORC files. + * * `fw`: Fixed width files where each column has a fixed width. + * + * @enum {string} + */ + data_format: "csv" | "tsv" | "json" | "xml" | "xlsx" | "edi" | "avro" | "parquet" | "orc" | "fw"; + /** + * @description __Parent XML Tag__: Data from each record is wrapped around a parent XML tag when writing to XML files. Set the wrapper tag that should be used. The default wrapper tag is `root`. + * + * Only valid if `data_format` is `xml` + * + * Default Value: `root` + */ + "xml.root"?: string; + /** + * @description __ORC Compression Type__: Select the type of compression algorithm that should be used to compress file contents into ORC files. + * + * Only valid if `data_format` is `orc` + * + * Default Value: `zlib` + * + * @enum {string} + */ + "orc.compress"?: "none" | "zlib" | "snappy" | "lzo" | "lz4"; + /** + * @description __Write Attribute Names As Header?__: Set this option if you want the attribute names to be added as a header row on each file. + * + * Only applicable if `data_format` is `fw`. 
+ * + * Default Value: `true` + */ + "write.header"?: boolean; + /** + * @description __Length of Each Field in File__: Enter the length of each field in the file. This should be entered as a comma-separated list of numbers, for ex: `10,12,8,15,20`. + * + * Only applicable if `data_format` is `fw`. + */ + "field.lengths"?: string; + /** + * @description __Padding Character__: Choose or type the character that should be used to separate fields. For ex: `_`. + * + * Only applicable if `data_format` is `fw`. + * Default: single space ` ` + * + * Common Values: + * * ` `: Single Space + * * `` : No Padding + * * `,`: Comma + * * `\\t`: Tab + * * `\\n`: Newline + * * `;`: Semicolon + * * `|`: Pipe + * * `^`: Caret + */ + "padding.character"?: string; + /** + * @description __Line Separation Character__: Specify the character that should be recognized by the platform as an indicator of a new line. + * + * Only applicable if `data_format` is `fw`. + * + * Default: `\\n` + * + * Common Values: + * * `\\t`: Tab + * * `\\n`: Newline + */ + "line.separator"?: string; + /** @description __Mapping__: Set rules for how Nexset record attributes should be written into the generated files. */ + mapping?: { + /** + * @description Based on the desired `data_format`, Nexla automatically converts Nexset records into appropriate contents in output files. + * + * However, for some `data_formats` such as `csv` that support the concept of additional rules like the column order, you can choose to manually assign Nexset attributes to output row mapping. + * + * + * * `auto` : Automatically map Nexset attributes to output files. Recommended for data_formats `json`, `xml`, `edi`, `avro`, `parquet`, and `orc`. + * + * * `manual`: Explicitly define attribute mapping and columns in the output file. Recommended for data_formats `csv`, `tsv`, `xlsx`, and `GOOGLE_SPREADSHEET`. 
+ * + * @enum {string} + */ + mode: "auto" | "manual"; + /** + * @description __Attribute to File Column Mapping__: Define how attributes should be mapped to file columns. + * + * __Required__ if `mapping.mode` is `manual` + * + * __Object Definition Rules__: + * + * Each Nexset record attribute that needs to be written to one or more columns should be listed as a property of this `mapping` object. In the example, the properties are `nexset_attr_1` and `nexset_attr_2`. + * + * __Example__ + * ``` + * { + * "nexset_attr_1": [ + * "file_col_1", + * "file_col_2" + * ], + * "nexset_attr_2": [ + * "file_col_3" + * ] + * } + * ``` + */ + mapping?: { + [key: string]: string[]; + }; + /** + * Format: strings + * @description __Field Order__: The order of the column names in the output file. + * + * __Required__ if `mapping.mode` is `manual` + */ + fields_order?: unknown[]; + /** + * @description __Optional Header Content__: You can choose to add a predefined header in each generated output file. The format of this string should match the format of a row of data in that `data_format`. + * + * For example, if the `data_format` is `xml`, the header would be an XML object like `{now}`, but if the desired value is a JSON string, it would be `"{\"header\":\"{now}\"}`. + * + * Note that this field supports Nexla date-time macros like `{now}`. + */ + header_template?: string; + /** + * @description __Tracker Mode__: Each record that flows through Nexla has an associated unique tracker ID. Set this to `RECORD` to configure the tracker ID to be written out to a column along with the Nexset record. + * + * * `NONE`: The tracker won't be written to the output file. + * * `RECORD`: The short-form tracker containing all required lineage information will be written out in the relevant column. + * + * @enum {string} + */ + tracker_mode: "NONE" | "RECORD"; + /** + * @description __Column Name for Nexla Tracker__: The column name for the Nexla record tracker information. 
+ * + * __Applicable and Required__ if `tracker.mode` is set to `RECORD`. + */ + tracker_name?: string; + }; + }; + }; + azure_blb_data_sink: { + sink_type: "azure_blb"; + } & Omit & components["schemas"]["file_data_sink"]; + azure_data_lake_data_sink: { + sink_type: "azure_data_lake"; + } & Omit & components["schemas"]["file_data_sink"]; + azure_synapse_data_sink: { + sink_type: "azure_synapse"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BIGINT" | "INT" | "SMALLINT" | "TINYINT" | "BIT" | "NUMERIC" | "MONEY" | "SMALLMONEY" | "REAL" | "DATETIME" | "SMALLDATETIME" | "CHAR(3000)" | "VARCHAR(100)" | "VARCHAR(3000)" | "VARCHAR(max)" | "TEXT" | "NCHAR" | "NVARCHAR" | "NTEXT" | "BINARY" | "VARBINARY" | "TABLE" | "UNIQUEIDENTIFIER" | "DECIMAL(18, 4)" | "FLOAT(12)"; + }; + }; + }; + }; + }); + bigquery_data_sink: { + sink_type: "bigquery"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + /** @description __Partitioning Column__: You can choose to have the data partitioned by a specific column when it is written to the destination table. Set the partitioning column if you wish to leverage that capability. */ + "partitioning.column"?: string; + /** @description __Clustering Columns__: You can choose to leverage column clustering when data is written to the destination tables. Set this value to a comma-separated list of columns that should be used for clustering. 
*/ + "clustering.columns"?: string; + mapping?: { + /** @enum {string} */ + mode?: "auto" | "manual"; + mapping?: { + [key: string]: { + [key: string]: "BOOLEAN" | "DATE" | "DATETIME" | "TIME" | "TIMESTAMP" | "FLOAT64" | "INT64" | "STRING" | "NUMERIC"; + }; + }; + }; + }; + }); + box_data_sink: { + sink_type: "box"; + } & Omit & components["schemas"]["file_data_sink"]; + cloudsql_mysql_data_sink: { + sink_type: "cloudsql_mysql"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "TINYINT" | "SMALLINT" | "MEDIUMINT" | "INT" | "BIGINT" | "DECIMAL" | "FLOAT" | "DOUBLE" | "BIT" | "BOOLEAN" | "BOOL" | "CHAR" | "VARCHAR(4096)" | "BINARY" | "VARBINARY(65535)" | "TINYBLOB" | "BLOB" | "MEDIUMBLOB" | "LONGBLOB" | "TINYTEXT" | "TEXT" | "MEDIUMTEXT" | "LONGTEXT" | "ENUM" | "SET" | "DATE" | "DATETIME" | "TIMESTAMP" | "YEAR"; + }; + }; + }; + }; + }); + cloudsql_postgres_data_sink: { + sink_type: "cloudsql_postgres"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BIGINT" | "INT8" | "BIGSERIAL" | "SERIAL8" | "BIT" | "BIT_VARYING" | "VARBIT" | "BOOLEAN" | "BOOL" | "BOX" | "BYTEA" | "CHAR" | "VARCHAR" | "CHARACTER VARYING" | "CIDR" | "CIRCLE" | "DATE" | "DOUBLE PRECISION" | "INET" | "INTEGER" | "INT" | "INT4" | "INTERVAL" | "JSON" | "JSONB" | "LINE" | "LSEG" | "MACADDR" | "MONEY" | "NUMERIC" | "DECIMAL" | "PATH" | "PG_LSN" | "POINT" | "POLYGON" | "REAL" | "FLOAT4" | "SMALLINT" | "INT2" | "SMALLSERIAL" | "SERIAL2" | "SERIAL" | "SERIAL4" | "TEXT" | "TIME" | "TIMETZ" | "TIMESTAMP" | "TIMESTAMPTZ" | "TSQUERY" | "TSVECTOR" | "TXID_SNAPSHOT" | "UUID" | "XML"; + }; + }; + }; + }; + }); + cloudsql_sqlserver_data_sink: { + sink_type: "cloudsql_sqlserver"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: 
"BIGINT" | "INT" | "SMALLINT" | "TINYINT" | "BIT" | "NUMERIC" | "MONEY" | "SMALLMONEY" | "REAL" | "DATETIME" | "SMALLDATETIME" | "CHAR(3000)" | "VARCHAR(100)" | "VARCHAR(3000)" | "VARCHAR(max)" | "TEXT" | "NCHAR" | "NVARCHAR" | "NTEXT" | "BINARY" | "VARBINARY" | "TABLE" | "UNIQUEIDENTIFIER" | "DECIMAL(18, 4)" | "FLOAT(12)"; + }; + }; + }; + }; + }); + stream_data_sink: { + sink_config?: Record; + }; + confluent_kafka_data_sink: { + sink_type: "confluent_kafka"; + } & Omit & components["schemas"]["stream_data_sink"] & { + sink_config?: { + /** @description __Topic__: Select the topic for your data. */ + topic: string; + /** + * @description __Include Nexla Metadata in Message?__: Enable this check to include Nexla metadata as part of the message. + * Default value: `false` + */ + "include.metadata"?: boolean; + /** @description __Attribute used as Message Key__: Set the Nexset record attribute that will be used as a key for the Kafka message. */ + "message.key"?: string; + }; + }; + data_map_data_sink: { + sink_type: "data_map"; + } & Omit & { + sink_config?: { + /** @description __Mapping__: Rules for how Nexset record attributes should be written into Nexla Dynamic Lookup columns. */ + mapping: { + /** + * @description __Mapping Mode__: Setting this to `auto` instructs the platform to automatically create column names to match attribute names. + * + * @enum {string} + */ + mode: "auto"; + }; + }; + data_map: { + /** @description __Lookup Name__: This destination will create and be linked to a Nexla Lookup. You can choose a different name for the lookup, although we usually recommend using the same name. */ + name: string; + /** @description __Primary Key__: Set the Nexset attribute that should be used as a primary key in the lookup. */ + map_primary_key: string; + /** + * @description __Return Default Values__: If this value is set to true, any query for lookup rows will return the default values of a column if the Nexset record does not contain that attribute. 
+ * + * Default value: `true` + */ + emit_data_default?: boolean; + /** + * @description __Column Default Values__: You can set default values for any column if a value for that column is not present in a Nexset row. + * + * Example: + * ``` + * { + * "attr1" : "NA", + * "attr2": "" + * } + * + * ``` + */ + data_defaults?: { + [key: string]: string; + }; + }; + }; + databricks_data_sink: { + sink_type: "databricks"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + /** @enum {string} */ + mode?: "auto" | "manual"; + mapping?: { + [key: string]: { + [key: string]: "SMALLINT" | "INT2" | "INTEGER" | "INT" | "INT4" | "BIGINT" | "INT8" | "DECIMAL(18,4)" | "NUMERIC(18,4)" | "REAL" | "FLOAT4" | "DOUBLE PRECISION" | "FLOAT8" | "BOOLEAN" | "BOOL" | "CHAR" | "CHARACTER" | "VARCHAR(65535)" | "CHARACTER VARYING" | "DATE" | "TIMESTAMP" | "TIMESTAMPTZ"; + }; + }; + }; + }; + }); + delta_lake_azure_blb_data_sink: { + sink_type: "delta_lake_azure_blb"; + } & Omit & ({ + sink_config?: { + /** @description __Path to Write__: Set the path to which you want to write all files. */ + path: string; + /** + * @description __Subfolder Path Format__: You can configure the platform to automatically create subfolders and partition files into those subfolders. + * + * Use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create date-time subfolders. You can also split folders based on the values of an attribute in the Nexset by using the macro `{record.}`. + * + * Default Value: `{YYYY}/{MM}/{dd}` + */ + "output.dir.name.pattern"?: string; + /** + * @description __File Format__: Set the output format of the generated files. + * + * @enum {string} + */ + data_format: "delta"; + /** + * @description __Insertion Mode__: Select this mode if you wish to always add new records to the table. + * + * * `Insert`: __Append / Insert new records__ - Select this mode if you wish to always add new records to the table. 
+ * + * * `Update`: __Update Records in Table__ - Select this mode if you wish to update existing records anytime the table already contains records with matching keys. + * + * Default value: `Insert` + * + * @enum {string} + */ + "delta.table.insert.mode"?: "Insert" | "Update"; + /** + * @description __Record Identifier Keys/Fields__: Enter the fields/keys that will be used for matching during updating. This entry should be in the form of a comma-separated string. + * + * __Applicable and required__ if `delta.table.insert.mode` is `Update` + */ + "delta.table.keys"?: string; + }; + }); + delta_lake_azure_data_lake_data_sink: { + sink_type: "delta_lake_azure_data_lake"; + } & Omit & ({ + sink_config?: { + /** @description __Path to Write__: Set the path to which you want to write all files. */ + path: string; + /** + * @description __Subfolder Path Format__: You can configure the platform to automatically create subfolders and partition files into those subfolders. + * + * Use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create date-time sub-folders. You can also split folders based on the values of an attribute in the Nexset by using the macro `{record.}`. + * + * Default Value: `{YYYY}/{MM}/{dd}` + */ + "output.dir.name.pattern"?: string; + /** + * @description __File Format__: Set the output format of the generated files. + * + * @enum {string} + */ + data_format: "delta"; + /** + * @description __Insertion Mode__: Select this mode if you wish to always add new records to the table. + * + * * `Insert`: __Append / Insert new records__ - Select this mode if you wish to always add new records to the table. + * + * * `Update`: __Update Records in Table__ - Select this mode if you wish to update existing records anytime the table already contains records with matching keys. 
+ * + * Default value: `Insert` + * + * @enum {string} + */ + "delta.table.insert.mode"?: "Insert" | "Update"; + /** + * @description __Record Identifier Keys/Fields__: Enter the fields/keys that will be used for matching during updating. This entry should be in the form of a comma-separated string. + * + * __Applicable and required__ if `delta.table.insert.mode` is `Update` + */ + "delta.table.keys"?: string; + }; + }); + delta_lake_s3_data_sink: { + sink_type: "delta_lake_s3"; + } & Omit & ({ + sink_config?: { + /** @description __Path to Write__: Set the path to which you want to write all files. */ + path: string; + /** + * @description __Subfolder Path Format__: You can configure the platform to automatically create subfolders and partition files into those subfolders. + * + * Use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create date-time sub-folders. You can also split folders based on the values of an attribute in the Nexset by using the macro `{record.}`. + * + * Default Value: `{YYYY}/{MM}/{dd}` + */ + "output.dir.name.pattern"?: string; + /** + * @description __File Format__: Set the output format of the generated files. + * + * @enum {string} + */ + data_format: "delta"; + /** + * @description __Insertion Mode__: Select this mode if you wish to always add new records to the table. + * + * * `Insert`: __Append / Insert new records__ - Select this mode if you wish to always add new records to the table. + * + * * `Update`: __Update Records in Table__ - Select this mode if you wish to update existing records anytime the table already contains records with matching keys. + * + * Default value: `Insert` + * + * @enum {string} + */ + "delta.table.insert.mode"?: "Insert" | "Update"; + /** + * @description __Record Identifier Keys/Fields__: Enter the fields/keys that will be used for matching during updating. This entry should be in the form of a comma-separated string. 
+ * + * __Applicable and required__ if `delta.table.insert.mode` is `Update` + */ + "delta.table.keys"?: string; + }; + }); + dropbox_data_sink: { + sink_type: "dropbox"; + } & Omit & components["schemas"]["file_data_sink"]; + nosql_data_sink: { + sink_config?: { + /** + * @description __Database__: If the destination credentials allow access to multiple databases, specify the database to which the destination collection belongs. + * + * __Required__ if the `data_credentials` access for this destination is not limited to one database. + */ + database: string; + /** @description __Collection__: Set the collection in which you wish to create documents. */ + collection: string; + }; + }; + dynamodb_data_sink: { + sink_type: "dynamodb"; + } & Omit & components["schemas"]["nosql_data_sink"]; + email_data_sink: { + sink_type: "email"; + } & Omit & components["schemas"]["file_data_sink"]; + firebase_data_sink: { + sink_type: "firebase"; + } & Omit & components["schemas"]["nosql_data_sink"]; + firebolt_data_sink: { + sink_type: "firebolt"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + /** + * @description __Firebolt Table Type__: Set the type of Firebolt table to which this destination is to be mapped. 
+ * + * + * @enum {string} + */ + "firebolt.table.type"?: "FACT" | "DIMENSION"; + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "INT" | "INTEGER" | "BIGINT" | "LONG" | "FLOAT" | "DOUBLE" | "DOUBLE PRECISION" | "BOOLEAN" | "VARCHAR" | "TEXT" | "STRING" | "DATE" | "DATETIME" | "TIMESTAMP" | "ARRAY"; + }; + }; + }; + }; + }); + ftp_data_sink: { + sink_type: "ftp"; + } & Omit & components["schemas"]["file_data_sink"]; + gcp_alloydb_data_sink: { + sink_type: "gcp_alloydb"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BIGINT" | "INT8" | "BIGSERIAL" | "SERIAL8" | "BIT" | "BIT_VARYING" | "VARBIT" | "BOOLEAN" | "BOOL" | "BOX" | "BYTEA" | "CHAR" | "VARCHAR" | "CHARACTER VARYING" | "CIDR" | "CIRCLE" | "DATE" | "DOUBLE PRECISION" | "INET" | "INTEGER" | "INT" | "INT4" | "INTERVAL" | "JSON" | "JSONB" | "LINE" | "LSEG" | "MACADDR" | "MONEY" | "NUMERIC" | "DECIMAL" | "PATH" | "PG_LSN" | "POINT" | "POLYGON" | "REAL" | "FLOAT4" | "SMALLINT" | "INT2" | "SMALLSERIAL" | "SERIAL2" | "SERIAL" | "SERIAL4" | "TEXT" | "TIME" | "TIMETZ" | "TIMESTAMP" | "TIMESTAMPTZ" | "TSQUERY" | "TSVECTOR" | "TXID_SNAPSHOT" | "UUID" | "XML"; + }; + }; + }; + }; + }); + gcp_spanner_data_sink: { + sink_type: "gcp_spanner"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "ARRAY" | "BOOL" | "BYTES" | "DATE" | "JSON" | "INT64" | "NUMERIC" | "FLOAT64" | "STRING" | "TIMESTAMP"; + }; + }; + }; + }; + }); + gcs_data_sink: { + sink_type: "gcs"; + } & Omit & components["schemas"]["file_data_sink"]; + gdrive_data_sink: { + sink_type: "gdrive"; + } & Omit & ({ + sink_config?: { + /** + * @description __Path to Write__: Set the path to which you want to write all files. + * + * For Google Drive, this is the unique ID of the file/folder into which you want to write the data. 
+ * + * Example: `1MZhNV996K77b-kLYC4yIdG7PJiUUGBwV` + */ + path: string; + /** + * @description __Path (display format)__: This should be the human-readable Google Drive path. This is not required but is useful for keeping track of the actual folder path. + * + * Example: `demo/demodata` + */ + "ui.display_path"?: string; + /** + * @description You can choose to always write the Nexset records to a specific Google spreadsheet instead of creating new files in a Google Drive folder. + * + * Add this property and set its value to `application/vnd.google-apps.spreadsheet` if you want to write your data to a specific spreadsheet instead of creating new files in the target folder. + * + * @enum {string} + */ + "ui.selected_file_mime_type"?: "application/vnd.google-apps.spreadsheet"; + /** + * @description __Subfolder Path Format__: You can configure the platform to automatically create sub-folders and partition files into those sub-folders. + * + * Use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create datetime sub-folders. You can also split folders based on the values of an attribute in the Nexset by using the macro `{record.}`. + * + * Default Value: `{YYYY}/{MM}/{dd}` + * + * __Applicable__: Only if `ui.selected_file_mime_type` is absent + */ + "output.dir.name.pattern"?: string; + /** + * @description __Custom File Name Prefix__: Generated file names are in the format --. Set this property to define the prefix of each file. + * + * You can use Nexla system macros like `{YYYY}`, `{MM}`, etc. to create datetime patterns. + * + * __Applicable__: Only if `ui.selected_file_mime_type` is absent + */ + "file.name.prefix"?: string; + /** + * @description __Maximum File Size (in MB)__: The maximum size (MB) of each generated file. Data will be automatically partitioned into multiple files. 
+ * + * Default Value: `4096` + * + * __Applicable__: Only if `ui.selected_file_mime_type` is absent + */ + "max.file.size.mb"?: number; + /** + * @description __File Format__: Set the output format of the generated files. + * + * @enum {string} + */ + data_format: "csv" | "tsv" | "json" | "xml" | "xlsx" | "edi" | "avro" | "parquet" | "orc" | "GOOGLE_SPREADSHEET"; + /** + * @description __Row Insertion Mode__: If you are writing the Nexset to a specific sheet, choose whether Nexla should completely overwrite an existing sheet while writing out data during the latest execution or always append new records as new rows. + * + * __Applicable and required__: only if `ui.selected_file_mime_type` is `application/vnd.google-apps.spreadsheet`. + * + * @enum {string} + */ + "insert.mode"?: "OVERWRITE" | "INSERT_ROWS"; + /** + * @description __Configure Sheet Name And Starting Cell__: If you are writing the Nexset to a specific sheet, you can configure the name of the sheet and/or starting cell for the data output. + * + * This is an optional step that allows you to define the exact sheet name and/or starting cell for the generated spreadsheet. + * + * __Applicable__: only if `ui.selected_file_mime_type` is `application/vnd.google-apps.spreadsheet`. + */ + range?: string; + /** + * @description __Cell Range To Clear Before Writing Data__: If you are writing the Nexset to a specific sheet, you can configure the platform to clear a specific cell range before writing a new batch of records. + * + * __Applicable__: only if `ui.selected_file_mime_type` is `application/vnd.google-apps.spreadsheet`. + */ + "clear.range"?: string; + }; + }) & components["schemas"]["file_data_sink"]; + google_pubsub_data_sink: { + sink_type: "google_pubsub"; + } & Omit & components["schemas"]["stream_data_sink"] & { + sink_config?: { + /** @description __Topic__: Select the topic for your data. 
*/ + topic: string; + /** @description __Subscription__: Set the subscription name if you are choosing to create a new topic. */ + subscription?: string; + /** + * @description __Include Nexla Metadata in Message?__: Enable this check to include Nexla metadata as part of the message. + * + * Default value: `false` + */ + "include.metadata"?: boolean; + /** @description __Attribute used as Message Key__: Set the Nexset record attribute that will be used as a key for the Kafka message. */ + "message.key"?: string; + }; + }; + jms_data_sink: { + sink_type: "jms"; + } & Omit & components["schemas"]["stream_data_sink"] & ({ + sink_config?: { + /** + * @description __Type of Destination__: Select whether this destination is a topic or a queue. + * + * @enum {string} + */ + "target.type": "topic" | "queue"; + /** @description __Topic / Queue Name __: Configure the name of the topic/queue where data should be written. */ + "target.name": string; + /** + * @description __Data Format__: Select the data format parser to be used for data written to this topic or queue. This is usually `json`. + * + * @enum {string} + */ + "writer.type": "csv" | "tsv" | "txt" | "xml" | "json"; + }; + }); + kafka_data_sink: { + sink_type: "kafka"; + } & Omit & components["schemas"]["stream_data_sink"] & { + sink_config?: { + /** @description __Topic__: Select the topic for your data. */ + topic: string; + /** + * @description __Include Nexla Metadata in Message?__: Enable this check to include Nexla metadata as part of the message. + * Default value: `false` + */ + "include.metadata"?: boolean; + /** @description __Attribute used as Message Key__: Set the Nexset record attribute that will be used as a key for the Kafka message. 
*/ + "message.key"?: string; + }; + }; + min_io_s3_data_sink: { + sink_type: "min_io_s3"; + } & Omit & components["schemas"]["file_data_sink"]; + mongo_data_sink: { + sink_type: "mongo"; + } & Omit & components["schemas"]["nosql_data_sink"]; + mysql_data_sink: { + sink_type: "mysql"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "TINYINT" | "SMALLINT" | "MEDIUMINT" | "INT" | "BIGINT" | "DECIMAL" | "FLOAT" | "DOUBLE" | "BIT" | "BOOLEAN" | "BOOL" | "CHAR" | "VARCHAR(4096)" | "BINARY" | "VARBINARY(65535)" | "TINYBLOB" | "BLOB" | "MEDIUMBLOB" | "LONGBLOB" | "TINYTEXT" | "TEXT" | "MEDIUMTEXT" | "LONGTEXT" | "ENUM" | "SET" | "DATE" | "DATETIME" | "TIMESTAMP" | "YEAR"; + }; + }; + }; + }; + }); + netsuite_jdbc_data_sink: { + sink_type: "netsuite_jdbc"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BIGINT" | "INT8" | "BIGSERIAL" | "SERIAL8" | "BIT" | "BIT_VARYING" | "VARBIT" | "BOOLEAN" | "BOOL" | "BOX" | "BYTEA" | "CHAR" | "VARCHAR" | "CHARACTER VARYING" | "CIDR" | "CIRCLE" | "DATE" | "DOUBLE PRECISION" | "INET" | "INTEGER" | "INT" | "INT4" | "INTERVAL" | "JSON" | "JSONB" | "LINE" | "LSEG" | "MACADDR" | "MONEY" | "NUMERIC" | "DECIMAL" | "PATH" | "PG_LSN" | "POINT" | "POLYGON" | "REAL" | "FLOAT4" | "SMALLINT" | "INT2" | "SMALLSERIAL" | "SERIAL2" | "SERIAL" | "SERIAL4" | "TEXT" | "TIME" | "TIMETZ" | "TIMESTAMP" | "TIMESTAMPTZ" | "TSQUERY" | "TSVECTOR" | "TXID_SNAPSHOT" | "UUID" | "XML"; + }; + }; + }; + }; + }); + oracle_data_sink: { + sink_type: "oracle"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "VARCHAR2(3000)" | "NUMBER(10,0)" | "NUMBER(1,0)" | "BINARY_FLOAT" | "BINARY_DOUBLE" | "RAW(2000)" | "LONG RAW" | "CHAR(2000)" | "NCHAR(2000)" | "BLOB" | "DATE" | "INTERVAL YEAR TO 
MONTH" | "INTERVAL DAY TO SECOND"; + }; + }; + }; + }; + }); + oracle_autonomous_data_sink: { + sink_type: "oracle_autonomous"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "VARCHAR2(3000)" | "NUMBER(10,0)" | "NUMBER(1,0)" | "BINARY_FLOAT" | "BINARY_DOUBLE" | "RAW(2000)" | "LONG RAW" | "CHAR(2000)" | "NCHAR(2000)" | "BLOB" | "DATE" | "TIMESTAMP" | "INTERVAL YEAR TO MONTH" | "INTERVAL DAY TO SECOND"; + }; + }; + }; + }; + }); + vector_db_data_sink: { + /** + * @description __Create Table In Destination__ If the desired table doesn't exist in your database, you can instruct Nexla to create a table when this destination is first activated. + * + * Default Value: `false` + */ + create_destination?: boolean; + sink_config?: { + /** @description __Database__: If the Destination credentials allow access to multiple databases, specify the database to which the destination table belongs. This is only needed if the `data_credentials` entry for this destination is not limited to one database. */ + database?: string; + /** @description __Table__: Set the table to which you wish to push Nexset records. */ + table: string; + /** + * @description __Table Update Mode__: Select whether records should be inserted or upserted into the database. + * + * @enum {string} + */ + "insert.mode": "INSERT" | "UPSERT"; + /** + * @description __Primary Key Columns__: Set all columns that should be set as primary keys of the table. For multiple columns, enter a comma-separated list of column names. + * + * __Required__ if `insert.mode` is set to `UPSERT` + */ + "primary.key"?: string; + /** @description __Mapping__: Set rules for how Nexset record attributes should be written into Database Columns. */ + mapping: { + /** + * @description Most databases require manual mapping of attributes into columns. With manual mapping, you can set a single attribute to be written into multiple columns. 
Additionally, you can specify the desired data format for each column. + * + * + * * `auto`: Automatically map attributes to database columns. Column data types will be inferred from record values, and the nesting of attributes will be preserved. __Only available for select warehouses and databases.__ + * + * * `manual`: Explicitly define attribute mapping and database columns. + * + * @enum {string} + */ + mode: "manual"; + /** + * @description __Attribute to Database Column Mapping__: Define how attributes should be mapped to database columns. + * + * __Required__ if `mapping.mode` is `manual` + * + * __Object Definition Rules__: + * + * 1. Each Nexset record attribute that needs to be written to one or more columns should be listed as a property of this `mapping` object. In the example, the properties are `nexset_attr_1` and `nexset_attr_2`. + * + * 2. Each database column to which the attribute needs to be written is a property of the attribute object above. In the example, `nexset_attr_1` is set to write to columns `db_col_1` and `db_col_2`. + * + * 3. Each database column property has a value that defines the desired data format allowed by the database. Here, data written to `db_col_1` will be written as `TEXT`. + * + * __Example__ + * ``` + * { + * "nexset_attr_1": + * { + * "db_col_1": "TEXT", + * "db_col_2": "TEXT" + * }, + * "nexset_attr_2": + * + * { + * "db_col_3": "FLOAT64" + * } + * } + * ``` + */ + mapping?: { + [key: string]: { + [key: string]: string; + }; + }; + /** + * @description __Tracker Mode__: Each record that flows through Nexla has an associated unique tracker ID. Set this to `RECORD` to configure the tracker ID to be written out to a database column along with the Nexset record. + * + * * `NONE`: The tracker ID won't be written to the database. + * * `RECORD`: The short form tracker ID containing all required lineage information will be written out in the relevant column. 
+ * + * @enum {string} + */ + tracker_mode: "NONE" | "RECORD"; + /** + * @description __Column Name for Nexla Tracker__: Name of the column used for the Nexla record tracker information. + * + * __Applicable and Required__ if `tracker.mode` is set to `RECORD` + */ + tracker_name?: string; + }; + /** + * @description __Allow column updates with nulls__: Set as false to allow partial upsert of a record with only non-null values. + * + * Only valid if `insert.mode` is `UPSERT` + * + * Default Value: `true` + */ + "upsert.nulls"?: boolean; + }; + }; + pinecone_data_sink: { + sink_type: "pinecone"; + } & Omit & { + sink_config?: { + /** @description __Index__: Enter the name of the index where the vectors will be stored. */ + database: string; + /** + * @description __Namespace__: Set the namespace to store the vectors within the selected Pinecone database. + * You can use the default namespace or specify a custom namespace. + * + * @default + */ + collection: string; + /** @description __Vector Mapping__: Enter the mapping of the vector fields to the columns in the collection. */ + vector_mapping?: string[]; + /** + * @description __Upsert Parallelism__: Enter the number of parallel upserts to be performed. 
+ * + * @default 1 + */ + "pinecone.upsert.parallelism"?: number; + }; + } & components["schemas"]["vector_db_data_sink"]; + postgres_data_sink: { + sink_type: "postgres"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BIGINT" | "INT8" | "BIGSERIAL" | "SERIAL8" | "BIT" | "BIT_VARYING" | "VARBIT" | "BOOLEAN" | "BOOL" | "BOX" | "BYTEA" | "CHAR" | "VARCHAR" | "CHARACTER VARYING" | "CIDR" | "CIRCLE" | "DATE" | "DOUBLE PRECISION" | "INET" | "INTEGER" | "INT" | "INT4" | "INTERVAL" | "JSON" | "JSONB" | "LINE" | "LSEG" | "MACADDR" | "MONEY" | "NUMERIC" | "DECIMAL" | "PATH" | "PG_LSN" | "POINT" | "POLYGON" | "REAL" | "FLOAT4" | "SMALLINT" | "INT2" | "SMALLSERIAL" | "SERIAL2" | "SERIAL" | "SERIAL4" | "TEXT" | "TIME" | "TIMETZ" | "TIMESTAMP" | "TIMESTAMPTZ" | "TSQUERY" | "TSVECTOR" | "TXID_SNAPSHOT" | "UUID" | "XML"; + }; + }; + }; + }; + }); + redshift_data_sink: { + sink_type: "redshift"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + /** @description __Partitioning Column__: You can choose to have the data partitioned by a specific column when it is written to the destination table. Set the partitioning column if you wish to leverage that capability. */ + "partitioning.column"?: string; + /** @description __Clustering Columns__: You can choose to leverage column clustering when data is written to the destination tables. Set this value to a comma-separated list of columns that should be used for clustering. 
*/ + "clustering.columns"?: string; + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "SMALLINT" | "INT2" | "INTEGER" | "INT" | "INT4" | "BIGINT" | "INT8" | "DECIMAL(18,4)" | "NUMERIC(18,4)" | "REAL" | "FLOAT4" | "DOUBLE PRECISION" | "FLOAT8" | "BOOLEAN" | "BOOL" | "CHAR" | "CHARACTER" | "VARCHAR(65535)" | "CHARACTER VARYING" | "DATE" | "TIMESTAMP" | "TIMESTAMPTZ"; + }; + }; + }; + }; + }); + rest_data_sink: { + sink_type: "rest"; + } & Omit & ({ + sink_config?: { + /** + * @description __URL__: Set the URL you wish to send data to. + * You can use Nexla system macros like `{YYYY}`, `{MM}`, etc., and the output will be automatically converted into the corresponding time values. You can also set dynamic URLs based on the values of an attribute in the record by using the macro `{record.}`. + */ + "url.template": string; + /** + * @description __Method__: HTTP method used for this rest endpoint. This will usually be `POST`, `PUT`, `GET`, `PATCH`, or `DELETE`. + * + * @enum {string} + */ + method: "POST" | "PUT" | "GET" | "PATCH" | "DELETE"; + /** + * @description __Content Format__: Content format used for this rest endpoint. Usually, this will be `application/json`. + * + * @enum {string} + */ + "content.type": "application/json" | "application/xml"; + /** + * @description __Nexset Record to Payload Template__: In the case of 1 API call per Nexset record, you can customize how a Nexset record is added to the API payload. The record itself is referenced as a `{message.json}` macro. + * Default and recommended value: `{message.json}` + * Note: + * 1. Usually, we recommend formatting the API payload with a Nexset transform. + * 2. This property should not be used to format the payload when multiple Nexset records have to be batched together into 1 API call. In this case, please use the `batch.mode` and `body.transform.function` properties. 
+ */ + "body.template": string; + /** + * @description __Would you like to batch your records together?__: Select this option if you would like to combine multiple records in each API request. You can set the batching algorithm by configuring the related properties. + * Default Value: `false` + * __Supported__ only if `content.type` is `application/json`. + */ + "batch.mode"?: boolean; + /** + * @description __Maximum Poll Records__: Set the maximum number of records that can be batched together when combining multiple records into an API call. + * __Applicable and required__ if `batch.mode` is `true`. + */ + "max.poll.records"?: number; + /** + * @description __Batching Algorithm__: You can define how the platform should combine multiple records in a single API call with a Scala function. Use `messages` to reference records within your Scala function. + * Sample Scala Functions: + * 1. Payload: JSON array of records: + * `"body.transform.function": "messages"` + * 2. Payload: JSON object containing array of records as a property (e.g., items): + * `"body.transform.function": "Map(\"items\" -> messages)"` + * 3. Payload: JSON object with each record modified before batching: + * `"body.transform.function": "Map(\"items\"-> messages.map(m => {\nm.put(\"attributes\" , Map(\"type\"-> \"Account\"))\n m\n})\n)"` + * will result in + * `{ + * "items:[ + * { ...record1, "attributes": {"type": "Account"}}, + * { ...record2, "attributes": {"type": "Account"}} + * ] + * }` + */ + "body.transform.function"?: string; + /** + * @description __Capture API Response__: Set this to `true` if you would like to capture the API response for each API call. Responses, along with requests, will be pushed to an autogenerated Nexla webhook. + * Default value: `false`. 
+ */ + "create.datasource"?: boolean; + }; + }); + s3_data_sink: { + sink_type: "s3"; + } & Omit & components["schemas"]["file_data_sink"]; + s3_iceberg_data_sink: { + sink_type: "s3_iceberg"; + } & Omit & ({ + sink_config?: { + /** + * @description __Warehouse Directory__: Path to the directory where you want the Iceberg table to be created/updated. + * Nexla uses the path-based Hadoop catalog to write to tables in S3. So if you wish to write to Iceberg table `sales` in `s3://my-nexla-bucket/product/sales`, then set this property to `my-nexla-bucket/product`. + */ + "iceberg.warehouse.dir": string; + /** + * @description __Table name__: Name of the table where data will be appended. + * Nexla uses the path-based Hadoop catalog to write to tables in S3. So if you wish to write to Iceberg table `sales` in `s3://my-nexla-bucket/product/sales`, then set this property to `sales` to write to the table. + */ + "iceberg.table.name": string; + /** + * @description __Update Mode__: Select the mode to insert data into the table. The default mode is `insert` which appends data to the table. The `upsert` mode will update records if they already exist in the table. + * + * @default upsert + * @enum {string} + */ + "iceberg.insert.mode"?: "insert" | "upsert"; + /** @description __ID Fields for Upserts__: Comma separated list of column names that will be used to identify records for upserts. If this field is not set, Nexla will append data to the table. */ + "iceberg.id-fields"?: string; + /** + * @description __Enable Change Data Capture (CDC)__: Select this option if you would like the platform to monitor database transaction logs for ingesting data. + * Note that your DBA will need to grant necessary Change Data Capture permissions for Nexla to access transaction logs. Contact Nexla support for relevant instructions. 
+ * If there are limitations preventing you from enabling CDC controls on your database, you can still setup this source for incremental ingestion by disabling this option and instead selecting incremental table ingestion rules. + * + * @default true + */ + "cdc.enabled"?: boolean; + /** @description __Table name prefix__: Prefix for table name. */ + table_name_prefix?: string; + /** @description __Table name suffix__: Suffix for table name. */ + table_name_suffix?: string; + /** @description __Partition keys__: Comma separated list of column names to use for partitioning when creating a new table. If the table already exists we will not modify the table spec and this option will be ignored. */ + "iceberg.partition-keys"?: string; + }; + }); + sharepoint_data_sink: { + sink_type: "sharepoint"; + } & Omit & components["schemas"]["file_data_sink"]; + snowflake_data_sink: { + sink_type: "snowflake"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + /** @description __Partitioning Column__: You can choose to have the data partitioned by a specific column when it is written to the destination table. Set the partitioning column if you wish to leverage that capability. */ + "partitioning.column"?: string; + /** @description __Clustering Columns__: You can choose to leverage column clustering when data is written to the destination tables. Set this value to a comma-separated list of columns that should be used for clustering. 
*/ + "clustering.columns"?: string; + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "NUMBER" | "DECIMAL" | "NUMERIC" | "INT" | "INTEGER" | "BIGINT" | "SMALLINT" | "FLOAT" | "FLOAT4" | "FLOAT8" | "DOUBLE" | "DOUBLE PRECISION" | "REAL" | "VARCHAR" | "CHAR" | "CHARACTER" | "STRING" | "TEXT" | "BINARY" | "VARBINARY" | "BOOLEAN" | "DATE" | "DATETIME" | "TIME" | "TIMESTAMP" | "TIMESTAMP_LTZ" | "TIMESTAMP_NTZ" | "TIMESTAMP_TZ" | "VARIANT" | "OBJECT" | "ARRAY"; + }; + }; + }; + }; + }); + snowflake_dcr_data_sink: { + sink_type: "snowflake_dcr"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + /** @description __Partitioning Column__: You can choose to have the data partitioned by a specific column when it is written to the destination table. Set the partitioning column if you wish to leverage that capability. */ + "partitioning.column"?: string; + /** @description __Clustering Columns__: You can choose to leverage column clustering when data is written to the destination tables. Set this value to a comma-separated list of columns that should be used for clustering. */ + "clustering.columns"?: string; + /** @description __Account Name for Consumer__: Set the account name for the consumer. This can be retrieved using select current-account() */ + "cleanroom.consumer.accountname": string; + /** @description __Join Key Columns__: Select the columns on which the cleanroom queries have to be joined. */ + "cleanroom.join.keys"?: string; + /** @description __Dimension Columns__: Select the columns to be treated as cleanroom dimension columns. 
*/ + "cleanroom.dimensions"?: string; + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "NUMBER" | "DECIMAL" | "NUMERIC" | "INT" | "INTEGER" | "BIGINT" | "SMALLINT" | "FLOAT" | "FLOAT4" | "FLOAT8" | "DOUBLE" | "DOUBLE PRECISION" | "REAL" | "VARCHAR" | "CHAR" | "CHARACTER" | "STRING" | "TEXT" | "BINARY" | "VARBINARY" | "BOOLEAN" | "DATE" | "DATETIME" | "TIME" | "TIMESTAMP" | "TIMESTAMP_LTZ" | "TIMESTAMP_NTZ" | "TIMESTAMP_TZ" | "VARIANT" | "OBJECT" | "ARRAY"; + }; + }; + }; + }; + }); + sqlserver_data_sink: { + sink_type: "sqlserver"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BIGINT" | "INT" | "SMALLINT" | "TINYINT" | "BIT" | "NUMERIC" | "MONEY" | "SMALLMONEY" | "REAL" | "DATETIME" | "SMALLDATETIME" | "CHAR(3000)" | "VARCHAR(100)" | "VARCHAR(3000)" | "VARCHAR(max)" | "TEXT" | "NCHAR" | "NVARCHAR" | "NTEXT" | "BINARY" | "VARBINARY" | "TABLE" | "UNIQUEIDENTIFIER" | "DECIMAL(18, 4)" | "FLOAT(12)"; + }; + }; + }; + }; + }); + teradata_data_sink: { + sink_type: "teradata"; + } & Omit & components["schemas"]["database_data_sink"] & ({ + sink_config?: { + mapping?: { + mapping?: { + [key: string]: { + [key: string]: "BYTEINT" | "SMALLINT" | "INTEGER" | "BIGINT" | "FLOAT" | "DOUBLE PRECISION" | "VARCHAR" | "VARBYTE(64000)" | "DATE" | "TIME" | "TIMESTAMP"; + }; + }; + }; + }; + }); + tibco_data_sink: { + sink_type: "tibco"; + } & Omit & components["schemas"]["stream_data_sink"] & ({ + sink_config?: { + /** + * @description Select whether this destination is a topic or a queue. + * + * @enum {string} + */ + "target.type": "topic" | "queue"; + /** @description __Topic / Queue Name __: Configure the name of the topic/queue where data should be written. */ + "target.name": string; + /** + * @description __Data Format__: Select the data format parser for the data to be written to this topic or queue. This is usually `json`. 
+ * + * @enum {string} + */ + "writer.type": "csv" | "tsv" | "txt" | "xml" | "json"; + }; + }); + DataMap: { + /** Format: int32 */ + id?: number; + name?: string; + description?: string; + public?: boolean; + managed?: boolean; + /** @example string */ + data_type?: string; + /** Format: nullable */ + data_format?: string; + /** Format: nullable */ + data_sink_id?: string; + emit_data_default?: boolean; + use_versioning?: boolean; + /** @example id */ + map_primary_key?: string; + data_defaults?: Record; + /** + * Format: date-time + * @example 2021-07-26T21:29:58.000Z + */ + updated_at?: string; + /** + * Format: date-time + * @example 2021-07-26T21:29:58.000Z + */ + created_at?: string; + owner?: { + /** Format: int32 */ + id?: number; + full_name?: string; + /** Format: email */ + email?: string; + }; + org?: components["schemas"]["OrgSimplified"]; + access_roles?: components["schemas"]["AccessRoles"]; + /** Format: nullable */ + data_set_id?: number; + /** + * Format: int32 + * @example 200 + */ + map_entry_count?: number; + map_entry_schema?: Record; + tags?: string[]; + }; + DataMapBase: { + name?: string; + description?: string; + /** + * @description Key name which should be used as the primary key for looking up rows in this data map. + * + * @example id + */ + map_primary_key?: string; + /** @description The default values to use for any key if the key is not present in the corresponding row. */ + data_defaults?: { + [key: string]: string | number; + }; + /** @description This property defines whether a lookup query should return values with applicable default values for missing properties in the row. */ + emit_data_default?: boolean; + tags?: string[]; + }; + DataMapCreate: components["schemas"]["DataMapBase"] & Record & ({ + /** + * @description Enter an array of data map entries if you wish to seed this data map with some rows of data while creating the data map. + * + * You can also update data map rows by calling the endpoint for updating entries. 
+ */ + data_map?: ({ + [key: string]: string | number; + })[]; + }); + DataMapMutable: components["schemas"]["DataMapBase"]; + /** @enum {string} */ + ResourceType: "transform"; + DataCredentialBrief: { + id?: number; + name?: string; + description?: string; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + /** @enum {string} */ + CodeType: "jolt_standard" | "jolt_custom"; + /** @enum {string} */ + OutputType: "record"; + Transform: { + /** + * Format: int32 + * @example 1 + */ + id?: number; + /** @example test */ + name?: string; + resource_type?: components["schemas"]["ResourceType"]; + reusable?: boolean; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + access_roles?: components["schemas"]["AccessRoles"]; + data_credentials?: components["schemas"]["DataCredentialBrief"]; + runtime_data_credentials?: components["schemas"]["DataCredentialBrief"]; + /** @example test */ + description?: string; + code_type?: components["schemas"]["CodeType"]; + output_type?: components["schemas"]["OutputType"]; + code_config?: Record; + /** Format: nullable */ + custom_config?: Record; + /** @example none */ + code_encoding?: string; + code?: Record[]; + managed?: boolean; + data_sets?: number[]; + /** Format: nullable */ + copied_from_id?: number; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: string[]; + }; + TransformMutable: { + name: string; + description?: string; + /** + * @description Type of code container. This must be set to `record` to signify that this code container is to be used to convert an input record of a Nexset into an output record for that Nexset. + * + * @enum {string} + */ + output_type: "record"; + /** @description Whether or not this transform can be referenced by multiple Nexsets. This should always be `true` for reusable record transforms. 
*/ + reusable: boolean; + /** + * @description Type of code in the `code` block. Use `jolt_custom` if the code in the code block is a `python`, `python3` or `javascript` transform code block wrapped inside a Jolt operation block. + * @enum {string} + */ + code_type: "jolt_custom" | "jolt_standard"; + /** + * @description Whether or not the code in the `code` block is encoded. Set this to `none` for reusable record transforms. + * + * @enum {string} + */ + code_encoding: "none"; + /** + * @description Code to be executed for transforming records. + * If you wish to execute a Python or Javascript code snippet during transformation, you'll need to `base64`-encode that code snippet and wrap it inside the following Jolt operation object. + * + * For example, if you want to execute this Python code as a reusable record transform + * ```python + * def transform(input, metadata, args): + * # Sample python transform to pass all attributes through + * return input + * ``` + * the corresponding `code` object would be + * ```json + * [ + * { + * "operation": "nexla.custom", + * "spec": { + * "language": "python", + * "encoding": "base64", + * "script": "ZGVmIHRyYW5zZm9ybShpbnB1dCwgbWV0YWRhdGEsIGFyZ3MpOgogICMgU2FtcGxlIHB5dGhvbiB0cmFuc2Zvcm0gdG8gcGFzcyBhbGwgYXR0cmlidXRlcyB0aHJvdWdoCiAgcmV0dXJuIGlucHV0" + * } + * } + * ] + * ``` + */ + code: OneOf<[({ + /** + * @description Jolt operation for this code block. For `python`, `python3` and `javascript`, it must be `nexla.custom`. + * + * @enum {string} + */ + operation?: "nexla.custom"; + /** @description Jolt specification for this block. See the Python example above for sample specs. */ + spec?: { + /** + * @description Code language of the encoded script. + * + * @enum {string} + */ + language?: "python" | "python3" | "javascript"; + /** + * @description Python or Javascript code snippets must always be Base64-encoded. 
+ * + * @enum {string} + */ + encoding?: "base64"; + /** @description Base64-encoded Python or Javascript code snippet that must be executed for transforming records. */ + script?: string; + }; + })[], { + [key: string]: Record; + }[]]>; + /** + * @description Configuration block useful for converting code into Nexla UI's Nexset Designer Rule blocks. + * + * This should not be sent in the payload unless the transform is being created using the Nexla UI's Nexset Designer screen. + */ + custom_config?: Record; + tags?: string[]; + }; + /** + * @description The type of resource that this code container is to be used for. + * - `transform`: For user defined code that is used in a Nexset transform. + * - `ai_function`: For user defined code that is used in an AI function. See `ai_function_type` for the scenarios where this is used. + * - `source`: For user defined code in a source. + * - `sink`: For user defined code in a sink. + * - `error`: For user defined code that acts as an error handler. + * - `validator`: For user defined code that acts as a validator. + * + * @enum {string} + */ + code_containers_ResourceType: "transform" | "ai_function" | "source" | "sink" | "error" | "validator"; + /** + * @description The type of output that this code container produces. + * + * - `record`: Produces a full Nexset record. Used only when the code container is used to modify a Nexset, i.e output_type: `transform` + * - `attribute`: Produces value for an attribute inside a Nexset record. Used only when the code container is used to modify a value inside a Nexset, i.e output_type: `transform` + * - `custom`: Produces custom output. Used as a catch-all output format when a code container is used in any context other than for Nexset modification. 
+ * + * @enum {string} + */ + code_containers_OutputType: "record" | "attribute" | "custom"; + /** @enum {string} */ + code_containers_CodeType: "jolt_standard" | "jolt_custom" | "python" | "python3" | "javascript"; + CodeContainerMutable: { + /** @example test */ + name: string; + /** @example test */ + description?: string; + /** + * Format: nullable + * @description Credential ID for accessing the code repository (e.g., Github). This is for code containers in which the code is saved in a remote repository. + * + * Note that this is not required for script connectors in which the script is hosted in a Nexla Github repository. + */ + data_credentials_id?: number; + resource_type?: components["schemas"]["code_containers_ResourceType"]; + /** + * @description The type of AI function that this code container is used for. + * - `chunker`: Used to chunk parsed file content into smaller chunks in a Document Ingestion flow. + * - `context_enricher`: Used to enrich the context of a query in a RAG flow. + * - `query_rewriter`: Used to rewrite a query in a RAG flow. + * - `reranker`: Used to rerank a list of results in a RAG flow. + * + * @enum {string} + */ + ai_function_type?: "chunker" | "context_enricher" | "query_rewriter" | "reranker"; + output_type: components["schemas"]["code_containers_OutputType"]; + code_type: components["schemas"]["code_containers_CodeType"]; + /** + * @description Whether or not the code in the `code` block is encoded. Set this to `none` for reusable record transforms. + * + * @enum {string} + */ + code_encoding: "none" | "base64"; + /** + * @description Code to be executed in this code container. + * + * Please refer to the endpoints for `transforms`, `attribute_transforms` and `validators` for respective `code` block requirements. 
+ */ + code: OneOf<[string, { + /** @example nexla.custom */ + operation?: string; + spec?: { + /** @example python */ + language?: string; + /** @example base64 */ + encoding?: string; + script?: string; + }; + }[]]>; + /** @description Whether or not this transform can be referenced by multiple resources. This should always be `true` for reusable record and attribute transforms. */ + reusable: boolean; + tags?: string[]; + custom_config?: Record; + }; + CodeContainer: { + /** + * Format: int32 + * @example 1 + */ + id?: number; + /** @example test */ + name?: string; + resource_type?: components["schemas"]["code_containers_ResourceType"]; + /** + * @description The type of AI function that this code container is used for. + * - `chunker`: Used to chunk parsed file content into smaller chunks in a Document Ingestion flow. + * - `context_enricher`: Used to enrich the context of a query in a RAG flow. + * - `query_rewriter`: Used to rewrite a query in a RAG flow. + * - `reranker`: Used to rerank a list of results in a RAG flow. 
+ * + * @enum {string} + */ + ai_function_type?: "chunker" | "context_enricher" | "query_rewriter" | "reranker"; + reusable?: boolean; + public?: boolean; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + access_roles?: components["schemas"]["AccessRoles"]; + data_credentials?: components["schemas"]["DataCredentialBrief"]; + runtime_data_credentials?: components["schemas"]["DataCredentialBrief"]; + /** @example test */ + description?: string; + code_type?: components["schemas"]["code_containers_CodeType"]; + output_type?: components["schemas"]["code_containers_OutputType"]; + code_config?: Record; + /** Format: nullable */ + custom_config?: Record; + /** @example none */ + code_encoding?: string; + code?: { + /** @example nexla.custom */ + operation?: string; + spec?: { + /** @example python */ + language?: string; + /** @example base64 */ + encoding?: string; + /** @example ZGVmIHRyYW5zZm9ybShpbnB1dCwgbWV0YWRhdGEsIGFyZ3MpOgogICMgU2FtcGxlIHB5dGhvbiB0cmFuc2Zvcm0gdG8gcGFzcyBhbGwgYXR0cmlidXRlcyB0aHJvdWdoCiAgcmV0dXJuIGlucHV0 */ + script?: string; + }; + }[]; + managed?: boolean; + data_sets?: number[]; + /** Format: nullable */ + copied_from_id?: number; + /** + * Format: date-time + * @example 2023-01-24T03:26:43.000Z + */ + updated_at?: string; + /** + * Format: date-time + * @example 2023-01-24T03:26:43.000Z + */ + created_at?: string; + tags?: string[]; + }; + /** @enum {string} */ + attribute_transforms_CodeType: "python" | "javascript"; + /** @enum {string} */ + attribute_transforms_OutputType: "attribute"; + AttributeTransform: { + /** + * Format: int32 + * @example 1 + */ + id?: number; + /** @example test */ + name?: string; + resource_type?: components["schemas"]["ResourceType"]; + reusable?: boolean; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + access_roles?: components["schemas"]["AccessRoles"]; + data_credentials?: 
components["schemas"]["DataCredentialBrief"]; + runtime_data_credentials?: components["schemas"]["DataCredentialBrief"]; + /** @example test */ + description?: string; + code_type?: components["schemas"]["attribute_transforms_CodeType"]; + output_type?: components["schemas"]["attribute_transforms_OutputType"]; + code_config?: Record; + /** Format: nullable */ + custom_config?: Record; + /** @example none */ + code_encoding?: string; + code?: string; + managed?: boolean; + data_sets?: number[]; + /** Format: nullable */ + copied_from_id?: number; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: string[]; + }; + AttributeTransformMutable: { + name: string; + description?: string; + /** + * @description Type of code container. This must be set to `attribute` to signify that this code container is to be used for generating the value of an output attribute. + * + * @enum {string} + */ + output_type: "attribute"; + /** @description Whether or not this transform can be referenced by multiple Nexsets. This should always be `true` for reusable transforms. */ + reusable: boolean; + /** + * @description Type of code in the code block. + * @enum {string} + */ + code_type: "python" | "python3" | "javascript"; + /** + * @description Whether or not the code in the `code` block is encoded. Set this to `base64` for reusable attribute transforms. + * + * @enum {string} + */ + code_encoding: "base64"; + /** + * @description Code to be executed for transforming records. + * If you wish to execute a Python or Javascript code snippet during transformation, you'll need to `base64`-encode that code snippet. 
+ * + * For example, if you want to execute this Python code as a reusable record transform + * ```python + * def transformAttribute(input, metadata, args): + * return input.get(args[0]) + * ``` + * the corresponding `code` value would be + * ``` + * ZGVmIHRyYW5zZm9ybUF0dHJpYnV0ZShpbnB1dCwgbWV0YWRhdGEsIGFyZ3MpOgogIHJldHVybiBpbnB1dC5nZXQoYXJnc1swXSk= + * ``` + */ + code: string; + /** + * @description Configuration block useful for converting code into Nexla UI's Nexset Designer Rule blocks. + * + * This should not be sent in the payload unless the transform is being created using the Nexla UI's Nexset Designer screen. + */ + custom_config?: Record; + tags?: string[]; + }; + ProjectDataFlow: { + /** @description Unique identifier of this flow. */ + id?: number; + /** @description Unique identifier of the project this flow belongs to. */ + project_id?: number; + /** + * Format: nullable + * @description The ID of the data source which is the root node of this flow chain. + * + * Unless the flow chain has been created from a shared Nexset, flow definitions start from the source as the root node. This property reflects the source that is the root node of this flow. + */ + data_source_id?: number; + /** + * Format: nullable + * @description The ID of the Nexset which is the root node of this flow chain. + * + * This property is not null only if the flow was created from a shared Nexset. In that case, this value reflects the ID of the derived dataset created from the shared Nexset. + */ + data_set_id?: number; + /** @example 2023-01-31T01:39:54.000Z */ + updated_at?: string; + /** @example 2023-01-31T01:39:54.000Z */ + created_at?: string; + }; + Project: { + /** + * Format: int32 + * @description Unique identifier of this project. 
+ */ + id?: number; + owner?: { + /** Format: int32 */ + id?: number; + /** @example John Johnson */ + full_name?: string; + /** @example example@nexla.com */ + email?: string; + }; + org?: { + /** Format: int32 */ + id?: number; + /** @example Nexla */ + name?: string; + /** @example nexla.com */ + email_domain?: string; + /** Format: nullable */ + client_identifier?: string; + }; + /** @example test project */ + name?: string; + /** @example it's a test project */ + description?: string; + /** + * @description List of all flows that are part of this project. + * + * > Note: This is a deprecated format of representing flows. We recommend using the `flows` entry in this object. + */ + data_flows?: components["schemas"]["ProjectDataFlow"][]; + /** + * @description List of all flows that are part of this project. + * + * > Note: This is a new recommended format of representing flows. We recommend using this format which uses unique flow ids instead of the `data_flows` object. + */ + flows?: components["schemas"]["ProjectDataFlow"][]; + access_roles?: components["schemas"]["AccessRoles"]; + tags?: string[]; + /** Format: nullable */ + copied_from_id?: string; + /** + * Format: date-time + * @example 2023-01-31T01:39:54.000Z + */ + updated_at?: string; + /** + * Format: date-time + * @example 2023-01-31T01:39:54.000Z + */ + created_at?: string; + }; + ProjectFlowIdentifier: { + /** + * @description The ID of the data source which is the root node of this flow chain. + * + * Unless the flow chain has been created from a shared Nexset, flow definitions start from the source as the root node. This property reflects the source that is the root node of this flow. + */ + data_source_id?: number; + } | { + /** + * @description The ID of the Nexset which is the root node of this flow chain. + * + * This property is not null only if the flow was created from a shared Nexset. In that case, this value reflects the ID of the derived dataset created from the shared Nexset. 
+ */ + data_set_id?: number; + }; + ProjectFlowList: { + data_flows?: components["schemas"]["ProjectFlowIdentifier"][]; + }; + ProjectMutable: { + /** @example test project */ + name?: string; + /** @example it's a test project */ + description?: string; + } & components["schemas"]["ProjectFlowList"]; + ProjectCreate: components["schemas"]["ProjectMutable"]; + ProjectFlowListFlowNodes: { + flows?: number[]; + }; + CustodiansPayload: { + id?: number; + email?: string; + }[]; + OrgsUpdate: { + name?: string; + owner_id?: number; + billing_owner_id?: number; + email_domain?: string; + /** + * @description Recommended but not required. This is a unique identifier for the organization in the environment. This identifier is especially useful for organizations that are using SSO. + * + * Recommendation: Use the organization's domain name. Apply variants if a company needs more than one organization account in the environment. + */ + client_identifier?: string; + /** + * @description Set to `false` if an organization requires no user to be allowed username-password-based access. Applicable for organizations who use GSuite email or have SSO enabled. + * + * Default value: `true` + */ + enable_nexla_password_login?: boolean; + custodians?: components["schemas"]["CustodiansPayload"]; + }; + /** + * @description Indicates whether the user's membership in this organization is active or not. + * + * If the membership is DEACTIVATED the user can no longer access the account but the resources owned by them will still be accessible by the organization administrators. + * + * @enum {string} + */ + OrgMembershipStatus: "ACTIVE" | "DEACTIVATED"; + /** + * @description Indicates the user's account status across all organizations they are members of. + * + * @enum {string} + */ + UserStatus: "ACTIVE" | "DEACTIVATED" | "SOURCE_COUNT_CAPPED" | "SOURCE_DATA_CAPPED" | "TRIAL_EXPIRED"; + OrgMember: { + /** @description Nexla User ID of this user. 
*/ + id?: number; + full_name?: string; + email?: string; + /** @description Indicates whether this user is an administrator for the organization or not. */ + "is_admin?"?: boolean; + access_role?: components["schemas"]["AccessRoles"]; + org_membership_status?: components["schemas"]["OrgMembershipStatus"]; + user_status?: components["schemas"]["UserStatus"]; + }; + OrgMemberMutable: OneOf<[{ + /** @description Nexla User ID of the user to be added or updated. */ + id: number; + access_role?: components["schemas"]["AccessRoles"]; + }, { + /** @description Email ID of the user to be added or updated. If a user with this Email ID already exists on the platform then the API will update that account with this membership rule. */ + email: string; + access_role?: components["schemas"]["AccessRoles"]; + }]>; + OrgMemberList: { + members?: components["schemas"]["OrgMemberMutable"][]; + }; + OrgMemberDelete: { + members?: OneOf<[{ + /** @description Nexla User ID of the user who should be removed. */ + id: number; + }, { + /** @description Email ID of the user who should be removed. */ + email: string; + }]>[]; + }; + TeamMemberResponseSchema: { + /** @description Unique ID of the user. */ + id?: number; + /** + * Format: email + * @description Email ID of the user. + */ + email?: string; + /** @description Reflects whether the user is an administrator of this team or not. */ + admin?: boolean; + }; + Team: { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + member?: boolean; + members?: components["schemas"]["TeamMemberResponseSchema"][]; + access_roles?: components["schemas"]["AccessRoles"]; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: string[]; + }; + TeamMemberRequestSchema: OneOf<[{ + /** @description Unique ID of the user. 
*/ + id?: number; + /** @description Reflects whether the user is an administrator of this team or not. */ + admin?: boolean; + }, { + /** + * Format: email + * @description Email ID of the user. + */ + email?: string; + /** @description Reflects whether the user is an administrator of this team or not. */ + admin?: boolean; + }]>; + TeamMutable: { + name?: string; + description?: string; + members?: components["schemas"]["TeamMemberRequestSchema"][]; + }; + TeamCreate: components["schemas"]["TeamMutable"] & Record; + TeamMemberList: { + members?: components["schemas"]["TeamMemberResponseSchema"][]; + }; + /** + * @description Indicates whether a user is on a paid, free, or trial account. Used to determine limits on records processed by the account. + * + * @enum {string} + */ + UserTier: "FREE" | "TRIAL" | "PAID" | "FREE_FOREVER"; + OrgMembership: { + /** Format: int32 */ + id?: number; + name?: string; + "is_admin?"?: boolean; + org_membership_status?: components["schemas"]["OrgMembershipStatus"]; + }; + User: { + id?: number; + /** Format: email */ + email?: string; + full_name?: string; + super_user?: boolean; + impersonated?: boolean; + default_org?: { + id?: number; + name?: string; + }; + user_tier?: components["schemas"]["UserTier"]; + status?: components["schemas"]["UserStatus"]; + account_locked?: boolean; + org_memberships?: components["schemas"]["OrgMembership"][]; + /** Format: date-time */ + email_verified_at?: string; + /** Format: date-time */ + tos_signed_at?: string; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + UserAdmin: OneOf<["*", boolean, { + admin?: boolean; + org_id?: number; + }[]]>; + UsersCreateRequired: { + full_name: string; + email: string; + default_org_id?: number; + status?: components["schemas"]["UserStatus"]; + user_tier_id?: number; + user_tier?: string; + password?: string; + /** Format: date-time */ + tos_signed_at?: string; + admin?: 
components["schemas"]["UserAdmin"]; + }; + AccountSummary: { + data_sources?: { + counts?: { + /** Format: int32 */ + total?: number; + /** Format: int32 */ + owner?: number; + /** Format: int32 */ + collaborator?: number; + /** Format: int32 */ + active?: number; + /** Format: int32 */ + paused?: number; + /** Format: int32 */ + draft?: number; + }; + }; + data_sets?: { + counts?: { + /** Format: int32 */ + total?: number; + /** Format: int32 */ + owner?: number; + /** Format: int32 */ + collaborator?: number; + /** Format: int32 */ + active?: number; + /** Format: int32 */ + paused?: number; + /** Format: int32 */ + draft?: number; + }; + }; + data_sinks?: { + counts?: { + /** Format: int32 */ + total?: number; + /** Format: int32 */ + owner?: number; + /** Format: int32 */ + collaborator?: number; + /** Format: int32 */ + active?: number; + /** Format: int32 */ + paused?: number; + /** Format: int32 */ + draft?: number; + }; + }; + data_maps?: { + counts?: { + /** Format: int32 */ + total?: number; + /** Format: int32 */ + owner?: number; + /** Format: int32 */ + collaborator?: number; + }; + }; + }; + UserExpanded: components["schemas"]["User"] & { + account_summary?: components["schemas"]["AccountSummary"]; + }; + UsersUpdate: { + name?: string; + email?: string; + status?: components["schemas"]["UserStatus"]; + user_tier_id?: number; + user_tier?: string; + password?: string; + password_confirmation?: string; + password_current?: string; + /** Format: date-time */ + tos_signed_at?: string; + admin?: components["schemas"]["UserAdmin"]; + }; + UserTransferable: { + /** Format: nullable */ + catalog_configs?: number[]; + /** Format: nullable */ + code_containers?: number[]; + /** Format: nullable */ + code_filters?: number[]; + /** Format: nullable */ + custom_data_flows?: number[]; + /** Format: nullable */ + dashboard_transforms?: number[]; + /** Format: nullable */ + data_credentials?: number[]; + /** Format: nullable */ + data_maps?: number[]; + /** Format: 
nullable */ + data_schemas?: number[]; + /** Format: nullable */ + data_sets?: number[]; + /** Format: nullable */ + data_sets_api_keys?: number[]; + /** Format: nullable */ + data_sinks?: number[]; + /** Format: nullable */ + data_sinks_api_keys?: number[]; + /** Format: nullable */ + data_sources?: number[]; + /** Format: nullable */ + data_sources_api_keys?: number[]; + /** Format: nullable */ + doc_containers?: number[]; + /** Format: nullable */ + org_idp_mappings?: number[]; + /** Format: nullable */ + projects?: number[]; + /** Format: nullable */ + teams?: number[]; + /** Format: nullable */ + users_api_keys?: number[]; + }; + UserTransferred: components["schemas"]["UserTransferable"] & { + transfer_user_resources?: { + previous_owner_id?: number; + new_owner_id?: number; + org_id?: number; + }; + }; + UserSettings: { + id?: string; + owner?: { + id?: number; + full_name?: string; + email?: string; + email_verified_at?: string; + }; + org?: components["schemas"]["OrgSimplified"]; + user_settings_type?: string; + settings?: Record; + }; + Notification: { + id?: number; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + access_roles?: components["schemas"]["AccessRoles"]; + level?: string; + resource_id?: number; + resource_type?: string; + message_id?: number; + message?: string; + /** Format: date-time */ + read_at?: string; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + async_or_null: components["schemas"]["AsyncResponse"] | null; + /** @enum {string} */ + NotificationTypeCategory: "PLATFORM" | "SYSTEM" | "DATA"; + /** @enum {string} */ + NotificationEventType: "SHARE" | "CREATE" | "DELETE" | "UPDATE" | "ACTIVATE" | "PAUSE" | "METRICS" | "RESETPASS" | "ERROR_AGGREGATED" | "ERROR" | "MONITOR" | "WRITE" | "EMPTY_DATA" | "READ_START" | "READ_DONE" | "WRITE_START" | "WRITE_DONE"; + /** @enum {string} */ + NotificationResourceType: "ORG" | "USER" | 
"DATA_FLOW" | "CUSTOM_DATA_FLOW" | "SOURCE" | "DATASET" | "SINK"; + NotificationType: { + id?: number; + name?: string; + description?: string; + category?: components["schemas"]["NotificationTypeCategory"]; + default?: boolean; + status?: boolean; + event_type?: components["schemas"]["NotificationEventType"]; + resource_type?: components["schemas"]["NotificationResourceType"]; + }; + /** @enum {string} */ + NotificationChannel: "APP" | "EMAIL" | "SMS" | "SLACK" | "WEBHOOKS"; + NotificationChannelSetting: { + id?: number; + owner_id?: number; + org_id?: number; + channel?: components["schemas"]["NotificationChannel"]; + config?: { + [key: string]: string; + }; + }; + NotificationChannelSettingUpdate: { + channel?: components["schemas"]["NotificationChannel"]; + config?: { + [key: string]: string; + }; + }; + NotificationChannelSettingCreateRequired: components["schemas"]["NotificationChannelSettingUpdate"] & Record; + /** + * @description Whether or not the user should be notified about this event for this resource on the selected channel. + * + * @enum {string} + */ + NotificationSettingStatus: "PAUSED" | "ACTIVE"; + NotificationSetting: { + id?: number; + org_id?: number; + owner_id?: number; + channel?: components["schemas"]["NotificationChannel"]; + notification_resource_type?: components["schemas"]["NotificationResourceType"]; + resource_id?: number; + config?: Record; + priority?: number; + status?: components["schemas"]["NotificationSettingStatus"]; + notification_type_id?: number; + name?: string; + description?: string; + code?: number; + category?: string; + event_type?: components["schemas"]["NotificationEventType"]; + resource_type?: components["schemas"]["NotificationResourceType"]; + }; + NotificationSettingUpdate: { + channel?: components["schemas"]["NotificationChannel"]; + status?: components["schemas"]["NotificationSettingStatus"]; + /** @description Configuration properties for customizing the criteria for firing the event. 
*/ + config?: { + [key: string]: string; + }; + notification_resource_type?: components["schemas"]["NotificationResourceType"]; + resource_id?: number; + checked?: boolean; + notification_channel_setting_id?: number; + notification_type_id?: number; + }; + NotificationSettingCreateRequired: components["schemas"]["NotificationSettingUpdate"] & Record; + /** @enum {string} */ + NotificationSettingResourceStatus: "INIT" | "PAUSED" | "ACTIVE" | "RATE_LIMITED"; + NotificationSettingTypeView: { + /** Format: int32 */ + setting_id?: number; + /** Format: int32 */ + org_id?: number; + /** Format: int32 */ + owner_id?: number; + channel?: components["schemas"]["NotificationChannel"]; + resource_type?: components["schemas"]["NotificationResourceType"]; + /** Format: int32 */ + resource_id?: number; + /** Format: nullable */ + setting_config?: string; + /** Format: int32 */ + priority?: number; + status?: components["schemas"]["NotificationSettingStatus"]; + /** Format: int32 */ + notification_type_id?: number; + setting_created_at?: string; + setting_updated_at?: string; + notification_type_name?: string; + notification_type_description?: string; + /** Format: int32 */ + notification_type_code?: number; + notification_type_category?: string; + notification_type_event_type?: components["schemas"]["NotificationEventType"]; + /** Format: int32 */ + resource_owner_id?: number; + /** Format: int32 */ + resource_org_id?: number; + resource_name?: string; + resource_description?: string; + resource_status?: components["schemas"]["NotificationSettingResourceStatus"]; + }; + DashboardMetricSet: { + /** + * Format: int32 + * @description The total number of records that were processed for this resource during the past 24 hours. + */ + records?: number; + /** + * Format: int32 + * @description The total volume (in bytes) of records that were processed for this resource during the past 24 hours. 
+ */ + size?: number; + /** + * Format: int32 + * @description The total number of data processing errors that occurred on this resource during the past 24 hours. + */ + errors?: number; + /** + * @description Indicates whether the platform identified this resource's performance during the last 24 hours as something that might require attention or not. + * + * @enum {string} + */ + status?: "OK" | "WARNING" | "ERROR"; + }; + ResourceMetricDaily: { + /** + * Format: date + * @description The date (in UTC) that the metrics in this entry are applicable for. + */ + time?: string; + /** + * Format: int32 + * @description The total number of records that were processed on the date indicated by the `time` property. + */ + records?: number; + /** + * Format: int32 + * @description The total volume (in bytes) of records that were processed on the date indicated by the `time` property. + */ + size?: number; + /** + * Format: int32 + * @description The total number of data processing errors that occurred on the date indicated by the `time` property. + */ + errors?: number; + }; + ResourceMetricByRun: { + /** @description The run ID / ingestion cycle the the metrics in this entry are applicable for. */ + runId?: number; + /** + * @description The destination write batch id the the metrics in this entry are applicable for. + * + * > *Note*: This entry is present and applicable only if the request was made for a destination *and* the request was for grouping by `lastWritten` instead of `runId` + */ + lastWritten?: number; + /** @description The Nexset ID these records were applicable for. For a data source, this indicates the nexset that received the records these metrics correspond to. For a data sink, this indicates the nexset that had the records these metrics correspond to. 
*/ + dataSetId?: number; + /** + * Format: int32 + * @description The total number of records that were processed during the ingestion cycle indicated by `runId` + */ + records?: number; + /** + * Format: int32 + * @description The total volume (in bytes) of records that processed during the ingestion cycle indicated by `runId` + */ + size?: number; + /** + * Format: int32 + * @description The total number of data processing errors that occurred during the ingestion cycle indicated by `runId` + */ + errors?: number; + }; + FlowResourceMetric: { + id?: number; + metric?: { + records?: number; + size?: number; + errors?: number; + }; + }; + FlowLogEntry: { + /** + * Format: unix epoch in milliseconds + * @description Timestamp at which this log entry was generated. + * + * @example 1695442864636 + */ + timestamp?: number; + /** @description The ID of the resource that generated this log entry. */ + resource_id?: number; + /** + * @description The type of flow resource that generated this log entry. + * + * @enum {string} + */ + resource_type?: "SOURCE" | "DATASET" | "SINK"; + /** + * @description Detailed information about the data processing events on the flow resource. + * + * @example Processed records=100, errors=0, size=94162" + */ + log?: string; + /** + * @description Indicates the type of event reflected by log entry. + * + * * `SUMMARY`: Log entry describing high level summary of data processing event. + * * `LOG`: Log entry describing details of data processing status. + * + * @enum {string} + */ + log_type?: "LOG" | "SUMMARY"; + /** + * @description Indicates the severity of the event reflected by this log entry. + * + * @enum {string} + */ + severity?: "INFO" | "WARNING" | "ERROR"; + }; + LogEntry: { + /** + * Format: int32 + * @description Unique ID of this change event. + */ + id?: number; + /** + * @description The type of resource that the change was performed on. 
+ * + * @example DataSource + */ + item_type?: string; + /** + * Format: int32 + * @description Unique ID of resource that the change was performed on. + */ + item_id?: number; + /** + * @description The type of change event that was executed. + * + * @example association_added + */ + event?: string; + change_summary?: string[]; + /** + * @description This object contains before and after information on each property that was modified as a result of this change event. + * + * Each key in this object is the name of the property that was modified. The value of each property is a 2-element array. The first element shows the value of the property before the change event was executed and the second element shows the value of this property after the change event was executed. + * + * For example, the values below should be interpreted as resource `status` was changed from `ACTIVE` to `PAUSED` and resource `name` was changed from `before` to `after`. + * + * ``` + * { + * "status": ["ACTIVE", "PAUSED"], + * "name": ["before", "after"] + * + * } + * + * ``` + */ + object_changes?: { + [key: string]: (string | number | boolean | Record | unknown[] | null)[]; + }; + /** + * Format: nullable + * @description Sometimes change events result in updates to relationships between resources. This object reflects information about the resource relationship that was modified. + */ + association_resource?: { + /** @description The resource type of the resource whose relationship with this resource was modified. */ + type?: string; + /** + * Format: int32 + * @description The ID of the resource whose relationship with this resource was modified. + */ + id?: number; + }; + /** + * Format: ipv4 + * @description IP Address of the device where this change event request originated. + * + * @example 1.2.3.4 + */ + request_ip?: string; + /** @description User Agent of the browser where this change event request originated. 
*/ + request_user_agent?: string; + /** + * Format: url + * @description Nexla UI or API URL that was accessed by the user to trigger this change event. + */ + request_url?: string; + /** @description Details about the user who triggered this change event. Note that if changes were triggered by a Nexla support team member acting on behalf of a user then this field reflects information about that user while `impersonator_id` reflects information about the Nexla support team member. */ + user?: { + /** Format: int32 */ + id?: number; + /** + * Format: email + * @example user@nexla.com + */ + email?: string; + }; + /** + * Format: nullable + * @description If the changes were made by a Nexla support team member acting on behalf of a user then this field reflects information about the Nexla support team member. + */ + impersonator_id?: string; + /** + * Format: int32 + * @description The ID of the organization that this resource belongs to. + */ + org_id?: number; + /** + * Format: int32 + * @description The ID of the user that this resource belongs to. + */ + owner_id?: number; + /** @description Email ID of the user that this resource belongs to. 
*/ + owner_email?: string; + /** + * Format: date-time + * @example 2022-11-08T22:51:47.720Z + */ + created_at?: string; + }; + audit_log_response: components["schemas"]["LogEntry"][] | components["schemas"]["AsyncResponse"]; + /** @enum {string} */ + QuarantineResourceType: "ORG" | "USER" | "FLOW" | "PIPELINE" | "DATA_FLOW" | "CUSTOM_DATA_FLOW" | "SOURCE" | "DATASET" | "SINK"; + DataCredential: { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + access_roles?: string[]; + credentials_version?: string; + managed?: boolean; + credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + credentials_non_secure_data?: { + [key: string]: string | Record | number; + }; + verified_status?: string; + /** Format: date-time */ + verified_at?: string; + /** Format: nullable */ + copied_from_id?: number; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + QuarantineSetting: { + /** Format: int32 */ + id?: number; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + resource_type?: components["schemas"]["QuarantineResourceType"]; + /** Format: int32 */ + resource_id?: number; + /** + * @example { + * "start.cron": "0 50 18 1 1/1 ? *", + * "path": "/nexla_error_data/export" + * } + */ + config?: { + "start.cron"?: string; + path?: string; + }; + /** Format: int32 */ + data_credentials_id?: number; + /** @example gdrive */ + credentials_type?: string; + data_credentials?: components["schemas"]["DataCredential"]; + }; + QuarantineSettingMutable: { + /** @description Nexla data credential to a file storage system where all error data should be exported. 
Configure the base folder within the location accessible by this credential by setting an appropriate value for `configure["path"]`. */ + data_credentials_id?: number; + /** + * @example { + * "start.cron": "0 50 18 1 1/1 ? *", + * "path": "/nexla_error_data/export" + * } + */ + config?: { + /** + * @description The interval at which Nexla should scan all quarantined records in the user's account and export them to the specified file storage. + * + * This should be a valid cron expression. + */ + "start.cron": string; + /** @description The base folder where all quarantined records will be exported. Nexla will automatically create subfolder tree in this base folder when exporting error data files. These subfolders will be of the pattern `//////
/.json` */ + path: string; + }; + }; + QuarantineSettingCreate: components["schemas"]["QuarantineSettingMutable"] & Record; + ApprovalRequest: { + id?: number; + org_id?: number; + request_type?: string; + topic_id?: number; + status?: string; + requestor_id?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + assignee_id?: number; + rejection_reason?: string; + }; + ResponseOrgAccessor: { + /** @enum {string} */ + type?: "ORG"; + /** @example nexla.com */ + email_domain?: string; + /** Format: nullable */ + client_identifier?: string; + }; + ResponseTeamAccessor: { + /** @enum {string} */ + type?: "TEAM"; + name?: string; + }; + ResponseUserAccessor: { + /** @enum {string} */ + type?: "USER"; + /** @example example@nexla.com */ + email?: string; + }; + AccessorsResponseSchema: (components["schemas"]["ResponseOrgAccessor"] | components["schemas"]["ResponseTeamAccessor"] | components["schemas"]["ResponseUserAccessor"]) & { + access_roles?: components["schemas"]["AccessRoles"]; + /** + * Format: date-time + * @example 2021-04-06T21:02:17.000Z + */ + created_at?: string; + /** + * Format: date-time + * @example 2021-04-06T21:02:17.000Z + */ + updated_at?: string; + }; + RequestUserAccessor: { + /** + * @description Choose this option if the access permissions should be granted to a user. + * + * > Note: Users can belong to multiple organizations. The platform allows granting access permission across organization boundaries only for Nexset `sharer` access role. + * + * @enum {string} + */ + type?: "USER"; + /** @description Unique ID of the user that should be granted this permission. */ + id?: number; + /** + * @description `Optional`: Only user-id or email is required to identify the user who should be granted this access permission. + * + * @example example@nexla.com + */ + email?: string; + /** @description Users can belong to multiple organizations. 
Specify the org_id to explicitly set the organization context for this access permission. If this is not specified the platform will try to automatically assign the permission to the user's default organization (when granting sharer access on a Nexset) or to the current organization (for all other access roles). */ + org_id?: number; + }; + RequestTeamAccessor: { + /** + * @description Choose this option if the access permissions should be granted to a team. + * + * @enum {string} + */ + type?: "TEAM"; + /** @description Unique ID of the team that should be granted this permission. */ + id?: number; + }; + RequestOrgAccessor: { + /** + * @description Choose this option if the access permissions should be granted to an entire organization. + * + * @enum {string} + */ + type?: "ORG"; + /** @description Unique ID of the organization that should be granted this permission. */ + id?: number; + }; + AccessorsMutable: (components["schemas"]["RequestUserAccessor"] | components["schemas"]["RequestTeamAccessor"] | components["schemas"]["RequestOrgAccessor"]) & { + access_roles?: components["schemas"]["AccessRoles"]; + }; + AccessorsRequestSchema: { + accessors?: components["schemas"]["AccessorsMutable"][]; + }; + MarketplaceDomain: { + id?: number; + org_id?: null | number; + owner_id?: null | number; + name?: string; + description?: string; + /** @description ID of the parent domain. Domains may build a hierarchy. */ + parent_id?: number; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }; + MarketplaceDomainCreate: { + org_id?: null | number; + owner_id?: null | number; + name?: string; + description?: string; + /** @description ID of the parent domain. Domains may build a hierarchy. 
*/ + parent_id?: number; + custodians?: components["schemas"]["CustodiansPayload"]; + }; + MarketplaceDomainsItem: { + id?: number; + name?: string; + description?: null | string; + /** @description Data samples coming from data sets that the marketplace item presents. */ + data_samples?: { + [key: string]: unknown; + }[]; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }; + MarketplaceDomainsItemCreate: { + name?: string; + description?: null | string; + /** @description ID of the org's dataset that marketplace item should present. User should have a `read` permission for the dataset. */ + data_set_id?: number; + }; + CustodiansResponse: { + id?: number; + email?: string; + full_name?: string; + }[]; + UserBrief: { + id?: number; + full_name?: string; + email?: string; + org?: number; + impersonated?: boolean; + impersonator?: components["schemas"]["UserSimplified"] & { + org?: number; + }; + }; + OrgMembershipToken: { + api_key?: string; + status?: components["schemas"]["OrgMembershipStatus"]; + "is_admin?"?: boolean; + }; + Token: { + access_token?: string; + /** @enum {string} */ + token_type?: "Bearer"; + expires_in?: number; + user?: components["schemas"]["UserBrief"]; + org_membership?: components["schemas"]["OrgMembershipToken"]; + org?: components["schemas"]["OrgSimplified"]; + }; + AuthConfig: { + id?: number; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + /** @description Unique external identifier for the authentication configuration. */ + uid?: string; + /** @enum {string} */ + protocol?: "saml" | "oidc" | "google" | "password"; + name?: string; + description?: string; + /** @description If true, this configuration is available to all organizations in the environment. */ + global?: boolean; + /** @description If true, Nexla will automatically create users in the environment when they log in using this authentication configuration. 
*/ + auto_create_users_enabled?: boolean; + /** @description The format of the NameID element in the SAML assertion. This is used to identify the user. */ + name_identifier_format?: string; + /** @description The base URL of the Nexla environment where the user should be redirected after authentication. */ + nexla_base_url?: string; + service_entity_id?: string; + assertion_consumer_url?: string; + logout_url?: string; + metadata_url?: string; + idp_entity_id?: string; + idp_sso_target_url?: string; + idp_slo_target_url?: string; + idp_cert?: string; + security_settings?: string; + oidc_domain?: string; + oidc_keys_url_key?: string; + oidc_token_verify_url?: string; + oidc_id_claims?: string; + oidc_access_claims?: string; + client_config?: { + [key: string]: unknown; + }; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + AuthConfigPayload: { + id?: number; + owner_id?: number; + org_id?: number; + uid?: string; + protocol?: string; + name?: string; + description?: string; + global?: boolean; + enabled_by_default?: boolean; + auto_create_users_enabled?: boolean; + name_identifier_format?: string; + nexla_base_url?: string; + service_entity_id?: string; + assertion_consumer_url?: string; + idp_entity_id?: string; + idp_sso_target_url?: string; + idp_slo_target_url?: string; + idp_cert?: string; + security_settings?: { + [key: string]: unknown; + }; + metadata?: string; + oidc_domain?: string; + oidc_keys_url_key?: string; + oidc_id_claims?: { + [key: string]: unknown; + }; + oidc_access_claims?: { + [key: string]: unknown; + }; + client_config?: { + [key: string]: unknown; + }; + secret_config?: { + [key: string]: unknown; + }; + check_state?: boolean; + }; + SignupRequest: { + email?: string; + full_name?: string; + g_captcha_response?: string; + /** + * @description Invite code is required for self sign up. This is a unique code that is shared with the user to allow them to sign up. 
+ * Included in the signup link if invites is used. + */ + invite?: string; + /** @description Free form object to store marketing/private information about the user. */ + personal_info?: { + [key: string]: unknown; + }; + }; + AuthSetting: { + id?: number; + org?: components["schemas"]["OrgSimplified"]; + auth_config?: { + id?: number; + uid?: string; + /** @enum {string} */ + protocol?: "saml" | "oidc" | "google" | "password"; + name?: string; + description?: string; + global?: boolean; + }; + enabled?: boolean; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }; + AuthSettingPayload: { + enabled?: boolean; + }; + /** @enum {string} */ + async_task_types: "BulkDeleteNotifications" | "BulkMarkAsReadNotifications" | "BulkPauseFlows" | "CallProbe" | "ChownUserResources" | "DeactivateUser" | "GetAuditLogs"; + /** + * @description The status of the task. + * @enum {string} + */ + async_task_statuses: "pending" | "running" | "completed" | "failed" | "cancelled"; + /** @description Success */ + AsyncTask: { + /** @description The unique identifier of the task. */ + id?: string; + type?: components["schemas"]["async_task_types"]; + status?: components["schemas"]["async_task_statuses"]; + /** @description The progress of the task. */ + progress?: number; + /** @description The priority of the task. */ + priority?: number; + /** + * Format: date-time + * @description The date and time the task was created. + */ + created_at?: string; + /** + * Format: date-time + * @description The date and time the task was completed, failed or stopped in another way. + */ + stopped_at?: string; + /** @description The result of the task. */ + result?: Record; + /** @description The URL to the result of the task. */ + result_url?: string; + /** @description The error that occurred during the task. 
*/ + error?: Record; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + }; + AsyncTaskPayload: { + /** + * @description The type of the task. + * @enum {string} + */ + type?: "BulkDeleteNotifications" | "BulkMarkAsReadNotifications" | "BulkPauseFlows" | "CallProbe" | "ChownUserResources" | "DeactivateUser" | "GetAuditLogs"; + /** @description The priority of the task. At the moment doesn't have any effect. */ + priority?: number; + /** @description The arguments for the task. Use `GET /async_tasks/explain_arguments/{task_type}` to get the list of possible arguments for task. */ + arguments?: { + [key: string]: unknown; + }; + }; + Runtime: { + /** @description The unique identifier of the runtime. */ + id?: number; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + /** @description The name of the runtime. */ + name?: string; + /** @description The description of the runtime. */ + description?: string; + /** + * Format: date-time + * @description The date and time when the runtime was created. + */ + created_at?: string; + /** + * Format: date-time + * @description The date and time when the runtime was last updated. + */ + updated_at?: string; + /** @description Whether the runtime is active or not. */ + active?: boolean; + /** @description The path to the Docker image. */ + dockerpath?: string; + /** @description Whether the runtime is managed or not. */ + managed?: boolean; + /** @description The configuration of the runtime. */ + config?: { + [key: string]: unknown; + }; + }; + RuntimePayload: { + /** @description The name of the runtime. */ + name?: string; + /** @description The description of the runtime. */ + description?: string; + /** @description Whether the runtime is active or not. */ + active?: boolean; + /** @description The path to the Docker image. */ + dockerpath?: string; + /** @description Whether the runtime is managed or not. 
*/ + managed?: boolean; + /** @description The configuration of the runtime. */ + config?: { + [key: string]: unknown; + }; + }; + GenAiConfig: { + /** Format: int32 */ + id?: number; + owner?: components["schemas"]["UserSimplified"]; + org?: components["schemas"]["OrgSimplified"]; + data_credentials?: components["schemas"]["DataCredential"]; + name?: string; + description?: string; + /** @enum {string} */ + status?: "ACTIVE" | "PAUSED"; + config?: { + api_version?: string; + openai_model?: string; + google_ai_model?: string; + enable_doc_recommendations?: boolean; + [key: string]: unknown; + }; + /** @enum {string} */ + type?: "genai_openai" | "genai_googleai"; + }; + GenAiConfigPayload: { + name?: string; + description?: string | null; + status?: ("active" | "paused") | null; + config?: { + api_version?: string | null; + openai_model?: string | null; + google_ai_model?: string | null; + enable_doc_recommendations?: boolean; + [key: string]: unknown; + }; + /** @enum {string} */ + type?: "genai_openai" | "genai_googleai"; + data_credentials_id?: number; + }; + GenAiConfigCreatePayload: components["schemas"]["GenAiConfigPayload"]; + GenAiOrgSetting: { + id?: number; + org_id?: number; + /** @enum {string} */ + gen_ai_usage?: "gen_docs" | "check_code" | "all"; + get_ai_config_id?: number; + /** @description GenAI configuration (only if accessible) */ + gen_ai_config?: components["schemas"]["GenAiConfig"]; + }; + GenAiOrgSettingPayload: { + org_id?: number; + gen_ai_config_id?: number; + /** @enum {string} */ + gen_ai_usage?: "all" | "gen_docs" | "check_code"; + }; + nexset_record: { + [key: string]: string; + }; + }; + responses: { + /** @description Success */ + data_credential_many: { + content: { + "application/json": ({ + id?: number; + name?: string; + description?: string | null; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + credentials_version?: string; + 
credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + verified_status?: string; + /** Format: date-time */ + verified_at?: string; + copied_from_id?: number | null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + vendor?: { + id?: number; + name?: string; + display_name?: string; + connection_type?: string; + }; + template_config?: Record | null; + })[]; + }; + }; + /** @description Success */ + data_credential_one: { + content: { + "application/json": { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + credentials_version?: string; + managed?: boolean; + credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + credentials_non_secure_data?: { + [key: string]: string | Record | number; + }; + verified_status?: string; + /** Format: date-time */ + verified_at?: string; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + }; + }; + /** @description Success */ + FlowsManyWithMetric: { + content: { + "application/json": { + flows?: components["schemas"]["FlowNodes"]; + } & components["schemas"]["FlowElements"] & { + metrics?: { + origin_node_id?: number; + records?: number; + size?: number; + errors?: number; + /** Format: date-time */ + reporting_date?: string; + run_id?: number; + }[]; + }; + }; + }; + /** @description Success */ + FlowsOne: { + content: { + "application/json": { + flows?: components["schemas"]["FlowOriginNode"][]; + } & components["schemas"]["FlowElements"]; + }; + }; + /** 
@description Either success or failure */ + genai_recommendation_response: { + content: { + "application/json": { + /** + * @description Should be 200 or "ok" for normal response. If GenAI service is not available, you will get another status code. + * @example ok + */ + status: string; + output?: { + /** @description The response message from GenAI service. Contains Markdown text. If there is an error, this field will not be present. */ + response?: string; + /** + * @description The status code from external GenAI service. + * @example 200 + */ + statusCode?: number; + /** + * @description If there is an error, this field will contain the error message, and 'response' field will not be present. + * @example GenAI Integration has not been enabled for your Nexla organization. Please contact your organization admin or Nexla support for enabling the integration. + */ + errorMessage?: string; + }; + }; + }; + }; + /** @description Success */ + data_source_many: { + content: { + "application/json": ({ + id?: number; + name?: string; + description?: string | null; + status?: string; + ingest_method?: string; + source_format?: string; + managed?: boolean; + code_container_id?: number | null; + copied_from_id?: number | null; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + source_type?: string; + connector_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + access_roles?: components["schemas"]["AccessRoles"]; + auto_generated?: boolean; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + data_sets?: unknown[]; + tags?: unknown[]; + flow_type?: string; + ingestion_mode?: string; + run_ids?: unknown[]; + has_template?: boolean; + vendor_endpoint?: { + id?: number; + name?: string; + display_name?: string; + }; + vendor?: { + id?: number; + name?: string; + display_name?: 
string; + connection_type?: string; + }; + })[]; + }; + }; + /** @description Success */ + data_source_one: { + content: { + "application/json": { + id?: number; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + name?: string; + description?: null; + status?: string; + data_sets?: { + version?: number; + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: string; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }[]; + ingest_method?: string; + source_format?: string; + /** @description The source configuration properties that were set as `source_config` in the payload-to-source create/update API calls. */ + source_config?: { + [key: string]: string; + }; + poll_schedule?: null; + managed?: boolean; + code_container_id?: null; + source_type?: string; + connector_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + auto_generated?: boolean; + data_credentials?: { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + credentials_version?: string; + managed?: boolean; + credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + credentials_non_secure_data?: { + [key: string]: string | Record | number; + }; + verified_status?: string; + /** Format: date-time */ + verified_at?: string; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + run_ids?: { + id?: number; + /** 
Format: date-time */ + created_at?: string; + }[]; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + flow_type?: string; + ingestion_mode?: string; + }; + }; + }; + /** @description Success */ + data_source_one_expand: { + content: { + "application/json": { + id?: number; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + name?: string; + description?: null; + status?: string; + data_sets?: { + version?: number; + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: string; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + sample_service_id?: null; + source_schema?: components["schemas"]["data_set_schema"]; + transform?: { + version?: number; + data_maps?: unknown[]; + transforms?: unknown[]; + custom_config?: Record; + }; + output_schema?: components["schemas"]["data_set_schema"]; + }[]; + ingest_method?: string; + source_format?: string; + source_config?: { + [key: string]: string; + }; + poll_schedule?: null; + managed?: boolean; + code_container_id?: null; + source_type?: string; + connector_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + auto_generated?: boolean; + data_credentials?: { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + credentials_version?: string; + managed?: boolean; + credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + 
credentials_non_secure_data?: { + [key: string]: string | Record | number; + }; + verified_status?: string; + /** Format: date-time */ + verified_at?: string; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + run_ids?: { + id?: number; + /** Format: date-time */ + created_at?: string; + }[]; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + flow_type?: string; + ingestion_mode?: string; + tags?: unknown[]; + }; + }; + }; + /** @description Success */ + data_sets_many: { + content: { + "application/json": components["schemas"]["DataSet"][]; + }; + }; + /** @description Success */ + data_sets_one: { + content: { + "application/json": components["schemas"]["DataSet"]; + }; + }; + /** @description Success */ + data_sets_sample: { + content: { + "application/json": OneOf<[({ + [key: string]: number | string | boolean | unknown[] | Record; + })[], ({ + /** @description Contents of the record in this sample. */ + rawMessage?: { + [key: string]: number | string | boolean | unknown[] | Record; + }; + /** @description Metadata about this record. 
*/ + nexlaMetaData?: { + /** @example GDRIVE */ + sourceType?: string; + /** + * Format: int64 + * @example 1657222945906 + */ + ingestTime?: number; + /** + * Format: int32 + * @example 0 + */ + sourceOffset?: number; + /** @example aBcDeFg */ + sourceKey?: string; + /** Format: nullable */ + topic?: string; + /** @example SOURCE */ + resourceType?: string; + /** + * Format: int32 + * @example 1 + */ + resourceId?: number; + trackerId?: { + [key: string]: string; + }; + eof?: boolean; + /** + * Format: int64 + * @example 1656524615000 + */ + lastModified?: number; + /** + * Format: int64 + * @example 1657222941374 + */ + runId?: number; + tags?: { + [key: string]: string; + }; + /** @example streaming */ + flow_type?: string; + }; + })[]]>; + }; + }; + /** @description Success */ + data_sink_many: { + content: { + "application/json": ({ + id?: number; + name?: string; + description?: string | null; + status?: string; + data_set_id?: number | null; + data_map_id?: number | null; + data_source_id?: null; + sink_format?: null; + sink_schedule?: null; + in_memory?: boolean; + managed?: boolean; + copied_from_id?: number | null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + sink_type?: string; + connector_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + access_roles?: components["schemas"]["AccessRoles"]; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + data_set?: ({ + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: string; + status?: string; + copied_from_id?: number | null; + /** Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + }) | null; + data_map?: { + id?: number; + owner_id?: number; + org_id?: number; + name?: string; + description?: string; + public?: boolean; + /** 
Format: date-time */ + created_at?: string; + /** Format: date-time */ + updated_at?: string; + } | null; + tags?: unknown[]; + flow_type?: string; + has_template?: boolean; + vendor_endpoint?: { + id?: number; + name?: string; + display_name?: string; + }; + vendor?: { + id?: number; + name?: string; + display_name?: string; + connection_type?: string; + }; + })[]; + }; + }; + /** @description Success */ + data_sink_one: { + content: { + "application/json": { + id?: number; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + name?: string; + description?: null; + status?: string; + data_set_id?: number; + data_map_id?: null; + data_source_id?: null; + sink_format?: null; + /** @description The destination configuration properties that were set as `sink_config` in the payload-to-destination create/update API calls. */ + sink_config?: { + [key: string]: string; + }; + sink_schedule?: null; + in_memory?: boolean; + managed?: boolean; + sink_type?: string; + connector_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + data_set?: { + id?: number; + name?: string; + }; + data_credentials?: { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + credentials_version?: string; + managed?: boolean; + credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + credentials_non_secure_data?: { + [key: string]: string | Record | number; + }; + verified_status?: string; + /** Format: date-time */ + verified_at?: string; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: 
string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + flow_type?: string; + }; + }; + }; + /** @description Success */ + data_sink_one_expand: { + content: { + "application/json": { + id?: number; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + name?: string; + description?: null; + status?: string; + data_set_id?: number; + data_map_id?: null; + data_source_id?: null; + sink_format?: null; + /** @description The destination configuration properties that were set as `sink_config` in the payload-to-destination create/update API calls. */ + sink_config?: { + [key: string]: string; + }; + sink_schedule?: null; + in_memory?: boolean; + managed?: boolean; + sink_type?: string; + connector_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + data_set?: { + id?: number; + name?: string; + description?: string; + output_schema?: components["schemas"]["data_set_schema"]; + status?: string; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + version?: number; + }; + data_credentials?: { + id?: number; + name?: string; + description?: string; + owner?: components["schemas"]["owner"]; + org?: components["schemas"]["org"]; + access_roles?: components["schemas"]["AccessRoles"]; + credentials_version?: string; + managed?: boolean; + credentials_type?: string; + connector?: { + id?: number; + type?: string; + connection_type?: string; + name?: string; + description?: string; + nexset_api_compatible?: boolean; + }; + api_keys?: unknown[]; + credentials_non_secure_data?: { + [key: string]: string | Record | number; + }; + verified_status?: string; + /** 
Format: date-time */ + verified_at?: string; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + copied_from_id?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + tags?: unknown[]; + }; + }; + }; + /** @description Success */ + data_maps_many: { + content: { + "application/json": components["schemas"]["DataMap"][]; + }; + }; + /** @description Success */ + data_maps_one: { + content: { + "application/json": components["schemas"]["DataMap"]; + }; + }; + /** @description Success */ + data_map_entries: { + content: { + "application/json": ({ + [key: string]: string | number; + })[]; + }; + }; + /** @description Success */ + Transform: { + content: { + "application/json": components["schemas"]["Transform"][]; + }; + }; + /** @description Success */ + code_containers_one_Transform: { + content: { + "application/json": components["schemas"]["Transform"]; + }; + }; + /** @description Success */ + CodeContainer: { + content: { + "application/json": components["schemas"]["CodeContainer"]; + }; + }; + /** @description Success */ + AttributeTransform: { + content: { + "application/json": components["schemas"]["AttributeTransform"][]; + }; + }; + /** @description Success */ + code_containers_one_AttributeTransform: { + content: { + "application/json": components["schemas"]["AttributeTransform"]; + }; + }; + /** @description Success */ + code_containers_many_CodeContainer: { + content: { + "application/json": components["schemas"]["CodeContainer"][]; + }; + }; + /** @description Success */ + projects_many: { + content: { + "application/json": components["schemas"]["Project"][]; + }; + }; + /** @description Success */ + projects_one: { + content: { + "application/json": components["schemas"]["Project"]; + }; + }; + /** @description Success */ + FlowsMany: { + content: { + "application/json": { + flows?: 
components["schemas"]["FlowNodes"]; + } & components["schemas"]["FlowElements"]; + }; + }; + /** @description Success */ + ProjectFlowsOld: { + content: { + "application/json": components["schemas"]["ProjectDataFlow"][]; + }; + }; + /** @description Success */ + orgs_many: { + content: { + "application/json": ({ + id?: number; + name?: string; + description?: string | null; + email_domain?: string; + /** Format: email */ + email?: string | null; + client_identifier?: string | null; + /** Format: url */ + org_webhook_host?: string; + default_cluster_id?: number; + access_roles?: components["schemas"]["AccessRoles"]; + owner?: { + id?: number; + /** Format: email */ + email?: string; + full_name?: string; + super_user?: boolean; + impersonated?: boolean; + default_org?: { + id?: number; + name?: string; + }; + user_tier?: null; + status?: string; + account_locked?: boolean; + org_memberships?: unknown[]; + /** Format: date-time */ + email_verified_at?: string; + /** Format: date-time */ + tos_signed_at?: string | null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + billing_owner?: { + id?: number; + /** Format: email */ + email?: string; + full_name?: string; + super_user?: boolean; + impersonated?: boolean; + default_org?: { + id?: number; + name?: string; + }; + user_tier?: null; + status?: string; + account_locked?: boolean; + org_memberships?: unknown[]; + /** Format: date-time */ + email_verified_at?: string; + /** Format: date-time */ + tos_signed_at?: string | null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + admins?: unknown[]; + org_tier?: { + id?: number; + name?: string; + display_name?: string; + record_count_limit?: number; + record_count_limit_time?: string; + data_source_count_limit?: number; + trial_period_days?: number; + } | null; + members_default_access_role?: string; + status?: string; + 
default_reusable_code_container_access_role?: string; + require_org_admin_to_publish?: boolean; + require_org_admin_to_subscribe?: boolean; + /** Format: date-time */ + email_domain_verified_at?: string | null; + /** Format: date-time */ + name_verified_at?: string | null; + enable_nexla_password_login?: boolean; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + })[]; + }; + }; + /** @description Success */ + orgs_one: { + content: { + "application/json": { + id?: number; + name?: string; + description?: null; + cluster_id?: number; + new_cluster_id?: null; + cluster_status?: string; + email_domain?: string; + email?: null; + client_identifier?: string; + /** Format: url */ + org_webhook_host?: string; + access_roles?: components["schemas"]["AccessRoles"]; + owner?: { + id?: number; + /** Format: email */ + email?: string; + full_name?: string; + impersonated?: boolean; + default_org?: { + id?: number; + name?: string; + }; + user_tier?: null; + status?: string; + account_locked?: boolean; + org_memberships?: { + id?: number; + name?: string; + "is_admin?"?: boolean; + org_membership_status?: string; + }[]; + /** Format: date-time */ + email_verified_at?: string; + tos_signed_at?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + billing_owner?: { + id?: number; + /** Format: email */ + email?: string; + full_name?: string; + impersonated?: boolean; + default_org?: { + id?: number; + name?: string; + }; + user_tier?: null; + status?: string; + account_locked?: boolean; + org_memberships?: { + id?: number; + name?: string; + "is_admin?"?: boolean; + org_membership_status?: string; + }[]; + /** Format: date-time */ + email_verified_at?: string; + tos_signed_at?: null; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + admins?: { + id?: number; + full_name?: string; + /** Format: email */ + email?: 
string; + }[]; + org_tier?: { + id?: number; + name?: string; + display_name?: string; + record_count_limit?: number; + record_count_limit_time?: string; + data_source_count_limit?: number; + trial_period_days?: number; + }; + members_default_access_role?: string; + status?: string; + default_reusable_code_container_access_role?: string; + require_org_admin_to_publish?: boolean; + require_org_admin_to_subscribe?: boolean; + email_domain_verified_at?: null; + name_verified_at?: null; + enable_nexla_password_login?: boolean; + /** Format: date-time */ + updated_at?: string; + /** Format: date-time */ + created_at?: string; + }; + }; + }; + /** @description Success */ + org_members: { + content: { + "application/json": components["schemas"]["OrgMember"][]; + }; + }; + /** @description Success */ + teams_many: { + content: { + "application/json": components["schemas"]["Team"][]; + }; + }; + /** @description Success */ + teams_one: { + content: { + "application/json": components["schemas"]["Team"]; + }; + }; + /** @description Success */ + team_members: { + content: { + "application/json": components["schemas"]["TeamMemberList"][]; + }; + }; + /** @description Success */ + users_many: { + content: { + "application/json": components["schemas"]["User"][]; + }; + }; + /** @description Success */ + users_one: { + content: { + "application/json": components["schemas"]["User"]; + }; + }; + /** @description Success */ + users_many_expand: { + content: { + "application/json": components["schemas"]["UserExpanded"][]; + }; + }; + /** @description Success */ + users_one_expand: { + content: { + "application/json": components["schemas"]["UserExpanded"]; + }; + }; + /** @description Success */ + users_transfer: { + content: { + "application/json": components["schemas"]["UserTransferred"]; + }; + }; + /** @description Success */ + user_settings_list: { + content: { + "application/json": components["schemas"]["UserSettings"][]; + }; + }; + /** @description Success */ + 
notifications_many: { + content: { + "application/json": components["schemas"]["Notification"][]; + }; + }; + /** @description Success */ + notifications_one: { + content: { + "application/json": components["schemas"]["Notification"]; + }; + }; + /** @description Success */ + notifications_count: { + content: { + "application/json": { + count?: number; + }; + }; + }; + /** @description Success */ + notification_types_many: { + content: { + "application/json": components["schemas"]["NotificationType"][]; + }; + }; + /** @description Success */ + notification_types_one: { + content: { + "application/json": components["schemas"]["NotificationType"]; + }; + }; + /** @description Success */ + notification_channel_settings_many: { + content: { + "application/json": components["schemas"]["NotificationChannelSetting"][]; + }; + }; + /** @description Success */ + notification_channel_settings_one: { + content: { + "application/json": components["schemas"]["NotificationChannelSetting"]; + }; + }; + /** @description Success */ + notification_settings_many: { + content: { + "application/json": components["schemas"]["NotificationSetting"][]; + }; + }; + /** @description Success */ + notification_settings_one: { + content: { + "application/json": components["schemas"]["NotificationSetting"]; + }; + }; + /** @description Success */ + notification_settings_many_type: { + content: { + "application/json": components["schemas"]["NotificationSettingTypeView"][]; + }; + }; + /** @description Success */ + flows_account_metrics: { + content: { + "application/json": { + /** + * Format: int32 + * @description Status of the report request. This must be `200` or `Ok` for the metrics object in the response to be considered valid. + */ + status?: number; + metrics?: { + data?: { + /** + * Format: int32 + * @description The total number of records processed during the specified time range. 
+ */ + records?: number; + /** + * Format: int32 + * @description The total volume of records processed (in bytes) during the specified time range. + */ + size?: number; + }; + /** Format: date-time */ + start_time?: string; + /** Format: date-time */ + end_time?: string; + }[]; + }; + }; + }; + /** @description Success */ + flows_dashboard: { + content: { + "application/json": { + /** + * Format: int32 + * @description Status of the report request. This must be `200` or `Ok` for the metrics object in the response to be considered valid. + */ + status?: number; + metrics?: { + sources?: { + [key: string]: components["schemas"]["DashboardMetricSet"]; + }; + sinks?: { + [key: string]: components["schemas"]["DashboardMetricSet"]; + }; + datasets?: { + [key: string]: components["schemas"]["DashboardMetricSet"]; + }; + /** Format: date-time */ + start_time?: string; + /** Format: date-time */ + end_time?: string; + }; + }; + }; + }; + /** @description Success */ + user_metrics: { + content: { + "application/json": { + metrics?: { + /** + * Format: date + * @description The date (in UTC) that the metrics in this entry are applicable for. + */ + time?: string; + /** + * Format: int32 + * @description The total number of records that were processed on the date indicated by the `time` property. + */ + records?: number; + /** + * Format: int32 + * @description The total volume (in bytes) of records that were processed on the date indicated by the `time` property. + */ + size?: number; + /** + * Format: int32 + * @description The total number of data processing errors that occurred on the date indicated by the `time` property. + */ + errors?: number; + }[]; + /** + * Format: int32 + * @description Status of the report request. This must be `200` or `Ok` for the metrics object in the response to be considered valid. 
+ */ + status?: number; + }; + }; + }; + /** @description Success */ + ResourceMetricsDaily: { + content: { + "application/json": { + metrics?: components["schemas"]["ResourceMetricDaily"][]; + /** + * Format: int32 + * @description Status of the report request. This must be `200` or `Ok` for the metrics object in the response to be considered valid. + */ + status?: number; + }; + }; + }; + /** @description Success */ + ResourceMetricsByRuns: { + content: { + "application/json": { + metrics?: { + data?: components["schemas"]["ResourceMetricByRun"][]; + /** @description A special metadata object that indicates the relevant response Nexla metadata for the metrics responses. This is useful for iterating through multiple pages of valid data. */ + meta?: { + /** @description Current page that this response corresponds to. */ + currentPage?: number; + /** @description Total number of valid pages of metrics data given the current page size. */ + pageCount?: number; + /** @description Total number of metrics entries that are available for this resource. */ + totalCount?: number; + }; + }; + /** + * Format: int32 + * @description Status of the report request. This must be `200` or `Ok` for the metrics object in the response to be considered valid. + */ + status?: number; + }; + }; + }; + /** @description Success */ + FlowMetricsResponse: { + content: { + "application/json": { + /** @description Status of the report request. This must be 200 for the data object in the response to be considered valid. */ + status?: number; + /** @description Message signifying status of the report request. This must be `Ok` for the data object in the response to be considered valid. */ + message?: string; + metrics?: { + /** @description Flow metrics data aggregated by resource id for the time period specified. If the request includes `groupby=runId` then the metrics are further grouped by `runId` to separate number of records processed per run id per resource. 
*/ + data?: OneOf<[{ + [key: string]: { + data_sources?: components["schemas"]["FlowResourceMetric"][]; + data_sets?: components["schemas"]["FlowResourceMetric"][]; + data_sinks?: components["schemas"]["FlowResourceMetric"][]; + }; + }, { + data_sources?: components["schemas"]["FlowResourceMetric"][]; + data_sets?: components["schemas"]["FlowResourceMetric"][]; + data_sinks?: components["schemas"]["FlowResourceMetric"][]; + }]>; + /** @description A special metadata object that indicates the relevant response Nexla metadata for the metrics responses. This is useful for iterating through multiple pages of valid data. */ + meta?: { + /** @description Current page that this response corresponds to. */ + currentPage?: number; + /** @description Total number of valid pages of metrics data given the current page size. */ + pageCount?: number; + /** @description Total number of metrics entries that are available for this resource. */ + totalCount?: number; + }; + }; + }; + }; + }; + /** @description Success */ + FlowLogsResponse: { + content: { + "application/json": { + /** @description Status of the report request. This must be 200 for the data object in the response to be considered valid. */ + status?: number; + /** @description Message signifying status of the report request. This must be `Ok` for the data object in the response to be considered valid. */ + message?: string; + logs?: { + data?: components["schemas"]["FlowLogEntry"][]; + /** @description A special metadata object that indicates the relevant response Nexla metadata for the responses. This is useful for iterating through multiple pages of valid data. */ + meta?: { + /** @description Current page that this response corresponds to. */ + current_page?: number; + /** @description Total number of valid pages of logs data given the current page size. */ + pages_count?: number; + /** @description Total number of log entries that are available for this resource. 
*/ + total_count?: number; + /** @description The id of the organization this flow belongs to. */ + org_id?: number; + /** @description The run id (denoting ingestion cycle) that these log were generated as part of. */ + run_id?: number; + }; + }; + }; + }; + }; + /** @description Success */ + quarantine_settings_one: { + content: { + "application/json": components["schemas"]["QuarantineSetting"]; + }; + }; + /** @description Success */ + approval_requests_many: { + content: { + "application/json": components["schemas"]["ApprovalRequest"][]; + }; + }; + /** @description Success */ + approval_requests_one: { + content: { + "application/json": components["schemas"]["ApprovalRequest"]; + }; + }; + /** @description Success */ + accessors_list: { + content: { + "application/json": components["schemas"]["AccessorsResponseSchema"][]; + }; + }; + /** @description List of marketplace domains */ + domains_many: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomain"][]; + }; + }; + /** @description Single marketplace domain */ + domains_one: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomain"]; + }; + }; + /** @description List of marketplace domain items */ + domain_items_many: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomainsItem"][]; + }; + }; + /** @description List of custodians */ + custodians_many: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansResponse"]; + }; + }; + /** @description Success */ + token: { + content: { + "application/json": components["schemas"]["Token"]; + }; + }; + /** @description Success */ + auth_configs_many: { + content: { + "application/json": components["schemas"]["AuthConfig"][]; + }; + }; + /** @description Success */ + auth_config_one: { + content: { + "application/json": components["schemas"]["AuthConfig"]; + }; + }; + /** @description Success */ + 
self_signup_requests_response: { + content: { + "application/json": ({ + /** @description The unique identifier of the self sign up request. */ + id?: number; + /** + * @description The status of the self sign up request. + * @enum {string} + */ + status?: "pending" | "email_verified" | "approved" | "rejected"; + /** @description The email address of the user. */ + email?: string; + /** @description The full name of the user. */ + full_name?: string; + /** @description The unique identifier of the invite. */ + invite_id?: number; + /** + * Format: date-time + * @description The date and time when the self sign up request was created. + */ + created_at?: string; + /** + * Format: date-time + * @description The date and time when the self sign up request was last updated. + */ + updated_at?: string; + })[]; + }; + }; + /** @description Success */ + self_signup_blocked_domains_response: { + content: { + "application/json": { + /** @description The unique identifier of the blocked domain. */ + id?: number; + /** @description The email domain that is blocked. 
*/ + domain?: string; + }[]; + }; + }; + /** @description Success */ + auth_settings_many: { + content: { + "application/json": components["schemas"]["AuthSetting"][]; + }; + }; + /** @description Success */ + auth_setting_one: { + content: { + "application/json": components["schemas"]["AuthSetting"]; + }; + }; + /** @description Success */ + async_tasks_many: { + content: { + "application/json": components["schemas"]["AsyncTask"][]; + }; + }; + /** @description Success */ + async_task_one: { + content: { + "application/json": components["schemas"]["AsyncTask"]; + }; + }; + /** @description Success */ + runtimes_many: { + content: { + "application/json": components["schemas"]["Runtime"][]; + }; + }; + /** @description Success */ + runtimes_one: { + content: { + "application/json": components["schemas"]["Runtime"]; + }; + }; + /** @description Success */ + gen_ai_configs_many: { + content: { + "application/json": components["schemas"]["GenAiConfig"][]; + }; + }; + /** @description Success */ + gen_ai_configs_one: { + content: { + "application/json": components["schemas"]["GenAiConfig"]; + }; + }; + /** @description Success */ + gen_ai_org_settings_many: { + content: { + "application/json": components["schemas"]["GenAiOrgSetting"][]; + }; + }; + /** @description Success */ + gen_ai_org_settings_one: { + content: { + "application/json": components["schemas"]["GenAiOrgSetting"]; + }; + }; + }; + parameters: { + access_roles?: "collaborator" | "operator" | "admin" | "owner"; + accept?: "application/vnd.nexla.api.v1+json" | "application/json"; + expand?: 1; + page?: number; + per_page?: number; + size?: number; + }; + requestBodies: never; + headers: never; + pathItems: never; +} + +export type $defs = Record; + +export type external = Record; + +export interface operations { + + /** + * Get All Credentials + * @description Returns all data credentials accessible to the authenticated user. 
+ */ + get_data_credentials: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + /** @description (Optional) Set this to the type of credentials you want to filter by. Connection type or vendor name can be used there. */ + credentials_type?: string; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["data_credential_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Create a Credential + * @description Creates a Nexla data credential with the specified configuration in your Nexla account. + * + * > Note: `name`, `credentials_type`, and `credentials` are required. + */ + create_data_credential: { + requestBody?: { + content: { + "application/json": { + credentials_type: "json"; + } & Omit; + }; + }; + responses: { + 200: components["responses"]["data_credential_one"]; + /** @description Unauthorized */ + 400: { + content: never; + }; + }; + }; + /** + * Get Credential by ID + * @description Returns a credential object if a valid ID is provided. + */ + get_data_credential: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the credential that needs to be fetched. */ + credential_id: number; + }; + }; + responses: { + 200: components["responses"]["data_credential_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Update Credential + * @description Updates a data credential in the authenticated user's account. + * + * > Note: This method does not perform partial updating of the `credentials` object. The entire `credentials` object will be updated if this is added to the payload. 
+ */ + update_data_credential: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the credential that needs to be fetched. */ + credential_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["data_credential"]; + }; + }; + responses: { + 200: components["responses"]["data_credential_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Credential + * @description Deletes a credential from your Nexla account. + */ + delete_data_credential: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the credential that needs to be deleted. */ + credential_id: string; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Credential by ID with expanded references + * @description Returns a credential object along with advanced information about associated references if a valid ID is provided. 
+ */ + get_data_credential_expanded: { + parameters: { + query?: { + expand?: components["parameters"]["expand"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the credential that needs to be fetched */ + credential_id: number; + }; + }; + responses: { + 200: components["responses"]["data_credential_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Test credential validity + * @description Use this endpoint to check whether or not a credential is valid. + */ + data_credential_probe: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + path: { + /** @description The unique ID of the credential being used. */ + credential_id: number; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["probe_response_with_async_results"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Invalid credential */ + 403: { + content: { + "application/json": { + /** @enum {integer} */ + status?: 403; + /** @description Detailed reason for the credential authentication failure. */ + message?: string; + }; + }; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Preview Storage Structure + * @description Use this endpoint to preview the structure/hierarchy of storage to which this credential grants access. For example, you can use this endpoint to see the folder and file structure of a file storage system or the table-column structure of a database. 
+ * This can be used to inspect the directory hierarchy of file content storage or the database schema of a database/warehouse storage system. Note that this endpoint is only valid for credentials for storage systems wherein a storage structure needs to be reviewed. + */ + preview_storage_structure: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + path: { + /** @description The unique ID of the credential being used. */ + credential_id: number; + }; + }; + requestBody?: { + content: { + "application/json": OneOf<[{ + /** + * @description Specify the hierarchy depth that should be scanned and returned in the response. + * + * If no other payload properties are provided, the depth is relative to the storage root. If this request contains instructions about the slice of storage to be previewed (using the path/database/table properties), the depth will be applied relative to that storage slice. + * + * We recommend using a depth = 1 to ensure that only small, relevant slices of the storage system are scanned. + */ + depth: number; + /** + * @description Folder or subfolder path for which you wish to retrieve the content structure. The path string should be structured from the root of the location to the credential. For example, `demo-out.nexla.com/users/test` will return the folder tree for the contents of the `test` subfolder. + * + * This is relevant for file-type connectors. + */ + path?: string; + }, { + /** + * @description Specify the hierarchy depth that should be scanned and returned in the response. + * + * If no other payload properties are provided, the depth is relative to the storage root. 
If this request contains instructions about the slice of storage to be previewed (using the path/database/table properties), the depth will be applied relative to that storage slice. + * + * We recommend using a depth = 1 to ensure that only small, relevant slices of storage are scanned. + */ + depth: number; + /** + * @description Name of the database from which you wish to fetch table names or collections. + * + * Relevant for Database and NoSql document-type connectors. + */ + database?: string; + /** + * @description Name of the table of which you wish to fetch the column structure. + * + * Relevant for database-type connectors. + */ + table?: string; + }]>; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["probe_tree_with_async"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Preview Connector Content + * @description Use this endpoint to preview the data content in a storage system. + * + * 1. For file systems, this can be used to preview the file content of any specific file. + * 2. For database systems, it can be used to preview sample rows from a table or query result. + * 3. For the rest connector, it can be used to preview the results of any API request. + * 4. For streaming connectors, it can be used to preview some records in a topic. + * + * For most connectors, it can also be used to determine the type of records that might be detected in the resulting Nexset. + */ + preview_connector_content: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. 
*/ + request_id?: number; + }; + path: { + /** @description The unique ID of the credential being used. */ + credential_id: number; + }; + }; + requestBody?: { + content: { + "application/json": OneOf<[{ + /** @description __For file type connectors__: Set the path to the file from which you wish to preview content. */ + path?: string; + }, { + [key: string]: string | Record | number | unknown[]; + }]>; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["probe_sample_with_async"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get All Flows + * @description Returns all flows accessible to the authenticated user. + */ + get_flows: { + parameters: { + query?: { + flows_only?: 1; + include_run_metrics?: 1; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per_page"]; + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["FlowsManyWithMetric"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Flow by ID + * @description Returns a flow object if a valid flow ID is provided. + */ + get_flow_by_id: { + parameters: { + query?: { + flows_only?: 1; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the flow that needs to be fetched. */ + flow_id: number; + }; + }; + responses: { + 200: components["responses"]["FlowsOne"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Flow + * @description Deletes a flow from your Nexla account. 
+ */ + delete_flow: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the flow that needs to be deleted. */ + flow_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Activate a Flow + * @description To activate the entire flow, use either the `origin_node_id` from any data source, set or sink in the flow, or include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > + * > 2. You can also activate a flow by using the id of the `data_source`/ `data_set` / `data_sink` that the flow node is linked to. See relevant endpoints in the API references for those resources. + */ + flow_activate_with_flow_id: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the flow that needs to be activated. */ + flow_id: number; + /** + * @description Set this query parameter if the flow node ID you are making a call with is not an origin flow node but you want to activate the full flow chain. + * Not necessary if the flow node is an origin flow node. 
+ */ + all: 1; + }; + }; + responses: { + 200: components["responses"]["FlowsOne"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Pause a Flow + * @description To pause the entire flow, use either the `origin_node_id` from any data source, set or sink in the flow, or include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > + * > 2. You can also pause a flow by using the id of the `data_source`/ `data_set` / `data_sink` that the flow node is linked to. See relevant endpoints in the API references for those resources. + */ + flow_pause_with_flow_id: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. Notice: only works with all=1 or full_tree=1. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the flow that needs to be paused. */ + flow_id: number; + /** + * @description Set this query parameter if the flow node ID you are making a call with is not an origin flow node but you want to pause the full flow chain. + * Not necessary if the flow node is an origin flow node. 
+ */ + all: 1; + }; + }; + responses: { + /** @description OK */ + 200: { + content: { + "application/json": components["schemas"]["flow_one_with_async"]; + }; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Copy a Flow + * @description Use this endpoint to create a copy of an existing flow. + */ + flow_copy_with_flow_id: { + parameters: { + path: { + /** @description The unique ID of the flow that needs to be copied. */ + flow_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": { + /** @description Set this to `true` if you do want to reuse the credentials of this flow instead of creating a clone of the credentials also. */ + reuse_data_credentials?: boolean; + /** @description Set this to `true` if you want the new flow to be accessible by all users who have access to this flow. */ + copy_access_controls?: boolean; + /** @description This is relevant for flows where one or more destinations have sources as their children. Set this to `true` if you want to create a clone of the flows that originate from those sources also. */ + copy_dependent_data_flows?: boolean; + /** @description The default API behavior is to create the new flow in the account of the authenticated user making this call. Set this property if you want a different user to be the owner of the new flow. */ + owner_id?: number; + /** @description The default API behavior is to create the new flow in the org that the authenticated user making this call belongs to. Set this property if you want the flow to be created in a different org.
*/ + org_id?: number; + }; + }; + }; + responses: { + 200: components["responses"]["FlowsOne"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Generate an AI suggestion for flow documentation + * @description Request a suggestion for Flow documentation. GenAI has to be configured properly for this request, or else you get a message with an error. + */ + flow_docs_recommendation: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the flow */ + flow_id: number; + }; + }; + responses: { + 200: components["responses"]["genai_recommendation_response"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Flow (by Resource ID) + * @description Returns a flow object if a valid resource type and resource ID is provided. + * + * > Note: This is a variant of flow endpoints where the flow node can referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + get_flow_by_resource_id: { + parameters: { + query?: { + flows_only?: 1; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The type of node linked to the flow you want to reference. For ex, set this to `data_sources` if you want to refer to the flow originating in a `data_source`. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource whose flow you want to reference. For ex, set this to id of the `data_source` if you want to refer to the flow originating in a specific `data_source`. 
*/ + resource_id: number; + }; + }; + responses: { + 200: components["responses"]["FlowsOne"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Flow (by Resource ID) + * @description Deletes a flow from your Nexla account. + * + * > Note: This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + delete_flow_by_resource_id: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The type of node linked to the flow you want to reference. For ex, set this to `data_sources` if you want to refer to the flow originating in a `data_source`. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource whose flow you want to reference. For ex, set this to id of the `data_source` if you want to refer to the flow originating in a specific `data_source`. */ + resource_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Activate a Flow (with Resource ID) + * @description To activate the entire flow include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > 2.
This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + flow_activate_with_resource_id: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The type of node linked to the flow you want to reference. For ex, set this to `data_sources` if you want to refer to the flow originating in a `data_source`. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource whose flow you want to reference. For ex, set this to id of the `data_source` if you want to refer to the flow originating in a specific `data_source`. */ + resource_id: number; + /** + * @description Set this query parameter if the resource ID you are making a call with is not an origin flow node but you want to activate the full flow chain. + * Not necessary if the flow node is an origin flow node. + */ + all: 1; + }; + }; + responses: { + 200: components["responses"]["FlowsOne"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Pause a Flow (with Resource ID) + * @description To pause the entire flow, include the ?all=1 or ?full_tree=1 query parameter. + * + * >**Note**: + * > 1. All endpoints for activating or pausing a flow operate on the specific resource given and all of the flow nodes downstream from that resource. This allows for pausing and activating sub-flows while leaving the rest of the flow state unchanged. + * > + * > 2. This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node.
+ */ + flow_pause_with_resource_id: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The type of node linked to the flow you want to reference. For ex, set this to `data_sources` if you want to refer to the flow originating in a `data_source`. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource whose flow you want to reference. For ex, set this to id of the `data_source` if you want to refer to the flow originating in a specific `data_source`. */ + resource_id: number; + /** + * @description Set this query parameter if the flow node ID you are making a call with is not an origin flow node but you want to pause the full flow chain. + * Not necessary if the flow node is an origin flow node. + */ + all: 1; + }; + }; + responses: { + 200: components["responses"]["FlowsOne"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get All Sources + * @description Returns all data sources accessible to the authenticated user. + */ + get_data_sources: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["data_source_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Create a Source + * @description Creates a new data source in the authenticated user's account. + * + * Depending on the type of source you want to create (`source_type`), properties like `source_config` and `data_credentials_id` will require appropriate configuration. + * + * > Note: `name`, `source_type`, `source_config` and `data_credentials_id` are required. 
+ */ + create_data_source: { + requestBody?: { + content: { + "application/json": { + source_type: "json"; + } & Omit; + }; + }; + responses: { + 200: components["responses"]["data_source_one"]; + /** @description Unauthorized */ + 400: { + content: never; + }; + }; + }; + /** + * Get Source by ID + * @description Returns a source object if a valid ID is provided. + */ + get_data_source: { + parameters: { + query?: { + expand?: components["parameters"]["expand"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the source that needs to be fetched. */ + source_id: number; + }; + }; + responses: { + 200: components["responses"]["data_source_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Update a Source + * @description Updates a data source in the authenticated user's account. + * + * Depending on the type of source you want to update (`source_type`), properties like `source_config` and `data_credentials_id` will require appropriate configuration. + * + * > Note: This method does not perform partial updating of `source_config`. The entire `source_config` object will be updated if this is added to the payload. + */ + update_data_source: { + parameters: { + path: { + /** @description The unique ID of the source that needs to be updated. */ + source_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["data_source"]; + }; + }; + responses: { + 200: components["responses"]["data_source_one"]; + /** @description Unauthorized */ + 400: { + content: never; + }; + }; + }; + /** + * Delete a Source + * @description Deletes a source from your Nexla account. + */ + delete_data_source: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the source that needs to be deleted. 
*/ + source_id: string; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Source by ID with Expanded References + * @description Returns a source object along with advanced information about associated references if a valid ID is provided. + */ + get_data_source_expanded: { + parameters: { + query?: { + expand?: components["parameters"]["expand"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the source that needs to be fetched */ + source_id: number; + }; + }; + responses: { + 200: components["responses"]["data_source_one_expand"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Activate a Source + * @description Activate a paused data source. + */ + activate_source: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the source that needs to be activated. */ + source_id: number; + }; + }; + responses: { + 200: components["responses"]["data_source_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Pause a Source + * @description Pause an active data source. + */ + pause_source: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the source that needs to be paused. 
*/ + source_id: number; + }; + }; + responses: { + 200: components["responses"]["data_source_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Copy a Source + * @description Use this endpoint to create a copy of an existing flow. + */ + copy_source: { + parameters: { + path: { + /** @description The unique ID of the source that needs to be copied. */ + source_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": { + /** @description Set this to `true` if you do want to reuse the credentials of this source instead of creating a clone of the credentials also. */ + reuse_data_credentials?: boolean; + /** @description Set this to `true` if you want the new source to be accessible by all users who have access to the this sink. */ + copy_access_controls?: boolean; + /** @description The default API behavior is to create the new source in the account of the authenticated user making this call. Set this property if you want a different user to be the owner of the new source. */ + owner_id?: number; + /** @description The default API behavior is to create the new source in the org that the authenticated user making this call belongs to. Set this property if you want the source to be created in a different org. */ + org_id?: number; + }; + }; + }; + responses: { + 200: components["responses"]["data_source_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get All Nexsets + * @description Retrieves all Nexsets accessible to the authenticated user. 
+ */ + get_nexsets: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["data_sets_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + }; + }; + /** + * Create a Nexset + * @description Creates a Nexset from another Nexset. + * + * The endpoint accepts a parent Nexset ID along with all transform and validation rules that should be applied to the parent Nexset. + * + * The two payload variants reflect the following two ways of specifying transform rules: + * 1. Attach the transform code that should be applied: Set `has_custom_transform: false`, and attach a `transform` code snippet. + * 2. Use the ID of a reusable record transform: Set `has_custom_transform: false`, and attach the `transform_id` of the record transform to be applied. + */ + create_nexset: { + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["DataSetCreate"]; + }; + }; + responses: { + 200: components["responses"]["data_sets_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get a Nexset + * @description Returns a Nexset object if a valid ID is provided. + */ + get_nexset: { + parameters: { + query?: { + expand?: components["parameters"]["expand"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be fetched. 
*/ + set_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sets_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Update a Nexset + * @description Updates a Nexset in the authenticated user's account. + */ + update_nexset: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset to be updated. */ + set_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["DataSetMutable"]; + }; + }; + responses: { + 200: components["responses"]["data_sets_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Nexset + * @description Deletes a Nexset from the authenticated user's account. + */ + delete_nexset: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be deleted. */ + set_id: string; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Activate Nexset + * @description Activates a paused Nexset. + */ + activate_nexset: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be activated. 
*/ + set_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sets_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Pause Nexset + * @description Pauses an active Nexset. + */ + pause_nexset: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be paused. */ + set_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sets_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Copy Nexset + * @description Use this endpoint to create a clone of an existing Nexset. + */ + copy_nexset: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be copied. */ + set_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": { + /** @description Set this to `true` if you want the new Nexset to be accessible by all users who have access to the this Nexset. */ + copy_access_controls?: boolean; + /** @description The default API behavior is to create the new Nexset in the account of the authenticated user making this call. Set this property if you want a different user to be the owner of the new Nexset. */ + owner_id?: number; + /** @description The default API behavior is to create the new Nexset in the org that the authenticated user making this call belongs to. Set this property if you want the Nexset to be created in a different org. 
*/ + org_id?: number; + }; + }; + }; + responses: { + 200: components["responses"]["data_sets_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Nexset Samples + * @description Use this endpoint to fetch some sample records from this Nexset. Use the relevant query parameters to control whether the samples returned are from the live Nexset topic or the Nexset sample cache. + */ + get_nexset_samples: { + parameters: { + query?: { + /** @description The maximum number of samples that should be returned in the response. */ + count?: number; + /** @description Set this to true to fetch Nexla metadata about each sample record along with the record content. */ + include_metadata?: boolean; + /** @description Set this to true to fetch live sample records from the Nexset topic. */ + live?: boolean; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be fetched. */ + set_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sets_sample"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Generate an AI suggestion for Nexset documentation + * @description Request a suggestion for Nexset documentation. GenAI has to be configured properly for this request, or else you get a message with an error. + */ + data_set_docs_recommendation: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Nexset that needs to be activated. 
*/ + set_id: number; + }; + }; + responses: { + 200: components["responses"]["genai_recommendation_response"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get All Sinks + * @description Retrieves all data sinks accessible to the authenticated user. + */ + get_data_sinks: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["data_sink_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Create a Sink + * @description Creates a Nexla data_sink with the specified configuration in your Nexla account. + * + * > Note: `name` ,`data_set_id`, `sink_type`, `sink_config` and `data_credentials_id` are required. + */ + create_data_sink: { + requestBody?: { + content: { + "application/json": { + sink_type: "json"; + } & Omit; + }; + }; + responses: { + 200: components["responses"]["data_sink_one"]; + /** @description Unauthorized */ + 400: { + content: never; + }; + }; + }; + /** + * Get Sink by ID + * @description Returns a data_sink object if a valid ID is provided. + */ + get_data_sink: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the sink that needs to be fetched. */ + sink_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sink_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Update Sink + * @description Updates a data_sink object in the authenticated user's account. 
+ * + * > Note: This method does not perform partial updating of the `sink_config` object. The entire `sink_config` object will be updated if this is added to the payload. + */ + update_data_sink: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the sink that needs to be fetched. */ + sink_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["data_sink"]; + }; + }; + responses: { + 200: components["responses"]["data_sink_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Sink + * @description Deletes a sink from your Nexla account. + */ + delete_data_sink: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the sink that needs to be deleted. */ + sink_id: string; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Sink by ID with Expanded References + * @description Returns a data_sink object along with advanced information about associated references if a valid ID is provided. + */ + get_data_sink_expanded: { + parameters: { + query?: { + expand?: components["parameters"]["expand"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the sink that needs to be fetched. 
*/ + sink_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sink_one_expand"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Activate a Sink + * @description Activate a paused data sink. + */ + activate_data_sink: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the sink that needs to be activated. */ + sink_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sink_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Pause a Sink + * @description Pause an active data sink. + */ + pause_data_sink: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the sink that needs to be paused. */ + sink_id: number; + }; + }; + responses: { + 200: components["responses"]["data_sink_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Copy a Sink + * @description Use this endpoint to create a copy of an existing data sink. + */ + copy_data_sink_source: { + parameters: { + path: { + /** @description The unique ID of the sink that needs to be copied. */ + sink_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": { + /** @description Set this to `true` if you do want to reuse the credentials of this sink instead of creating a clone of the credentials also. */ + reuse_data_credentials?: boolean; + /** @description Set this to `true` if you want the new sink to be accessible by all users who have access to the this sink. 
*/ + copy_access_controls?: boolean; + /** @description The default API behavior is to create the new sink in the account of the authenticated user making this call. Set this property if you want a different user to be the owner of the new sink. */ + owner_id?: number; + /** @description The default API behavior is to create the new sink in the org that the authenticated user making this call belongs to. Set this property if you want the sink to be created in a different org. */ + org_id?: number; + }; + }; + }; + responses: { + 200: components["responses"]["data_sink_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Data Maps + * @description Retrieves all lookups (data maps) accessible to the authenticated user. + */ + get_data_maps: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["data_maps_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a Static Data Map + * @description Creates a new static data map in the authenticated user's account. Dynamic data maps can only be created by creating a Destination (Sink) of the type `data_map`. + * + * For statically assigned data maps, you can choose to add data rows to the data map by either of the following methods: + * 1. Send data map entries with this request. In this case, the rows of data are sent as a `data_map` array of objects. + * 2. Send data map entries as a separate call to add/update entries. + * + * You must include `map_primary_key` to specify which map attribute should be used for data matching. 
+ */ + create_static_data_map: { + requestBody?: { + content: { + "application/json": components["schemas"]["DataMapCreate"]; + }; + }; + responses: { + 200: components["responses"]["data_maps_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Data Map by ID + * @description Retrieves a data map object if a valid ID is provided. + * + * This call to `/data_maps` **does not** return data map entries, as they can be a large array of objects for big data maps. + * + * You can include the `expand` query parameter to fetch the data map entries of smaller static data maps. + */ + get_data_map: { + parameters: { + query?: { + expand?: components["parameters"]["expand"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the data map that needs to be fetched. */ + data_map_id: number; + }; + }; + responses: { + 200: components["responses"]["data_maps_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Update Data Map Metadata + * @description Updates a data map in the authenticated user's account. + * + * This endpoint is suitable for updating the metadata of a data map. We recommend using the data map entries update and delete endpoints to update data map rows. + */ + update_data_map_metadata: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the data map that needs to be updated. 
*/ + data_map_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["DataMapMutable"]; + }; + }; + responses: { + 200: components["responses"]["data_maps_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Data Map + * @description Deletes a data map from your Nexla account. + */ + delete_data_map: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the data map that needs to be deleted. */ + data_map_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Upsert Static Data Map Entries + * @description Updates the entries in a static data map. Use this endpoint to add new entries or update the row corresponding to a specific key. + */ + upsert_data_map_entries: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the data map that needs to be updated. */ + data_map_id: number; + }; + }; + requestBody?: { + content: { + "application/json": { + /** @description Enter the array of data map entries that you wish to upsert. This call will result in an upsert on the data map, i.e., new rows will be added for keys not present in the data map, and relevant rows will be updated for keys that are already present. 
*/ + entries?: ({ + [key: string]: string | number; + })[]; + }; + }; + }; + responses: { + 200: components["responses"]["data_map_entries"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Check Data Map Entries + * @description Returns the rows of data from the data map that matches a desired key or key pattern. + * + * This endpoint can be used to check whether the data map contains rows of data that match the desired key, keys, or key patterns. Key names should be provided in the path in the format described below. + */ + check_data_map_entries: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the data map that needs to be fetched. */ + data_map_id: number; + /** @description One or more comma-separated keys for accessing data map entries. These keys may contain simple matching expressions with `*` wildcard characters. The response will contain entries whose primary key values match the pattern. */ + entry_keys: string | number; + }; + }; + responses: { + 200: components["responses"]["data_map_entries"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Data Map Entries + * @description Deletes specific entries from the data map. + * + * Use this endpoint to remove specific entries from the data map. + */ + delete_data_map_entries: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the data map that needs to be fetched. */ + data_map_id: number; + /** @description One or more comma-separated keys for accessing access data map entries. 
These keys may contain simple matching expressions with `*` wildcard characters. */ + entry_keys: string | number; + }; + }; + responses: { + /** @description OK */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Reusable Record Transforms + * @description Reusable record transforms are reusable code blocks that can be used to modify an input record of a Nexset into an output record of that Nexset. + * Use this endpoint to fetch all reusable record transforms. + */ + get_reusable_record_transforms: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["Transform"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a Reusable Record Transform + * @description Create a new reusable record transform. + */ + create_reusable_record_transform: { + requestBody?: { + content: { + "application/json": components["schemas"]["TransformMutable"]; + }; + }; + responses: { + 200: components["responses"]["code_containers_one_Transform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get A Reusable Record Transform + * @description Returns a reusable record transform object if a valid ID is provided. + */ + get_reusable_record_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the transform that needs to be fetched. 
*/ + transform_id: number; + }; + }; + responses: { + 200: components["responses"]["code_containers_one_Transform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Update Reusable Record Transform + * @description Updates a transform in the authenticated user's account. + */ + update_reusable_record_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the transform that needs to be updated. */ + transform_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["CodeContainerMutable"]; + }; + }; + responses: { + 200: components["responses"]["CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Reusable Record Transform + * @description Use this endpoint to delete a reusable record transform. + */ + delete_reusable_record_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the transform that needs to be deleted. 
*/ + transform_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Copy a Reusable Record Transform + * @description Use this endpoint to create a copy of an existing reusable record transform. + */ + copy_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the transform to be copied. */ + transform_id: number; + }; + }; + responses: { + 200: components["responses"]["code_containers_one_Transform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Public Reusable Record Transforms + * @description The Nexla team regularly adds common reusable record transforms that are made available to all Nexla accounts. + * + * Use this endpoint to fetch all such "publicly" available reusable record transforms. + */ + get_public_reusable_record_transforms: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["Transform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get all Attribute Transforms + * @description Reusable attribute transforms are reusable code blocks that can be used to define the value of an output attribute in a Nexset. 
These code blocks can be used to enhance the set of transforms available to end users when using the Nexset Designer. + * + * Use this endpoint to fetch all attribute transforms accessible to the authenticated user. + */ + get_attribute_transforms: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["AttributeTransform"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create an Attribute Transform + * @description Create a new attribute transform. + */ + create_attribute_transform: { + requestBody?: { + content: { + "application/json": components["schemas"]["AttributeTransformMutable"]; + }; + }; + responses: { + 200: components["responses"]["code_containers_one_AttributeTransform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Attribute Transform by ID + * @description Returns an attribute transform object if a valid ID is provided. + */ + get_attribute_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the attribute transform that needs to be fetched. */ + attribute_transform_id: number; + }; + }; + responses: { + 200: components["responses"]["code_containers_one_AttributeTransform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Update Attribute Transform + * @description Updates an attribute transform in the authenticated user's account. + */ + update_attribute_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the attribute transform. 
*/ + attribute_transform_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["AttributeTransformMutable"]; + }; + }; + responses: { + 200: components["responses"]["code_containers_one_AttributeTransform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete an Attribute Transform + * @description Deletes an attribute transform from your Nexla account. + */ + delete_attribute_transform: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the attribute transform that needs to be deleted. */ + attribute_transform_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Public Attribute Transforms + * @description The Nexla team regularly adds common reusable attribute transforms that are made available to all Nexla accounts. + * + * Use this endpoint to fetch all such "publicly" available reusable attribute transforms. + */ + get_public_attribute_transforms: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["AttributeTransform"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get all Code Containers + * @description Use this endpoint to fetch all code containers accessible to the authenticated user. 
+ */ + get_code_containers: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["code_containers_many_CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a Code Container + * @description Use this endpoint to create a new code container. + */ + create_code_container: { + requestBody?: { + content: { + "application/json": components["schemas"]["CodeContainerMutable"]; + }; + }; + responses: { + 200: components["responses"]["CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Code Container by ID + * @description Returns a code container object if a valid ID is provided. + */ + get_code_container: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the container that needs to be fetched. */ + code_container_id: number; + }; + }; + responses: { + 200: components["responses"]["CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Update a Code Container + * @description Updates a code container in the authenticated user's account. + */ + update_code_container: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the container that needs to be updated. 
*/ + code_container_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["CodeContainerMutable"]; + }; + }; + responses: { + 200: components["responses"]["CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Code Container + * @description Deletes a code container from the authenticated user's account. + */ + delete_code_container: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the container that needs to be deleted. */ + code_container_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Copy a Code Container + * @description Use this endpoint to create a copy of an existing code container. + */ + copy_code_container: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the code container to be copied. */ + code_container_id: number; + }; + }; + responses: { + 200: components["responses"]["CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Public Code Containers + * @description The Nexla team regularly adds common code containers that are made available to all Nexla accounts. 
+ * + * Use this endpoint to fetch all such "publicly" available code containers. + */ + get_public_code_containers: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["code_containers_many_CodeContainer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get all Projects + * @description Retrieves a list of all projects accessible to the authenticated user. + */ + get_projects: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["projects_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a project + * @description Creates a project with the specified configuration. Note that flows can also be attached to the project later by calling endpoints to update the project. + */ + create_project: { + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectCreate"]; + }; + }; + responses: { + 200: components["responses"]["projects_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Project by ID + * @description Returns a project if a valid ID is provided. 
+ */ + get_project: { + parameters: { + path: { + /** @description The unique ID of the project */ + project_id: number; + }; + }; + responses: { + 200: components["responses"]["projects_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Modify a Project + * @description Modifies a project's information and settings if a valid ID and body are provided. + */ + update_project: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectMutable"]; + }; + }; + responses: { + 200: components["responses"]["projects_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Project by ID + * @description Deletes a project if a valid ID is provided. Note that flows belonging to the project will only be removed from the project and will not be deleted. + */ + delete_project: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + responses: { + /** @description OK */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Project Flows + * @description Returns a list of flows belonging to a project. + */ + get_project_flows: { + parameters: { + path: { + /** @description The unique ID of the project. 
*/ + project_id: number; + }; + }; + responses: { + 200: components["responses"]["FlowsMany"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Flows to Project + * @description Adds a list of flows to a project. The existing flow list is retained and merged with the new flow list. + */ + add_project_flows: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectFlowListFlowNodes"]; + }; + }; + responses: { + 200: components["responses"]["FlowsMany"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Project Flows List + * @description Replaces the list of flows belonging to a project. Existing flows are removed from the project. + */ + replace_project_flows: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectFlowListFlowNodes"]; + }; + }; + responses: { + 200: components["responses"]["FlowsMany"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Remove Flows From A Project + * @description Removes data flows from a project. If no request body is provided, all flows belonging to the project will be removed. The flows themselves will not be deleted, but they will no longer belong to the project. 
+ */ + remove_project_flows: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + /** @description Optional list of flow identifiers. Data flows must be referenced by the resource associated with them in the GET response. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectFlowListFlowNodes"]; + }; + }; + responses: { + 200: components["responses"]["FlowsMany"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Project Flows (Deprecated) + * @description Returns a list of flows belonging to a project. + * + * > **Note**: This version of the endpoint has been deprecated. The returned flow response does not reference the new unique flow ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. See get_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + "get_project_flows_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + responses: { + 200: components["responses"]["ProjectFlowsOld"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Flows to Project (Deprecated) + * @description Adds a list of flows to a project. The existing flow list is retained and merged with the new flow list. + * + * > **Note**: This version of the endpoint has been deprecated. The request body and response does not reference flows with new unique flow_ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. 
See add_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + "add_project_flows_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectFlowList"]; + }; + }; + responses: { + 200: components["responses"]["ProjectFlowsOld"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Project Flows List (Deprecated) + * @description Replaces the list of flows belonging to a project. Existing flows are removed from the project. + * + * > **Note**: This version of the endpoint has been deprecated. The request body and response does not reference flows with new unique flow_ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. See replace_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + "replace_project_flows_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectFlowList"]; + }; + }; + responses: { + 200: components["responses"]["ProjectFlowsOld"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Remove Flows From A Project (Deprecated) + * @description Removes data flows from a project. If no request body is provided, all flows belonging to the project will be removed. The flows themselves will not be deleted, but they will no longer belong to the project. 
+ * + * > **Note**: This version of the endpoint has been deprecated. The request body and response does not reference flows with new unique flow_ids, instead references composite data flow ids of the type `{resource_type}/{resource_id}`. See remove_project_flows for a new version of this endpoint that references unique `flow_id`. + */ + "remove_project_flows_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + /** @description Optional list of flow identifiers. Data flows must be referenced by the resource associated with them in the GET response. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["ProjectFlowList"]; + }; + }; + responses: { + 200: components["responses"]["ProjectFlowsOld"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Organizations + * @description Returns all organizations accessible to the authenticated user. + */ + get_orgs: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["orgs_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Organization by ID + * @description Returns an organization if a valid ID is provided. + */ + get_org: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization that needs to be fetched. */ + org_id: number; + }; + }; + responses: { + 200: components["responses"]["orgs_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update an Organization + * @description Updates properties of an organization. 
+ */ + update_org: { + parameters: { + path: { + /** @description The unique ID of the organization that needs to be updated. */ + org_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["OrgsUpdate"]; + }; + }; + responses: { + 200: components["responses"]["orgs_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get All Members in Organization + * @description Retrieves a list of all users in an organization. + */ + get_org_members: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization. */ + org_id: number; + }; + }; + responses: { + 200: components["responses"]["org_members"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Update Organization Members + * @description Add or update members in an organization. This endpoint can also be used to modify an existing member's role in the organization. + * + * When adding a new member using their email id, if a user account for that email id does not exist on the platform then a new user account will be created. If the user already exists on the platform as a member of a different organization then their membership will get updated to include this organization also. + */ + update_org_members: { + parameters: { + path: { + /** @description The unique ID of the organization. 
*/ + org_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["OrgMemberList"]; + }; + }; + responses: { + 200: components["responses"]["org_members"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Remove Members from an Organization. + * @description Removes one or more members from the organization. Note that this will not delete the user account from the platform, but will remove the user's ability to access this organization's resources. + */ + delete_org_members: { + parameters: { + path: { + /** @description The unique ID of the organization. */ + org_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["OrgMemberDelete"]; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": { + /** @description Response status code */ + code?: string; + /** @description Response status text */ + message?: string; + }; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Teams + * @description Returns all teams accessible to the authenticated user. + */ + get_teams: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["teams_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a team + * @description Creates a team with the specified configuration and members. 
+ */ + create_team: { + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["TeamCreate"]; + }; + }; + responses: { + 200: components["responses"]["teams_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Team by ID + * @description Returns a team if a valid ID is provided. + */ + get_team: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + responses: { + 200: components["responses"]["teams_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Modify a Team + * @description Modifies a team's information and settings if a valid ID and body are provided. + */ + update_team: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["TeamMutable"]; + }; + }; + responses: { + 200: components["responses"]["teams_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Team by ID + * @description Deletes a team if a valid ID is provided. + */ + delete_team: { + parameters: { + path: { + /** @description The unique ID of the team. 
*/ + team_id: number; + }; + }; + responses: { + /** @description OK */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Team Members + * @description Returns a list of the members belonging to a team. + */ + get_team_members: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + responses: { + 200: components["responses"]["team_members"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Members to A Team + * @description Adds a list of members to a team. The existing list of members will be retained and merged with the new list of members. + */ + add_team_members: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["TeamMemberList"]; + }; + }; + responses: { + 200: components["responses"]["team_members"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Team Members List + * @description Replaces the list of members belonging to a team. Existing members will be removed from the team. + */ + replace_team_members: { + parameters: { + path: { + /** @description The unique ID of the team. 
*/ + team_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["TeamMemberList"]; + }; + }; + responses: { + 200: components["responses"]["team_members"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Remove Team Members + * @description Removes members from a team. If no request body is provided, all members belonging to the team will be removed. + */ + delete_team_members: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + /** @description Optional list of members. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["TeamMemberList"]; + }; + }; + responses: { + 200: components["responses"]["team_members"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get All Users + * @description Returns all users that can be viewed by authenticated user. + */ + get_users: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["users_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a User + * @description Create a new user in this environment. + * + * > This requires admin access to the provided organization. 
+ */ + create_user: { + requestBody?: { + content: { + "application/json": components["schemas"]["UsersCreateRequired"]; + }; + }; + responses: { + 200: components["responses"]["users_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get All Users with Expanded References + * @description Returns all users that can be viewed by the authenticated user. + */ + get_users_expand: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["users_many_expand"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get User by ID + * @description Returns a user if a valid ID is provided. + */ + get_user: { + parameters: { + path: { + /** @description The unique ID of the user. */ + user_id: number; + }; + }; + responses: { + 200: components["responses"]["users_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Modify a User + * @description Modifies a user's information and settings if a valid ID and body are provided + */ + update_user: { + parameters: { + path: { + /** @description The unique ID of the user. */ + user_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["UsersUpdate"]; + }; + }; + responses: { + 200: components["responses"]["users_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get User by ID with Expanded References + * @description Returns a user if a valid ID is provided. + */ + get_user_expand: { + parameters: { + query: { + /** @description Truthy parameter for requesting expanded references. */ + expand: number | boolean; + }; + path: { + /** @description The unique ID of the user. 
*/ + user_id: number; + }; + }; + responses: { + 200: components["responses"]["users_one_expand"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get info on current user + * @description Returns the user information of the currently logged-in user, including org memberships and current org info. + */ + get_current_user: { + responses: { + 200: components["responses"]["users_transfer"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get the current user's settings + * @description Returns all the settings for the current user. + */ + get_user_settings: { + responses: { + 200: components["responses"]["user_settings_list"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get All Notifications + * @description Returns all notifications in the authenticated user's account. Note that this only includes notifications generated to be displayed in the Nexla UI. + */ + get_notifications: { + parameters: { + query?: { + /** @description Filter by level of notifications. Values are: 'DEBUG', 'INFO', 'WARN', 'ERROR', 'RECOVERED', 'RESOLVED'." */ + level?: "DEBUG" | "INFO" | "WARN" | "ERROR" | "RECOVERED"; + /** @description Filter notifications starting from timestamp. Format is unix timestamp. */ + from?: number; + /** @description Filter notifications ending at timestamp. Format is unix timestamp. */ + to?: number; + }; + path: { + /** @description Set the read query parameter to 0 to fetch only notifications that have not yet been read, or set it to 1 to fetch only those that have been read. 
*/ + read: number; + }; + }; + responses: { + /** @description Success */ + 200: components["responses"]["notifications_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get a Notification + * @description Returns a notification if a valid ID is provided. + */ + get_notification: { + parameters: { + path: { + /** @description The unique ID of the notification. */ + notification_id: number; + }; + }; + responses: { + 200: components["responses"]["notifications_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Delete a Notification + * @description Deletes a notification if a valid ID is provided. + */ + delete_notifications: { + parameters: { + path: { + /** @description The unique ID of the notification. */ + notification_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Delete All Notifications + * @description Deletes all notifications belonging to the authenticated user. Note that this is only the list of notifications generated to be displayed in the Nexla UI. + */ + delete_all_notifications: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["async_or_null"]; + }; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Notifications Count + * @description Returns the total number of notifications in the authenticated user's account. Note that this only includes notifications generated to be displayed in the Nexla UI. 
+ */ + get_notification_count: { + parameters: { + path: { + /** @description Set the read query parameter to 0 to fetch only notifications that have not yet been read, or set it to 1 to fetch only those that have been read. If you don't send a read query parameter, all notifications (both read and unread) will be fetched. */ + read: number; + }; + }; + responses: { + 200: components["responses"]["notifications_count"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Mark Notification Read + * @description Use this endpoint to mark one, multiple, or all notifications as read. To mark a list of notifications, send an array of notification IDs as the payload. To mark all notifications, send the notification_id query parameter with the value `all`. + */ + notifications_mark_read: { + parameters: { + query?: { + /** + * @description The unique ID of one or more notifications, or enter "all" to mark all notifications as read. + * This can be used in place of an array of IDs in the request body. + */ + notification_id?: "all" | number; + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + }; + requestBody?: { + content: { + "application/json": number[]; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["async_or_null"]; + }; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Mark Notification Unread + * @description Use this endpoint to mark one, multiple, or all notifications as read. To mark a list of notifications, send an array of notification IDs as the payload. To mark all notifications, send the notification_id query parameter with the value `all`. 
+ */ + notifications_mark_unread: { + parameters: { + query?: { + /** + * @description The unique ID of one or more notifications, or enter "all" to mark all notifications as unread. + * This can be used in place of an array of IDs in the request body. + */ + notification_id?: "all" | number; + }; + }; + requestBody?: { + content: { + "application/json": number[]; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get All Notification Types + * @description Fetches a list of all notifications supported by Nexla in this environment. + * + * When users choose whether or not some notifications are enabled, their choices are saved in `notification_settings` and linked to the ID of the relevant notification type. + */ + get_notification_types: { + parameters: { + query?: { + status?: "ACTIVE" | "PAUSE"; + }; + }; + responses: { + 200: components["responses"]["notification_types_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get One Notification Type + * @description Fetches details about a specific notification type supported by Nexla in this environment. + */ + list_notification_type: { + parameters: { + query: { + event_type: components["schemas"]["NotificationEventType"]; + resource_type: components["schemas"]["NotificationResourceType"]; + }; + }; + responses: { + 200: components["responses"]["notification_types_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * List Notification Channel Settings + * @description Notification channel settings contain configuration settings relevant to where notifications should be delivered. For example, the settings for the `EMAIL` channel contain the email addresses to which notifications can be sent. 
+ * + * You can maintain multiple configuration settings for the same channel to route notifications for specific resources and types to different locations. + * + * This endpoint lists all notification channel settings in the authenticated user's account. + */ + list_notification_channel_settings: { + responses: { + 200: components["responses"]["notification_channel_settings_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a Notification Channel Setting + * @description Create a new configuration for a notification channel. + * + * You can maintain multiple configuration settings for the same channel to route notifications for specific resources and types to different locations. + */ + create_notification_channel_setting: { + requestBody?: { + content: { + "application/json": components["schemas"]["NotificationChannelSettingCreateRequired"]; + }; + }; + responses: { + 200: components["responses"]["notification_channel_settings_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get a Notification Channel Setting + * @description Returns a notification channel setting if a valid ID is provided. + */ + get_notification_channel_setting: { + parameters: { + path: { + /** @description The unique ID of the notification channel setting. */ + notification_channel_setting_id: number; + }; + }; + responses: { + 200: components["responses"]["notification_channel_settings_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Update a Notification Channel Setting + * @description Update the configuration of a notification channel setting. + */ + update_notification_channel_setting: { + parameters: { + path: { + /** @description The unique ID of the notification channel setting. 
*/ + notification_channel_setting_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["NotificationChannelSettingUpdate"]; + }; + }; + responses: { + 200: components["responses"]["notification_channel_settings_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Notification Channel Setting + * @description Deletes a notification channel setting if a valid ID is provided. + */ + delete_notification_channel_setting: { + parameters: { + path: { + /** @description The unique ID of the notification channel setting. */ + notification_channel_setting_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * List Notification Settings + * @description This endpoint lists all notification settings in the authenticated user's account. + * + * + * Notification settings contain the following user settings: + * 1. Whether the user wants to be notified about a specific event (`status` of a `notification_type` on a `notification_resource_type`) + * 2. If yes, on what `channel` the user wants to be notified + * 3. The configuration of the channel (`notification_channel_setting_id`) + * 4. Configuration parameters affect when the notification should be fired. 
This is usually left empty to use platform defaults, but it is relevant when users want to override the default settings of some notifications, such as `Source Data Delayed` + */ + list_notification_settings: { + parameters: { + query?: { + event_type?: components["schemas"]["NotificationEventType"]; + resource_type?: components["schemas"]["NotificationResourceType"]; + status?: components["schemas"]["NotificationSettingStatus"]; + }; + }; + responses: { + 200: components["responses"]["notification_settings_many"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a Notification Setting + * @description Create a setting to designate whether, when, and how a specific notification should be fired. + */ + create_notification_setting: { + requestBody?: { + content: { + "application/json": components["schemas"]["NotificationSettingCreateRequired"]; + }; + }; + responses: { + 200: components["responses"]["notification_settings_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get a Notification Setting + * @description Returns a notification if a valid ID is provided. + */ + get_notification_setting: { + parameters: { + path: { + /** @description The unique ID of the notification setting. */ + notification_setting_id: number; + }; + }; + responses: { + 200: components["responses"]["notification_settings_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Modify a Notification Setting + * @description Modifies a notification if a valid ID and body are provided. + */ + update_notification_setting: { + parameters: { + path: { + /** @description The unique ID of the notification setting. 
*/ + notification_setting_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["NotificationSettingUpdate"]; + }; + }; + responses: { + 200: components["responses"]["notification_settings_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete a Notification Setting + * @description Delete a notification setting if a valid ID is provided. + */ + delete_notification_setting: { + parameters: { + path: { + /** @description The unique ID of the notification setting. */ + notification_setting_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Notification Settings for an Event + * @description Use this endpoint to fetch all notification settings of a specific type. + * + * This can be used as a filter that is easy to use to understand, which returns all notifications that a user can expect to receive for a specific event. + */ + list_notification_settings_by_type: { + parameters: { + query?: { + expand?: boolean; + }; + path: { + /** @description The unique ID of the notification type. */ + notification_type_id: number; + }; + }; + responses: { + 200: components["responses"]["notification_settings_many_type"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Notification Settings For a Resource + * @description Use this endpoint to fetch all notification settings for a given resource. + * + * This can be used as a filter that is easy to understand, which returns all notifications that a user can expect to receive for a specific resource. 
+ */ + list_resource_notification_settings: { + parameters: { + query?: { + expand?: boolean; + filter_overridden_settings?: boolean; + /** @example 1 */ + notification_type_id?: number; + }; + path: { + /** @example 2 */ + resource_id: number; + resource_type: components["schemas"]["NotificationResourceType"]; + }; + }; + responses: { + 200: components["responses"]["notification_settings_many"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Total Account Metrics for An Organization + * @description Retrieves total account utilization metrics for an organization. The result consists of aggregated information about records processed within the specified date range by all resources owned by users in the organization. + */ + org_account_metrics_total: { + parameters: { + query: { + /** @description The date that should be considered as the start of the metrics aggregation period. */ + from: string; + /** @description The date that should be considered as the end of the metrics aggregation period. In the absence of this parameter, the API returns metrics aggregated up to the current date. */ + to?: string; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization. The result will be an aggregate of metrics for all resources owned by users in the organization. */ + org_id: number; + }; + }; + responses: { + 200: components["responses"]["flows_account_metrics"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Total Account Metrics for a User + * @description Retrieves total account utilization metrics for a user in an organization. 
The result consists of aggregated information about records processed within the specified date range by all resources owned by the user. + */ + user_account_metrics_total: { + parameters: { + query: { + /** @description The ID of the organization this user belongs to. This parameter is relevant for users who belong to multiple organizations. In the absence of this parameter, the API returns an aggregate of metrics for all resources owned by the user in the user's default organization. Set this query parameter to fetch metrics for a different organization. */ + org_id?: number; + /** @description The date that should be considered as the start of the metrics aggregation period. */ + from: string; + /** @description The date that should be considered as the end of the metrics aggregation period. In the absence of this parameter, the API returns metrics aggregated up to the current date. */ + to?: string; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the user. The result will be an aggregate of metrics for all resources owned by this user. */ + user_id: number; + }; + }; + responses: { + 200: components["responses"]["flows_account_metrics"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get 24 Hour Flow Stats for a User + * @description Retrieves the metrics and processing status of each flow that processed data in the last 24 hours. + * + * Each item reflects the total number of records processed by each stage of all flows accessible by the user that processed any data in the specified time window. 
+ */ + user_24_hour_flow_stats: { + parameters: { + query?: { + access_role?: components["parameters"]["access_roles"]; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the user whose flows are to be fetched. */ + user_id: number; + }; + }; + responses: { + 200: components["responses"]["flows_dashboard"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Daily Data Processing Metrics for a User + * @description Retrieves daily data processing metrics of all sources or all destinations owned by a user. + */ + user_metrics_daily: { + parameters: { + query: { + /** @description The ID of the organization this user belongs to. This parameter is relevant for users who belong to multiple organizations. In the absence of this parameter, the API returns an aggregate of metrics for all resources owned by the user in the user's default organization. Set this query parameter to fetch metrics for a different organization. */ + org_id?: number; + /** @description The type of resource that metrics should be fetched for. Select `SOURCE` for the total data ingested by all sources owned by the user. Select `SINK` for the total data written out by all sinks owned by the user. */ + resource_type: "SOURCE" | "SINK"; + /** @description The date that should be considered as the start of the metrics reporting period. */ + from: string; + /** @description The date that should be considered as the end of the metrics reporting period. In the absence of this parameter, the API returns metrics up to the current date. */ + to?: string; + /** @description This should be set to 1 for fetching daily aggregated metrics over the specified time range. */ + aggregate: number; + }; + path: { + /** @description The unique ID of the user. 
The result will be an aggregate of metrics for all resources owned by this user. */ + user_id: number; + }; + }; + responses: { + 200: components["responses"]["user_metrics"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Daily Metrics for a Resource of a Flow + * @description Retrieves daily data processing metrics of a `data_source`, `data_set`, or `data_sink`. + */ + get_resource_metrics_daily: { + parameters: { + query: { + /** @description The date that should be considered as the start of the metrics reporting period. */ + from: string; + /** @description The date that should be considered as the end of the metrics reporting period. In the absence of this parameter, the API returns metrics up to the current date. */ + to?: string; + /** @description This should be set to 1 for fetching daily aggregated metrics over the specified time range. */ + aggregate: number; + }; + path: { + /** @description The type of resource that metrics should be fetched for. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The ID of resource that metrics should be fetched for. */ + resource_id: number; + }; + }; + responses: { + 200: components["responses"]["ResourceMetricsDaily"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Metrics By Run ID for a Resource of a Flow + * @description Retrieves data processing metrics of a `data_source`, `data_set`, or `data_sink`. The reported metrics are grouped by run id to indicate the number of records processed during each ingestion cycle of this flow. 
+ */ + get_resource_metrics_by_run: { + parameters: { + query?: { + page?: components["parameters"]["page"]; + size?: components["parameters"]["size"]; + }; + path: { + /** @description The type of resource that metrics should be fetched for. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource you wish to fetch metrics for. For ex, set this to id of the `data_source` if you want to fetch metrics of a specific `data_source`. */ + resource_id: number; + /** + * @description Specify the rule based on which metrics should be grouped for aggregation. + * + * Default API behavior is to group by `runId`. This choice is only applicable for data sinks where the run summary could be reported by `runId` ( for reporting number of records written out per ingestion cycle) or `lastWritten` (for reporting number of records written out in each destination write batch). + */ + groupby: "runId" | "lastWritten"; + /** @description Specify the order in which paginated results should be sorted. Default API behavior is to order by `runId`. This choice is only applicable for data sinks where the run summary could be reported by `runId` ( for reporting number of records written out per ingestion cycle) or `lastWritten` (for reporting number of records written out in each destination write batch). */ + orderby: "runId" | "lastWritten"; + }; + }; + responses: { + 200: components["responses"]["ResourceMetricsByRuns"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Metrics for a Flow + * @description Retrieves data processing metrics of a flow. Metrics are aggregated for each node of the flow for the specified time range. They can be further grouped by run id to indicate the number of records processed during each ingestion cycle of this flow. 
+ * + * > Note: This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + get_flow_metrics: { + parameters: { + query: { + /** @description The date that should be considered as the start of the metrics aggregation period. */ + from: string; + /** @description The date that should be considered as the end of the metrics aggregation period. In the absence of this parameter, the API returns metrics aggregated up to the current date. */ + to?: string; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per_page"]; + }; + path: { + /** @description The type of node linked to the flow you want to reference. For ex, set this to `data_sources` if you want to refer to the flow originating in a `data_source`. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource whose flow you want to reference. For ex, set this to id of the `data_source` if you want to refer to the flow originating in a specific `data_source`. */ + resource_id: number; + /** + * @description Specify the rule based on which metrics should be grouped for aggregation. This is an optional property. + * + * If present and set to `runId` the response will contain one entry per run id, with each entry containing metrics for all resources that processed data during that run. + */ + groupby: "runId"; + /** @description Specify the order in which paginated results should be sorted. */ + orderby: "runId" | "created_at"; + }; + }; + responses: { + 200: components["responses"]["FlowMetricsResponse"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Flow Execution Logs for Run ID of a Flow + * @description Retrieves flow execution logs for a specific run id of a flow. 
+ * + * > Note: This is a variant of flow endpoints where the flow node can be referenced not by its own ID, but by the ID of the unique resource that is linked to that flow node. + */ + get_flow_logs_for_run_id: { + parameters: { + query: { + /** @description The run id (denoting the ingestion cycle) for which logs have to be fetched. */ + run_id: number; + /** @description The timestamp that should be considered as the start of the logs reporting period. */ + from: number; + /** @description The timestamp that should be considered as the end of the logs reporting period. In the absence of this parameter, the API returns metrics up to the current time. */ + to?: number; + page?: components["parameters"]["page"]; + per_page?: components["parameters"]["per_page"]; + }; + path: { + /** @description The type of node linked to the flow you want to reference. For ex, set this to `data_sources` if you want to refer to the flow originating in a `data_source`. */ + resource_type: "data_sources" | "data_sinks" | "data_sets"; + /** @description The unique id of the resource whose flow you want to reference. For ex, set this to id of the `data_source` if you want to refer to the flow originating in a specific `data_source`. */ + resource_id: number; + }; + }; + responses: { + 200: components["responses"]["FlowLogsResponse"]; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Data Source + * @description Retrieves the history of changes made to the properties of a data source. + */ + get_data_source_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. 
*/ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + data_source_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Data Sink + * @description Retrieves the history of changes made to the properties of a data sink. + */ + get_data_sink_audit_log: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + data_sink_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Nexset + * @description Retrieves the history of changes made to the properties of a Nexset. + */ + get_nexset_audit_log: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. 
*/ + data_set_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Data Credential + * @description Retrieves the history of changes made to the properties of a data credential. + */ + get_data_credential_audit_log: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + data_credential_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Data Map + * @description Retrieves the history of changes made to the properties of a data map. + */ + get_data_map_audit_log: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + data_map_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Data Schema + * @description Retrieves the history of changes made to the properties of a data schema. 
+ */ + get_data_schema_audit_log: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + data_schema_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Code Container + * @description Retrieves the history of changes made to the properties of a code container. This endpoint can also be used to fetch the history of changes made to any transform object. + */ + get_code_container_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + code_container_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Project + * @description Retrieves the history of changes made to the properties of a project. 
+ */ + get_project_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + project_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Document + * @description Retrieves the history of changes made to the properties of a document. + */ + get_doc_container_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. 
*/ + doc_container_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a User + * @description Retrieves the history of changes made to the properties of a user. + */ + get_user_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + user_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for an Organization + * @description Retrieves the history of changes made to the properties of an organization. + */ + get_org_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. 
*/ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + org_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Audit Log for a Team + * @description Retrieves the history of changes made to the properties of a team. + */ + get_team_audit_log: { + parameters: { + query?: { + /** @description If set to 'true', request will be executed in a deferred way, and results will be provided later. */ + async?: boolean; + /** @description The unique ID of the asynchronous request. In case it's provided, returns info about the deferred request. */ + request_id?: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the resource being queried. */ + team_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": components["schemas"]["audit_log_response"]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Quarantine Data Export Settings for A User + * @description Retrieve Quarantine Data Export Settings for all resources owned by a user. + * + * Nexla detects errors during different stages of data flow such as ingestion, transformation, and output. Error records are quarantined and accessible to the user via APIs as well as files. 
With Quarantine Data Export Settings, you can configure Nexla to write files containing information about erroneous records across all resources owned by a user. + * + * > This endpoint returns a 404 status code if no Quarantine Data Export Settings have been configured for the user. + */ + get_user_quarantine_data_export_settings: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the user whose quarantine settings you wish to retrieve. */ + user_id: number; + }; + }; + responses: { + 200: components["responses"]["quarantine_settings_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Update Quarantine Data Export Settings for A User + * @description Updates Quarantine Data Export Settings for all resources owned by a user so that all erroneous records can be automatically exported by the platform to a file system regularly. + */ + update_user_quarantine_data_export_settings: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the user. */ + user_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["QuarantineSettingMutable"]; + }; + }; + responses: { + 200: components["responses"]["quarantine_settings_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Set Quarantine Data Export Settings for A User + * @description Sets Quarantine Data Export Settings for all resources owned by a user so that all erroneous records can be automatically exported by the platform to a file system regularly. 
+ */ + create_quarantine_data_export_settings: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the user. */ + user_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["QuarantineSettingCreate"]; + }; + }; + responses: { + 200: components["responses"]["quarantine_settings_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Quarantine Data Export Settings for A User + * @description Deletes Quarantine Data Export Settings for all resources owned by a user. Deleting this setting will ensure the platform stops exporting all erroneous records for resources owned by the user to a file storage. + */ + delete_user_quarantine_data_export_settings: { + parameters: { + path: { + /** @description The unique id of the user. */ + user_id: number; + }; + }; + responses: { + /** @description OK */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all pending approval requests. + * @description Use this endpoint to fetch all pending approval requests that are not assigned to any users. + */ + get_pending_approval_requests: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["approval_requests_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get all requested approval requests by the user. + * @description Use this endpoint to fetch all approval requests that are requested by the user. 
+ */ + get_requested_approval_requests: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["approval_requests_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Approve pending approval requests + * @description Use this endpoint to approve pending approval requests that are assigned to the user or are unassigned. + */ + approve_approval_request: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Approval Request that needs to be approved. */ + request_id: number; + }; + }; + responses: { + 200: components["responses"]["approval_requests_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Reject pending approval requests + * @description Use this endpoint to reject pending approval requests that are assigned to the user or are unassigned. + */ + reject_approval_request: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the Approval Request that needs to be rejected. */ + request_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": { + reason?: string; + }; + }; + }; + responses: { + /** @description Successfully deleted */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Data Source + * @description Returns a list of the access-control rules set for this data source. + */ + get_data_source_accessors: { + parameters: { + path: { + /** @description The unique ID of the data source. 
*/ + data_source_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Data Source + * @description Adds a list of accessors to a data source. The existing accessors list is retained and merged with the new accessors list. + */ + add_data_source_accessors: { + parameters: { + path: { + /** @description The unique ID of the data source. */ + data_source_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Data Source + * @description Replaces the list of accessors belonging to a data source. Existing accessors will be removed from the data source. + */ + replace_data_source_accessors: { + parameters: { + path: { + /** @description The unique ID of the data source. */ + data_source_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Data Source + * @description Removes access-control rules from a data source. If no request body is provided, all rules associated with the data source will be removed. 
+ */ + delete_data_source_accessors: { + parameters: { + path: { + /** @description The unique ID of the data source. */ + data_source_id: number; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Nexset + * @description Returns a list of the access-control rules set for this Nexset. + */ + get_nexset_accessors: { + parameters: { + path: { + /** @description The unique ID of the Nexset. */ + data_set_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Nexset + * @description Adds new access-control rules to this Nexset. + */ + add_nexset_accessors: { + parameters: { + path: { + /** @description The unique ID of the Nexset. */ + data_set_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Nexset + * @description Replaces the list of access-control rules set for this Nexset. Existing rules will be removed from the Nexset, and only these new rules will be applied. 
+ */ + replace_nexset_accessors: { + parameters: { + path: { + /** @description The unique ID of the Nexset. */ + data_set_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Nexset + * @description Removes access-control rules from a Nexset. If no request body is provided, all rules associated with the Nexset will be removed. + */ + delete_nexset_accessors: { + parameters: { + path: { + /** @description The unique ID of the Nexset. */ + data_set_id: number; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Data Sink + * @description Returns a list of the access-control rules set for this data sink. + */ + get_data_sink_accessors: { + parameters: { + path: { + /** @description The unique ID of the data sink. */ + data_sink_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Data Sink + * @description Adds new access-control rules to this data sink. 
+ */ + add_data_sink_accessors: { + parameters: { + path: { + /** @description The unique ID of the data sink. */ + data_sink_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Data Sink + * @description Replaces the list of access-control rules set for this data sink. Existing rules will be removed from the data sink, and only these new rules will be applied. + */ + replace_data_sink_accessors: { + parameters: { + path: { + /** @description The unique ID of the data sink. */ + data_sink_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Data Sink + * @description Removes access-control rules from a data sink. If no request body is provided, all rules associated with the data sink will be removed. + */ + delete_data_sink_accessors: { + parameters: { + path: { + /** @description The unique ID of the data sink. */ + data_sink_id: number; + }; + }; + /** @description Optional list of accessors. 
*/ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Data Map + * @description Returns a list of the access-control rules set for this data map. + */ + get_data_map_accessors: { + parameters: { + path: { + /** @description The unique ID of the data map. */ + data_map_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Data Map + * @description Adds new access-control rules to this data map. + */ + add_data_map_accessors: { + parameters: { + path: { + /** @description The unique ID of the data map. */ + data_map_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Data Map + * @description Replaces the list of access-control rules set for this data map. Existing rules will be removed from the data map, and only these new rules will be applied. + */ + replace_data_map_accessors: { + parameters: { + path: { + /** @description The unique ID of the data map. 
*/ + data_map_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Data Map + * @description Removes access-control rules from a data map. If no request body is provided, all rules associated with the data map will be removed. + */ + delete_data_map_accessors: { + parameters: { + path: { + /** @description The unique ID of the data map. */ + data_map_id: number; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Data Credential + * @description Returns a list of the access-control rules set for this data credential. + */ + get_data_credential_accessors: { + parameters: { + path: { + /** @description The unique ID of the data credential. */ + data_credential_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Data Credential + * @description Adds new access-control rules to this data credential. 
+ */ + add_data_credential_accessors: { + parameters: { + path: { + /** @description The unique ID of the data credential. */ + data_credential_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Data Credential + * @description Replaces the list of access-control rules set for this data credential. Existing rules will be removed from the data credential, and only these new rules will be applied. + */ + replace_data_credential_accessors: { + parameters: { + path: { + /** @description The unique ID of the data credential. */ + data_credential_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Data Credential + * @description Removes access-control rules from a data credential. If no request body is provided, all rules associated with the data credential will be removed. + */ + delete_data_credential_accessors: { + parameters: { + path: { + /** @description The unique ID of the data credential. */ + data_credential_id: number; + }; + }; + /** @description Optional list of accessors. 
*/ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Project Accessors + * @description Returns a list of the access-control rules set for this project. + */ + get_project_accessors: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Project Accessors + * @description Adds new access-control rules to this project. + */ + add_project_accessors: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Project + * @description Replaces the list of access-control rules set for this project. Existing rules will be removed from the project, and only these new rules will be applied. + */ + replace_project_accessors: { + parameters: { + path: { + /** @description The unique ID of the project. 
*/ + project_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Project Accessors + * @description Removes access-control rules from a project. If no request body is provided, all rules associated with the project will be removed. + */ + delete_project_accessors: { + parameters: { + path: { + /** @description The unique ID of the project. */ + project_id: number; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Flow + * @description Returns a list of the access-control rules set for this flow. + */ + get_flow_accessors: { + parameters: { + path: { + /** @description The unique ID of the flow. */ + data_flow_id: string; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Flow + * @description Adds new access-control rules to this data flow. + */ + add_flow_accessors: { + parameters: { + path: { + /** @description The unique ID of the data flow. 
*/ + flow_id: string; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Flow + * @description Replaces the list of access-control rules set for this flow. Existing rules will be removed from the flow, and only these new rules will be applied. + */ + replace_flow_accessors: { + parameters: { + path: { + /** @description The unique ID of the data flow. */ + flow_id: string; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Flow + * @description Removes access-control rules from a data flow. If no request body is provided, all rules associated with the data flow will be removed. + */ + delete_flow_accessors: { + parameters: { + path: { + /** @description The unique ID of the data flow. */ + data_flow_id: string; + }; + }; + /** @description Optional list of accessors. 
*/ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Flow (Deprecated) + * @description Returns a list of the access-control rules set for this data flow. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See get_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + "get_flow_accessors_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the data flow. This ID is of the type `{resource_type}/{resource_id}`, where the resource is the root node of the flow - for example, `data_source/1001`. */ + data_flow_id: string; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Flow (Deprecated) + * @description Add new access-control rules to this data flow. This version uses a composite data flow id of the type `{resource_type}/{resource_id}`. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See add_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + "add_flow_accessors_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the data flow. 
This ID is of the type `{resource_type}/{resource_id}`, where the resource is the root node of the flow - for example, `data_source/1001`. */ + data_flow_id: string; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Flow (Deprecated) + * @description Replace the list of access-control rules set for this data flow. Existing rules will be removed from the data flow, and only these new rules will be applied. This version uses a composite data flow id of the type `{resource_type}/{resource_id}`. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See replace_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + "replace_flow_accessors_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the data flow. This ID is of the type `{resource_type}/{resource_id}`, where the resource is the root node of the flow - for example, `data_source/1001`. */ + data_flow_id: string; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Flow (Deprecated) + * @description Remove access-control rules from a data flow. 
If no request body is provided, all rules associated with the data flow will be removed. This version uses a composite data flow id of the type `{resource_type}/{resource_id}`. + * + * > **Note**: This version of the endpoint has been deprecated. It uses a composite data flow id of the type `{resource_type}/{resource_id}`. See delete_flow_accessors for a new version of this endpoint that uses unique `flow_id`. + */ + "delete_flow_accessors_(deprecated)": { + parameters: { + path: { + /** @description The unique ID of the data flow. This ID is of the type `{resource_type}/{resource_id}`, where the resource is the root node of the flow - for example, `data_source/1001`. */ + data_flow_id: string; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Data Schema + * @description Returns a list of the access-control rules set for this data schema. + */ + get_data_schema_accessors: { + parameters: { + path: { + /** @description The unique ID of the data schema. */ + data_schema_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Data Schema + * @description Adds new access-control rules to this data schema. + */ + add_data_schema_accessors: { + parameters: { + path: { + /** @description The unique ID of the data schema. 
*/ + data_schema_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Data Schema + * @description Replaces the list of access-control rules set for this data schema. Existing rules will be removed from the data schema, and only these new rules will be applied. + */ + replace_data_schema_accessors: { + parameters: { + path: { + /** @description The unique ID of the data schema. */ + data_schema_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Data Schema + * @description Removes access-control rules from a data schema. If no request body is provided, all rules associated with the data schema will be removed. + */ + delete_data_schema_accessors: { + parameters: { + path: { + /** @description The unique ID of the data schema. */ + data_schema_id: number; + }; + }; + /** @description Optional list of accessors. 
*/ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Document + * @description Returns a list of the access-control rules set for this document. + */ + get_doc_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the document. */ + doc_container_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Document + * @description Adds new access-control rules to this document. + */ + add_doc_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the document container. */ + doc_container_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Document + * @description Replaces the list of access-control rules set for this document. Existing rules will be removed from the document, and only these new rules will be applied. + */ + replace_doc_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the document. 
*/ + doc_container_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Document + * @description Removes access-control rules from a document. If no request body is provided, all rules associated with the document will be removed. + */ + delete_doc_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the document. */ + doc_container_id: number; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Access Rules on Code Container + * @description Returns a list of the access-control rules set for this code container. + */ + get_code_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the code container. */ + code_container_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Access Rules on Code Container + * @description Adds new access-control rules to this code container. 
+ */ + add_code_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the code container. */ + code_container_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Access Rules on Code Container + * @description Replaces the list of access-control rules set for this code container. Existing rules will be removed from the code container, and only these new rules will be applied. + */ + replace_code_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the code container. */ + code_container_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Access Rules on Code Container + * @description Removes access-control rules from a code container. If no request body is provided, all rules associated with the code container will be removed. + */ + delete_code_container_accessors: { + parameters: { + path: { + /** @description The unique ID of the code container. */ + code_container_id: number; + }; + }; + /** @description Optional list of accessors. 
*/ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Team Accessors + * @description Returns a list of the access-control rules set for this team. + */ + get_team_accessors: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Add Team Accessors + * @description Adds new access-control rules to this team. + */ + add_team_accessors: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Replace Team Accessors List + * @description Replaces the list of access-control rules set for this team. Existing rules will be removed from the team, and only these new rules will be applied. + */ + replace_team_accessors: { + parameters: { + path: { + /** @description The unique ID of the team. 
*/ + team_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete Team Accessors + * @description Removes access-control rules from a team. If no request body is provided, all rules associated with the team will be removed. + */ + delete_team_accessors: { + parameters: { + path: { + /** @description The unique ID of the team. */ + team_id: number; + }; + }; + /** @description Optional list of accessors. */ + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["AccessorsRequestSchema"]; + }; + }; + responses: { + 200: components["responses"]["accessors_list"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get marketplace domains. + * @description Use this endpoint to fetch marketplace domains. You need a read permission for the org. + */ + get_domains: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["domains_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create marketplace domains. + * @description Use this endpoint to create marketplace domains. You need a manage permission for the org. 
+ */ + create_domains: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomainCreate"]; + }; + }; + responses: { + 200: components["responses"]["domains_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get marketplace domains for organization. + * @description Use this endpoint to fetch marketplace domains for a specific organization. You need a read permission for the org. + */ + get_domains_for_org: { + parameters: { + query: { + /** @description The organization ID to filter domains by */ + org_id: number; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["domains_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get a single marketplace domain. + * @description Use this endpoint to fetch a marketplace domain. You need a read permission for the domain. + */ + get_domain: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain that needs to be fetched. */ + domain_id: number; + }; + }; + responses: { + 200: components["responses"]["domains_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update a single marketplace domain. + * @description Use this endpoint to update a marketplace domain. You need a manage permission for the domain. + */ + update_domain: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain that needs to be updated. 
*/ + domain_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomainCreate"]; + }; + }; + responses: { + 200: components["responses"]["domains_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a single marketplace domain. + * @description Use this endpoint to create a marketplace domain. You need a manage permission for the org. + */ + create_domain: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomainCreate"]; + }; + }; + responses: { + 200: components["responses"]["domains_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Delete a single marketplace domain. + * @description Use this endpoint to delete a marketplace domain. You need a manage permission for the domain to delete it. + */ + delete_domain: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain that needs to be deleted. */ + domain_id: number; + }; + }; + responses: { + /** @description Successfully deleted */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get marketplace items for a domain. + * @description Use this endpoint to fetch marketplace items for a domain. You need a read permission for the domain. + */ + get_domain_items: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain. 
*/ + domain_id: number; + }; + }; + responses: { + 200: components["responses"]["domain_items_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a marketplace item for a domain. + * @description Use this endpoint to create a marketplace item for a domain. You need a manage permission for the domain. + */ + create_domain_item: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain. */ + domain_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["MarketplaceDomainsItemCreate"]; + }; + }; + responses: { + 200: components["responses"]["domain_items_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get custodians for a marketplace domain. + * @description Use this endpoint to fetch custodians for a marketplace domain. You need a read permission for the domain. + */ + get_domain_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain. */ + domain_id: number; + }; + }; + responses: { + 200: components["responses"]["custodians_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update custodians for a marketplace domain. + * @description Use this endpoint to update custodians for a marketplace domain. You need a manage permission for the domain. + */ + update_domain_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain. 
*/ + domain_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansPayload"]; + }; + }; + responses: { + 200: components["responses"]["custodians_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Add custodians to a marketplace domain. + * @description Use this endpoint to add custodians to a marketplace domain. You need a manage permission for the domain. + */ + add_domain_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain. */ + domain_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansPayload"]; + }; + }; + responses: { + 200: components["responses"]["custodians_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Remove custodians from a marketplace domain. + * @description Use this endpoint to remove custodians from a marketplace domain. You need a manage permission for the domain. + */ + remove_domain_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the domain that needs to be deleted. */ + domain_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansPayload"]; + }; + }; + responses: { + /** @description Successfully deleted */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get organization custodians. + * @description Use this endpoint to fetch custodians of organization. 
Org read permission is required to access this endpoint. + */ + get_org_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization. */ + org_id: number; + }; + }; + responses: { + 200: components["responses"]["custodians_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update organization custodians. + * @description Users listed within the request body will be updated as custodians for the organization. Users can be identified by their email or id. + */ + update_org_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization. */ + org_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansPayload"]; + }; + }; + responses: { + 200: components["responses"]["custodians_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Add organization custodians. + * @description Users listed within the request body will be added as custodians to the organization. Users can be identified by their email or id. + */ + add_org_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization. */ + org_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansPayload"]; + }; + }; + responses: { + 200: components["responses"]["custodians_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Remove organization custodians. 
+ * @description Users listed within the request body will be deleted as custodians from the organization. Users can be identified by their email or id. + */ + remove_org_custodians: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization. */ + org_id: number; + }; + }; + requestBody?: { + content: { + "application/vnd.nexla.api.v1+json": components["schemas"]["CustodiansPayload"]; + }; + }; + responses: { + /** @description Successfully deleted */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Login with Basic Authentication + * @description Use this endpoint for authentication if your organization allows basic authentication. A successful authentication attempt will result in an `access_token` that can be used to make authenticated requests to other API endpoints. The `access_token` automatically expires after a fixed duration, but you can also call the `/logout` endpoint to invalidate the access token at the end of your session. + * + * Nexla supports various methods of authentication, including Basic (email/password), Google SSO, and custom SAML- or OIDC-based SSO. One or more of these methods might be allowed in any organization, depending on the configuration chosen by the administrators. Instead of using this endpoint to start a session programmatically, we recommend performing authentication through the Nexla UI and using the Nexla Session Token (available in Tools >> Nexla Session Token) to connect to the API programmatically. + * + * > Note: A user might belong to multiple organizations. This method initiates an authenticated session in their default organization. 
+ */ + login_with_basic_auth: { + responses: { + 200: components["responses"]["token"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + }; + }; + /** + * Logout + * @description Ends the current session and invalidates the `NexlaSessionToken` for future requests. + */ + logout: { + responses: { + /** @description OK */ + 200: { + content: never; + }; + /** @description Bad Request */ + 400: { + content: never; + }; + }; + }; + /** + * Get current rate limit and usage + * @description Returns the API rate limiting categories and the user's current usage + */ + limits: { + responses: { + /** @description Current usage for the types of endpoints */ + 200: { + content: { + "application/json": { + second?: { + common?: { + limit?: number; + count?: number; + }; + light?: { + limit?: number; + count?: number; + }; + medium?: { + limit?: number; + count?: number; + }; + high?: { + limit?: number; + count?: number; + }; + }; + day?: { + common?: { + limit?: number; + count?: number; + }; + light?: { + limit?: number; + count?: number; + }; + medium?: { + limit?: number; + count?: number; + }; + high?: { + limit?: number; + count?: number; + }; + }; + }; + }; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get auth configs. + * @description Get the authentication configurations by it's ID. + */ + get_api_auth_configs: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description ID of the authentication configuration. */ + auth_config_id: number; + }; + }; + responses: { + 200: components["responses"]["auth_config_one"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create auth config. + * @description Create a new authentication configuration for the API. 
+ */ + create_api_auth_config: { + requestBody?: { + content: { + "application/json": components["schemas"]["AuthConfigPayload"]; + }; + }; + responses: { + 200: components["responses"]["auth_config_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get all auth configs. + * @description Get the authentication configurations for the API. This will return all auth configs. Super-admin privilege is required. + */ + get_all_api_auth_configs: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["auth_configs_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update auth config (enable/disable). + * @description Update an authentication configuration for the API. + */ + update_api_auth_config: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization for which settings will be returned. */ + org_id: number; + /** @description The unique ID of the authentication setting that needs to be updated. */ + auth_setting_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["AuthSettingPayload"]; + }; + }; + responses: { + 200: components["responses"]["auth_setting_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete auth config. + * @description Delete an authentication configuration for the API. + */ + delete_api_auth_config: { + parameters: { + path: { + /** @description The unique ID of the organization that needs to be deleted. 
*/ + auth_config_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Sign Up + * @description This endpoint is used for users to register in the system. Once signup process is completed (email is verified, manual approval by admin may be required), + * user can set a password and login to the system. + * Email verification link is sent to the email provided by the user. + * + * Optionally, it allows for logged in user to be called. In this case, email verification will be skipped, and new org is created immediately. + */ + self_sign_up: { + requestBody?: { + content: { + "application/json": components["schemas"]["SignupRequest"]; + }; + }; + responses: { + /** @description Successful response. */ + 200: { + content: never; + }; + /** @description Bad Request in case of invalid input, unacceptable email etc. */ + 400: { + content: never; + }; + }; + }; + /** + * Verify Email + * @description This endpoint is used to verify the email address of the user. + * The user will be able to set a password and login to the system after the email is verified (unless manual admin approval is required). + */ + verify_email: { + parameters: { + query: { + /** @description The token received in the email. */ + token: string; + }; + }; + responses: { + /** @description Successful response. */ + 200: { + content: never; + }; + /** @description Bad Request */ + 400: { + content: never; + }; + }; + }; + /** + * List Self Sign Up Requests + * @description Returns a list of self sign up requests for an admin. + */ + get_self_signup_requests: { + responses: { + 200: components["responses"]["self_signup_requests_response"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Approve Self Sign Up Request + * @description This endpoint is used to approve a self sign up request. System admin access required. 
+ * The user will be able to set a password and login to the system after the request is approved. + */ + approve_self_sign_up_request: { + parameters: { + path: { + /** @description The ID of the self sign up request. */ + request_id: string; + }; + }; + responses: { + /** @description Successful response. */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * List self-sign-up blocked domains for admins. + * @description Returns a list of domains that are blocked for self-sign-up. Requires admin access. + */ + get_self_signup_blocked_domains: { + responses: { + 200: components["responses"]["self_signup_blocked_domains_response"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Add self-sign-up blocked domain for admins. + * @description Adds a domain to the list of domains that are blocked for self-sign-up. Requires admin access. + */ + add_self_signup_blocked_domain: { + requestBody?: { + content: { + "application/json": { + /** + * @description The domain to block for self-sign-up. + * @example example-domain.com + */ + domain?: string; + }; + }; + }; + responses: { + /** @description Successful response. */ + 200: { + content: never; + }; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update self-sign-up blocked domain for admins. + * @description Updates a domain in the list of domains that are blocked for self-sign-up. Requires admin access. + */ + update_self_signup_blocked_domain: { + parameters: { + path: { + /** @description The ID of the self sign up blocked domain. */ + domain_id: string; + }; + }; + requestBody?: { + content: { + "application/json": { + /** + * @description The domain to block for self-sign-up. + * @example example-domain.com + */ + domain?: string; + }; + }; + }; + responses: { + /** @description Successful response. 
*/ + 200: { + content: never; + }; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Delete self-sign-up blocked domain for admins. + * @description Deletes a domain from the list of domains that are blocked for self-sign-up. Requires admin access. + */ + delete_self_signup_blocked_domain: { + parameters: { + path: { + /** @description The ID of the self sign up blocked domain. */ + domain_id: string; + }; + }; + responses: { + /** @description Successful response. */ + 200: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not found error */ + 404: { + content: never; + }; + }; + }; + /** + * Get auth settings for org. + * @description Get the authentication settings for the given org. This allows to enable or disable specific auth configs for the org. + */ + get_api_auth_settings: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the organization for which settings will be returned. */ + org_id: number; + }; + }; + responses: { + 200: components["responses"]["auth_settings_many"]; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get async operations list for current user. + * @description Get a list of async operations for current user. Returns type, arguments, status, results etc. + */ + get_async_tasks: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["async_tasks_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + }; + }; + /** + * Create an async operation. + * @description Create an async operation. Returns the task id and other related data. 
Checks if the user has permission to create the task with all entities, and other preconditions. + */ + create_async_task: { + requestBody?: { + content: { + "application/json": components["schemas"]["AsyncTaskPayload"]; + }; + }; + responses: { + 200: components["responses"]["async_task_one"]; + /** @description Bad Request (e.g. invalid arguments) */ + 400: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get async operations list for current user of a specific type. + * @description Get a list of async operations for current user of a specific type. Returns type, arguments, status, results etc. + */ + get_async_tasks_of_type: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The type of the task. */ + task_type: components["schemas"]["async_task_types"]; + }; + }; + responses: { + 200: components["responses"]["async_tasks_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + }; + }; + /** + * Get async operations list for current user by status + * @description Get a list of async operations for current user of a specific type. Returns type, arguments, status, results etc. + */ + get_async_tasks_by_status: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The status of tasks. */ + status: components["schemas"]["async_task_statuses"]; + }; + }; + responses: { + 200: components["responses"]["async_tasks_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + }; + }; + /** + * Get async operation types + * @description Get a list of async operation types. Returns type, arguments, status, results etc. 
+ */ + get_async_task_types: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": string[]; + }; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + }; + }; + /** + * Get async operation arguments for a specific type with descriptions + * @description Get a list of async operation arguments for a specific type with descriptions. + */ + get_async_tasks_explain_arguments: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The type of the task. */ + task_type: components["schemas"]["async_task_types"]; + }; + }; + responses: { + /** @description Success. The response is a dictionary with the argument names as keys and the descriptions as values. */ + 200: { + content: { + "application/json": Record; + }; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get async operation by ID + * @description Get an async operation by ID. Returns type, arguments, status, results and other fields. + */ + get_async_task: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The ID of the task. */ + task_id: number; + }; + }; + responses: { + 200: components["responses"]["async_task_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete async operation by ID + * @description Delete an async operation by ID. Returns the task id and other related data. + */ + delete_async_task: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The ID of the task. */ + task_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Bad Request (e.g. 
task is running) */ + 400: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Rerun async operation + * @description Rerun an async operation. This is used to re-run an async operation. The task will be re-created and executed with the same arguments. + */ + rerun_async_task: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The task id. */ + task_id: number; + }; + }; + responses: { + 200: components["responses"]["async_task_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get async operation result + * @description Get the result of an async operation. + */ + get_async_task_result: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The ID of the task. */ + task_id: number; + }; + }; + responses: { + /** @description Processing */ + 102: { + content: never; + }; + /** @description Success */ + 200: { + content: { + "application/json": Record; + }; + }; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get download link for async operation result + * @description Get a download link for the result of an async operation. + */ + get_async_task_download_link: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The ID of the task. */ + task_id: string; + }; + }; + responses: { + /** @description Success */ + 200: { + content: { + "application/json": string; + }; + }; + /** @description Bad Request. Returned when tasks is not completed, or doesn't support downloading the result. 
*/ + 400: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Acknowledge async operation + * @description Acknowledge an async operation. This is used to confirm that the user has seen the results of the async operation. After that, if tasks has stored results, they will be deleted. + */ + acknowledge_async_task: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The task id. */ + task_id: number; + }; + }; + responses: { + /** @description Success */ + 200: components["responses"]["async_task_one"]; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all Custom Runtimes + * @description Retrieves a list of all custom runtimes defined for the organization. + */ + get_runtimes: { + responses: { + 200: components["responses"]["runtimes_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a Custom Runtime + * @description Creates a custom runtime with the specified configuration. + */ + create_runtime: { + requestBody?: { + content: { + "application/json": components["schemas"]["RuntimePayload"]; + }; + }; + responses: { + 200: components["responses"]["runtimes_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get a custom runtime by ID + * @description Retrieves a custom runtime + */ + get_runtime: { + parameters: { + path: { + /** @description The ID of the runtime to retrieve */ + runtime_id: number; + }; + }; + responses: { + 200: components["responses"]["runtimes_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Update a Custom Runtime + * @description Updates a custom runtime. 
+ */ + update_runtime: { + parameters: { + path: { + /** @description The ID of the runtime to update */ + runtime_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["RuntimePayload"]; + }; + }; + responses: { + 200: components["responses"]["runtimes_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Delete a Custom Runtime + * @description Creates a custom runtime with the specified configuration. + */ + delete_runtime: { + parameters: { + path: { + /** @description The ID of the runtime to delete */ + runtime_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Activate a Custom Runtime + * @description Activates a custom runtime with the specified ID. + */ + activate_runtime: { + parameters: { + path: { + /** @description The ID of the runtime to activate */ + runtime_id: number; + }; + }; + responses: { + 200: components["responses"]["runtimes_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Pause a Custom Runtime + * @description Pause a custom runtime with the specified ID. + */ + pause_runtime: { + parameters: { + path: { + /** @description The ID of the runtime to pause */ + runtime_id: number; + }; + }; + responses: { + 200: components["responses"]["runtimes_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get all GenAI configs in org + * @description Retrieves all GenAI configurations accessible to the authenticated user. 
+ */ + get_gen_ai_configs: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["gen_ai_configs_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Create a GenAI config + * @description Creates a GenAI configuration. + */ + create_gen_ai_config: { + requestBody?: { + content: { + "application/json": components["schemas"]["GenAiConfigPayload"]; + }; + }; + responses: { + 200: components["responses"]["gen_ai_configs_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + }; + }; + /** + * Get GenAI Integration Config + * @description Retrieves a GenAI integration configuration by ID. + */ + get_gen_ai_integration_config: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the GenAI config. */ + set_id: number; + }; + }; + responses: { + 200: components["responses"]["gen_ai_configs_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Update GenAI Integration Config + * @description Updates a GenAI integration configuration by ID. + */ + update_gen_ai_integration_config: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the GenAI config. 
*/ + set_id: number; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["GenAiConfigCreatePayload"]; + }; + }; + responses: { + 200: components["responses"]["gen_ai_configs_one"]; + /** @description Bad Request */ + 400: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete GenAI Integration Config + * @description Deletes a GenAI integration configuration by ID. + */ + delete_gen_ai_integration_config: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the GenAI config. */ + set_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get all bindings of GenAI configs of the org for specified usages. + * @description Retrieves all activated GenAI configurations for the org. + */ + get_gen_ai_org_settings: { + parameters: { + query?: { + /** @description The ID of the org (super-admin access required). */ + org_id?: number; + /** @description Flag to fetch all config bindings in the system (super-admin access required). */ + all?: boolean; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + 200: components["responses"]["gen_ai_org_settings_many"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Create a binding of GenAI config for the org for specific usage. 
+ * @description Activates a GenAI configuration for specific usage. All other bindings for the same usage will be deactivated. + */ + create_gen_ai_org_setting: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["GenAiOrgSettingPayload"]; + }; + }; + responses: { + 200: components["responses"]["gen_ai_org_settings_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Get Org GenAI binding + * @description Retrieves a GenAI configuration binding by ID. + */ + get_gen_ai_org_setting: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the GenAI config. */ + org_gen_ai_config_id: number; + }; + }; + responses: { + 200: components["responses"]["gen_ai_org_settings_one"]; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Delete GenAI Config binding for org. + * @description Delete GenAI config binding for an org (disables a GenAI configuration usage). + */ + delete_gen_ai_org_setting: { + parameters: { + header?: { + Accept?: components["parameters"]["accept"]; + }; + path: { + /** @description The unique ID of the GenAI config. 
*/ + org_gen_ai_config_id: number; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** + * Shows active GenAI Configuration for specific usage + * @description Shows active GenAI Configuration for specific usage + */ + gen_ai_org_settings_show_active: { + parameters: { + query: { + /** @description The usage of the GenAI configuration. */ + gen_ai_usage: string; + }; + header?: { + Accept?: components["parameters"]["accept"]; + }; + }; + responses: { + /** @description Success */ + 200: { + content: never; + }; + /** @description Unauthorized */ + 401: { + content: never; + }; + /** @description Forbidden */ + 403: { + content: never; + }; + /** @description Not Found */ + 404: { + content: never; + }; + }; + }; + /** Send one record to Webhook */ + send_one_record: { + parameters: { + path: { + /** + * @description Set this to `true` if you wish to send any custom headers to be included in the ingested records as part of the request. + * + * The platform will ignore standard headers like `Authorization` and `Content-Type`. Any custom headers will be added as a `header_` attribute in the record. + * + * Example: The request header `NEX-HEADER: test` will be ingested as the attribute `header_NEX-HEADER` with the value `test`. + * + * Default value: `false` + */ + include_headers: boolean; + /** + * @description Set this to `true` if you wish to send any custom query parameters to be included in the ingested records as part of the request. + * + * The platform will ignore standard query parameters like `api_key`. Any custom query parameters will be added as a `url_param_` attribute in the record. + * + * Example: The request query parameter `abc=def` will be ingested as the attribute `url_param_abc` with the value `def`. 
+ * + * Default value: `false` + */ + include_url_params: boolean; + /** + * @description Usually, the platform performs Nexset schema detection only for the first few records from a new webhook. This is to avoid any unnecessary latencies in webhook record processing. + * + * Set this to `true` if you wish to enforce any webhook event to trigger schema detection. + * + * Default value: `false` + */ + force_schema_detection: boolean; + }; + }; + requestBody?: { + content: { + /** + * @example { + * "id": 1, + * "name": "one" + * } + */ + "application/json": components["schemas"]["nexset_record"]; + /** + * @example + * 1 + * one + * + */ + "application/xml": { + root?: components["schemas"]["nexset_record"]; + }; + }; + }; + responses: { + /** @description Returns the 200 status to indicate that data was received. */ + 200: { + content: { + "application/json": { + /** @description Nexset ID of the Nexset to which this record is sent. */ + datasetId?: number; + /** @description Number of records successfully processed by Nexla. */ + processed?: number; + }; + }; + }; + /** @description Request failed authentication */ + 403: { + content: never; + }; + /** @description Returned when the service is not able to consume the data. */ + 500: { + content: { + "application/json": { + /** + * @description Descriptive text about the error reason. + * + * @example Source #1000 is PAUSED + */ + error?: string; + }; + }; + }; + }; + }; + /** + * Send many records to Webhook + * @description Send an array of JSON objects. Nexla will treat each object as a unique record for the webhook. + */ + send_many_records: { + parameters: { + path: { + /** + * @description Set this to `true` if you wish to send any custom headers to be included in the ingested record as part of the request. + * + * The platform will ignore standard headers like `Authorization` and `Content-Type`. Any custom headers will be added as a `header_` attribute in the record. 
+ * + * Example: The request header `NEX-HEADER: test` will be ingested as the attribute `header_NEX-HEADER` with the value `test`. + * + * Default value: `false` + */ + include_headers: boolean; + /** + * @description Set this to `true` if you wish to send any custom query parameters to be included in the ingested records as part of the request. + * + * The platform will ignore standard query parameters like `api_key`. Any custom query parameters will be added as a `url_param_` attribute in the record. + * + * Example: The request query parameter `abc=def` will be ingested as the attribute `url_param_abc` with the value `def`. + * + * Default value: `false` + */ + include_url_params: boolean; + /** + * @description Usually, the platform performs Nexset schema detection only for the first few records from a new webhook. This is to avoid any unnecessary latencies in webhook record processing. + * + * Set this to `true` if you wish to enforce any webhook event to trigger schema detection. + * + * Default value: `false` + */ + force_schema_detection: boolean; + }; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["nexset_record"][]; + }; + }; + responses: { + /** @description Returns the 200 status to indicate that data was received. */ + 200: { + content: { + "application/json": { + /** @description Nexset ID of the Nexset to which this record is sent. */ + datasetId?: number; + /** @description Number of records successfully processed by Nexla. */ + processed?: number; + }; + }; + }; + /** @description Request failed authentication */ + 403: { + content: never; + }; + /** @description Returned when the service is not able to consume the data. */ + 500: { + content: { + "application/json": { + /** + * @description Descriptive text about the error reason. 
+ * + * @example Source #1000 is PAUSED + */ + error?: string; + }; + }; + }; + }; + }; +} diff --git a/packages/ts-sdk/src/generated/spec-metadata.ts b/packages/ts-sdk/src/generated/spec-metadata.ts new file mode 100644 index 0000000..bebba0a --- /dev/null +++ b/packages/ts-sdk/src/generated/spec-metadata.ts @@ -0,0 +1,8 @@ +/** + * Auto-generated spec metadata from ../../plugin-redoc-0.yaml. + * Do not edit manually. + */ + +export const SPEC_SOURCE = "../../plugin-redoc-0.yaml"; +export const SPEC_HASH_ALGORITHM = "sha256"; +export const SPEC_HASH = "ef02be92fe7b26c2c1ca51c7ddbdc2e0be2c9982b91d5bf026a150eee4f78b1f"; diff --git a/packages/ts-sdk/src/index.ts b/packages/ts-sdk/src/index.ts new file mode 100644 index 0000000..864b6d4 --- /dev/null +++ b/packages/ts-sdk/src/index.ts @@ -0,0 +1,12 @@ +export { NexlaClient } from "./client/nexla-client.js"; +export type { NexlaClientOptions } from "./client/nexla-client.js"; +export type { RetryOptions } from "./client/http.js"; +export type { RequestOptions } from "./client/types.js"; +export type { OperationData, OperationId, OperationInit } from "./client/operation-types.js"; + +export * from "./errors.js"; +export * from "./resources/index.js"; +export * from "./webhooks/index.js"; + +export { resourceMap } from "./generated/resource-map.js"; +export type { paths, components, operations } from "./generated/schema.js"; diff --git a/packages/ts-sdk/src/resources/generated/access_control.ts b/packages/ts-sdk/src/resources/generated/access_control.ts new file mode 100644 index 0000000..4611a78 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/access_control.ts @@ -0,0 +1,251 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class AccessControlResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = 
client; + } + + /** Add Access Rules on Code Container */ + async add_code_container_accessors(init?: OperationInit<"add_code_container_accessors">): Promise> { + return this.client.requestOperation("add_code_container_accessors", "put", "/code_containers/{code_container_id}/accessors", init); + } + + /** Add Access Rules on Data Credential */ + async add_data_credential_accessors(init?: OperationInit<"add_data_credential_accessors">): Promise> { + return this.client.requestOperation("add_data_credential_accessors", "put", "/data_credentials/{data_credential_id}/accessors", init); + } + + /** Add Access Rules on Data Map */ + async add_data_map_accessors(init?: OperationInit<"add_data_map_accessors">): Promise> { + return this.client.requestOperation("add_data_map_accessors", "put", "/data_maps/{data_map_id}/accessors", init); + } + + /** Add Access Rules on Data Schema */ + async add_data_schema_accessors(init?: OperationInit<"add_data_schema_accessors">): Promise> { + return this.client.requestOperation("add_data_schema_accessors", "put", "/data_schemas/{data_schema_id}/accessors", init); + } + + /** Add Access Rules on Data Sink */ + async add_data_sink_accessors(init?: OperationInit<"add_data_sink_accessors">): Promise> { + return this.client.requestOperation("add_data_sink_accessors", "put", "/data_sinks/{data_sink_id}/accessors", init); + } + + /** Add Access Rules on Data Source */ + async add_data_source_accessors(init?: OperationInit<"add_data_source_accessors">): Promise> { + return this.client.requestOperation("add_data_source_accessors", "put", "/data_sources/{data_source_id}/accessors", init); + } + + /** Add Access Rules on Document */ + async add_doc_container_accessors(init?: OperationInit<"add_doc_container_accessors">): Promise> { + return this.client.requestOperation("add_doc_container_accessors", "put", "/doc_containers/{doc_container_id}/accessors", init); + } + + /** Add Access Rules on Flow */ + async add_flow_accessors(init?: 
OperationInit<"add_flow_accessors">): Promise> { + return this.client.requestOperation("add_flow_accessors", "put", "/flows/{flow_id}/accessors", init); + } + + /** Add Access Rules on Flow (Deprecated) */ + async ["add_flow_accessors_(deprecated)"](init?: OperationInit<"add_flow_accessors_(deprecated)">): Promise> { + return this.client.requestOperation("add_flow_accessors_(deprecated)", "put", "/data_flows/{data_flow_id}/accessors", init); + } + + /** Add Access Rules on Nexset */ + async add_nexset_accessors(init?: OperationInit<"add_nexset_accessors">): Promise> { + return this.client.requestOperation("add_nexset_accessors", "put", "/data_sets/{data_set_id}/accessors", init); + } + + /** Add Project Accessors */ + async add_project_accessors(init?: OperationInit<"add_project_accessors">): Promise> { + return this.client.requestOperation("add_project_accessors", "put", "/projects/{project_id}/accessors", init); + } + + /** Add Team Accessors */ + async add_team_accessors(init?: OperationInit<"add_team_accessors">): Promise> { + return this.client.requestOperation("add_team_accessors", "put", "/teams/{team_id}/accessors", init); + } + + /** Delete Access Rules on Code Container */ + async delete_code_container_accessors(init?: OperationInit<"delete_code_container_accessors">): Promise> { + return this.client.requestOperation("delete_code_container_accessors", "delete", "/code_containers/{code_container_id}/accessors", init); + } + + /** Delete Access Rules on Data Credential */ + async delete_data_credential_accessors(init?: OperationInit<"delete_data_credential_accessors">): Promise> { + return this.client.requestOperation("delete_data_credential_accessors", "delete", "/data_credentials/{data_credential_id}/accessors", init); + } + + /** Delete Access Rules on Data Map */ + async delete_data_map_accessors(init?: OperationInit<"delete_data_map_accessors">): Promise> { + return this.client.requestOperation("delete_data_map_accessors", "delete", 
"/data_maps/{data_map_id}/accessors", init); + } + + /** Delete Access Rules on Data Schema */ + async delete_data_schema_accessors(init?: OperationInit<"delete_data_schema_accessors">): Promise> { + return this.client.requestOperation("delete_data_schema_accessors", "delete", "/data_schemas/{data_schema_id}/accessors", init); + } + + /** Delete Access Rules on Data Sink */ + async delete_data_sink_accessors(init?: OperationInit<"delete_data_sink_accessors">): Promise> { + return this.client.requestOperation("delete_data_sink_accessors", "delete", "/data_sinks/{data_sink_id}/accessors", init); + } + + /** Delete Access Rules on Data Source */ + async delete_data_source_accessors(init?: OperationInit<"delete_data_source_accessors">): Promise> { + return this.client.requestOperation("delete_data_source_accessors", "delete", "/data_sources/{data_source_id}/accessors", init); + } + + /** Delete Access Rules on Document */ + async delete_doc_container_accessors(init?: OperationInit<"delete_doc_container_accessors">): Promise> { + return this.client.requestOperation("delete_doc_container_accessors", "delete", "/doc_containers/{doc_container_id}/accessors", init); + } + + /** Delete Access Rules on Flow */ + async delete_flow_accessors(init?: OperationInit<"delete_flow_accessors">): Promise> { + return this.client.requestOperation("delete_flow_accessors", "delete", "/flows/{flow_id}/accessors", init); + } + + /** Delete Access Rules on Flow (Deprecated) */ + async ["delete_flow_accessors_(deprecated)"](init?: OperationInit<"delete_flow_accessors_(deprecated)">): Promise> { + return this.client.requestOperation("delete_flow_accessors_(deprecated)", "delete", "/data_flows/{data_flow_id}/accessors", init); + } + + /** Delete Access Rules on Nexset */ + async delete_nexset_accessors(init?: OperationInit<"delete_nexset_accessors">): Promise> { + return this.client.requestOperation("delete_nexset_accessors", "delete", "/data_sets/{data_set_id}/accessors", init); + } + + /** 
Delete Project Accessors */ + async delete_project_accessors(init?: OperationInit<"delete_project_accessors">): Promise> { + return this.client.requestOperation("delete_project_accessors", "delete", "/projects/{project_id}/accessors", init); + } + + /** Delete Team Accessors */ + async delete_team_accessors(init?: OperationInit<"delete_team_accessors">): Promise> { + return this.client.requestOperation("delete_team_accessors", "delete", "/teams/{team_id}/accessors", init); + } + + /** Get Access Rules on Code Container */ + async get_code_container_accessors(init?: OperationInit<"get_code_container_accessors">): Promise> { + return this.client.requestOperation("get_code_container_accessors", "get", "/code_containers/{code_container_id}/accessors", init); + } + + /** Get Access Rules on Data Credential */ + async get_data_credential_accessors(init?: OperationInit<"get_data_credential_accessors">): Promise> { + return this.client.requestOperation("get_data_credential_accessors", "get", "/data_credentials/{data_credential_id}/accessors", init); + } + + /** Get Access Rules on Data Map */ + async get_data_map_accessors(init?: OperationInit<"get_data_map_accessors">): Promise> { + return this.client.requestOperation("get_data_map_accessors", "get", "/data_maps/{data_map_id}/accessors", init); + } + + /** Get Access Rules on Data Schema */ + async get_data_schema_accessors(init?: OperationInit<"get_data_schema_accessors">): Promise> { + return this.client.requestOperation("get_data_schema_accessors", "get", "/data_schemas/{data_schema_id}/accessors", init); + } + + /** Get Access Rules on Data Sink */ + async get_data_sink_accessors(init?: OperationInit<"get_data_sink_accessors">): Promise> { + return this.client.requestOperation("get_data_sink_accessors", "get", "/data_sinks/{data_sink_id}/accessors", init); + } + + /** Get Access Rules on Data Source */ + async get_data_source_accessors(init?: OperationInit<"get_data_source_accessors">): Promise> { + return 
this.client.requestOperation("get_data_source_accessors", "get", "/data_sources/{data_source_id}/accessors", init); + } + + /** Get Access Rules on Document */ + async get_doc_container_accessors(init?: OperationInit<"get_doc_container_accessors">): Promise> { + return this.client.requestOperation("get_doc_container_accessors", "get", "/doc_containers/{doc_container_id}/accessors", init); + } + + /** Get Access Rules on Flow */ + async get_flow_accessors(init?: OperationInit<"get_flow_accessors">): Promise> { + return this.client.requestOperation("get_flow_accessors", "get", "/flows/{flow_id}/accessors", init); + } + + /** Get Access Rules on Flow (Deprecated) */ + async ["get_flow_accessors_(deprecated)"](init?: OperationInit<"get_flow_accessors_(deprecated)">): Promise> { + return this.client.requestOperation("get_flow_accessors_(deprecated)", "get", "/data_flows/{data_flow_id}/accessors", init); + } + + /** Get Access Rules on Nexset */ + async get_nexset_accessors(init?: OperationInit<"get_nexset_accessors">): Promise> { + return this.client.requestOperation("get_nexset_accessors", "get", "/data_sets/{data_set_id}/accessors", init); + } + + /** Get Project Accessors */ + async get_project_accessors(init?: OperationInit<"get_project_accessors">): Promise> { + return this.client.requestOperation("get_project_accessors", "get", "/projects/{project_id}/accessors", init); + } + + /** Get Team Accessors */ + async get_team_accessors(init?: OperationInit<"get_team_accessors">): Promise> { + return this.client.requestOperation("get_team_accessors", "get", "/teams/{team_id}/accessors", init); + } + + /** Replace Access Rules on Code Container */ + async replace_code_container_accessors(init?: OperationInit<"replace_code_container_accessors">): Promise> { + return this.client.requestOperation("replace_code_container_accessors", "post", "/code_containers/{code_container_id}/accessors", init); + } + + /** Replace Access Rules on Data Credential */ + async 
replace_data_credential_accessors(init?: OperationInit<"replace_data_credential_accessors">): Promise> { + return this.client.requestOperation("replace_data_credential_accessors", "post", "/data_credentials/{data_credential_id}/accessors", init); + } + + /** Replace Access Rules on Data Map */ + async replace_data_map_accessors(init?: OperationInit<"replace_data_map_accessors">): Promise> { + return this.client.requestOperation("replace_data_map_accessors", "post", "/data_maps/{data_map_id}/accessors", init); + } + + /** Replace Access Rules on Data Schema */ + async replace_data_schema_accessors(init?: OperationInit<"replace_data_schema_accessors">): Promise> { + return this.client.requestOperation("replace_data_schema_accessors", "post", "/data_schemas/{data_schema_id}/accessors", init); + } + + /** Replace Access Rules on Data Sink */ + async replace_data_sink_accessors(init?: OperationInit<"replace_data_sink_accessors">): Promise> { + return this.client.requestOperation("replace_data_sink_accessors", "post", "/data_sinks/{data_sink_id}/accessors", init); + } + + /** Replace Access Rules on Data Source */ + async replace_data_source_accessors(init?: OperationInit<"replace_data_source_accessors">): Promise> { + return this.client.requestOperation("replace_data_source_accessors", "post", "/data_sources/{data_source_id}/accessors", init); + } + + /** Replace Access Rules on Document */ + async replace_doc_container_accessors(init?: OperationInit<"replace_doc_container_accessors">): Promise> { + return this.client.requestOperation("replace_doc_container_accessors", "post", "/doc_containers/{doc_container_id}/accessors", init); + } + + /** Replace Access Rules on Flow */ + async replace_flow_accessors(init?: OperationInit<"replace_flow_accessors">): Promise> { + return this.client.requestOperation("replace_flow_accessors", "post", "/flows/{flow_id}/accessors", init); + } + + /** Replace Access Rules on Flow (Deprecated) */ + async 
["replace_flow_accessors_(deprecated)"](init?: OperationInit<"replace_flow_accessors_(deprecated)">): Promise> { + return this.client.requestOperation("replace_flow_accessors_(deprecated)", "post", "/data_flows/{data_flow_id}/accessors", init); + } + + /** Replace Access Rules on Nexset */ + async replace_nexset_accessors(init?: OperationInit<"replace_nexset_accessors">): Promise> { + return this.client.requestOperation("replace_nexset_accessors", "post", "/data_sets/{data_set_id}/accessors", init); + } + + /** Replace Access Rules on Project */ + async replace_project_accessors(init?: OperationInit<"replace_project_accessors">): Promise> { + return this.client.requestOperation("replace_project_accessors", "post", "/projects/{project_id}/accessors", init); + } + + /** Replace Team Accessors List */ + async replace_team_accessors(init?: OperationInit<"replace_team_accessors">): Promise> { + return this.client.requestOperation("replace_team_accessors", "post", "/teams/{team_id}/accessors", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/approval_requests.ts b/packages/ts-sdk/src/resources/generated/approval_requests.ts new file mode 100644 index 0000000..f44d49d --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/approval_requests.ts @@ -0,0 +1,31 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class ApprovalRequestsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Approve pending approval requests */ + async approve_approval_request(init?: OperationInit<"approve_approval_request">): Promise> { + return this.client.requestOperation("approve_approval_request", "put", "/approval_requests/{request_id}/approve", init); + } + + /** Get all pending approval requests. 
*/ + async get_pending_approval_requests(init?: OperationInit<"get_pending_approval_requests">): Promise> { + return this.client.requestOperation("get_pending_approval_requests", "get", "/approval_requests/pending", init); + } + + /** Get all requested approval requests by the user. */ + async get_requested_approval_requests(init?: OperationInit<"get_requested_approval_requests">): Promise> { + return this.client.requestOperation("get_requested_approval_requests", "get", "/approval_requests/requested", init); + } + + /** Reject pending approval requests */ + async reject_approval_request(init?: OperationInit<"reject_approval_request">): Promise> { + return this.client.requestOperation("reject_approval_request", "delete", "/approval_requests/{request_id}/reject", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/async_tasks.ts b/packages/ts-sdk/src/resources/generated/async_tasks.ts new file mode 100644 index 0000000..7475a6b --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/async_tasks.ts @@ -0,0 +1,87 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class AsyncTasksResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_async_tasks">): Promise> { + return this.client.requestOperation("get_async_tasks", "get", "/async_tasks", init); + } + + async create(init?: OperationInit<"create_async_task">): Promise> { + return this.client.requestOperation("create_async_task", "post", "/async_tasks", init); + } + + async get(init?: OperationInit<"get_async_task">): Promise> { + return this.client.requestOperation("get_async_task", "get", "/async_tasks/{task_id}", init); + } + + async delete(init?: OperationInit<"delete_async_task">): Promise> { + return 
this.client.requestOperation("delete_async_task", "delete", "/async_tasks/{task_id}", init); + } + + /** Acknowledge async operation */ + async acknowledge_async_task(init?: OperationInit<"acknowledge_async_task">): Promise> { + return this.client.requestOperation("acknowledge_async_task", "post", "/async_tasks/{task_id}/acknowledge", init); + } + + /** Create an async operation. */ + async create_async_task(init?: OperationInit<"create_async_task">): Promise> { + return this.client.requestOperation("create_async_task", "post", "/async_tasks", init); + } + + /** Delete async operation by ID */ + async delete_async_task(init?: OperationInit<"delete_async_task">): Promise> { + return this.client.requestOperation("delete_async_task", "delete", "/async_tasks/{task_id}", init); + } + + /** Get async operation by ID */ + async get_async_task(init?: OperationInit<"get_async_task">): Promise> { + return this.client.requestOperation("get_async_task", "get", "/async_tasks/{task_id}", init); + } + + /** Get download link for async operation result */ + async get_async_task_download_link(init?: OperationInit<"get_async_task_download_link">): Promise> { + return this.client.requestOperation("get_async_task_download_link", "get", "/async_tasks/{task_id}/download_link", init); + } + + /** Get async operation result */ + async get_async_task_result(init?: OperationInit<"get_async_task_result">): Promise> { + return this.client.requestOperation("get_async_task_result", "get", "/async_tasks/{task_id}/result", init); + } + + /** Get async operation types */ + async get_async_task_types(init?: OperationInit<"get_async_task_types">): Promise> { + return this.client.requestOperation("get_async_task_types", "get", "/async_tasks/types", init); + } + + /** Get async operations list for current user. 
*/ + async get_async_tasks(init?: OperationInit<"get_async_tasks">): Promise> { + return this.client.requestOperation("get_async_tasks", "get", "/async_tasks", init); + } + + /** Get async operations list for current user by status */ + async get_async_tasks_by_status(init?: OperationInit<"get_async_tasks_by_status">): Promise> { + return this.client.requestOperation("get_async_tasks_by_status", "get", "/async_tasks/by_status/{status}", init); + } + + /** Get async operation arguments for a specific type with descriptions */ + async get_async_tasks_explain_arguments(init?: OperationInit<"get_async_tasks_explain_arguments">): Promise> { + return this.client.requestOperation("get_async_tasks_explain_arguments", "get", "/async_tasks/explain_arguments/{task_type}", init); + } + + /** Get async operations list for current user of a specific type. */ + async get_async_tasks_of_type(init?: OperationInit<"get_async_tasks_of_type">): Promise> { + return this.client.requestOperation("get_async_tasks_of_type", "get", "/async_tasks/of_type/{task_type}", init); + } + + /** Rerun async operation */ + async rerun_async_task(init?: OperationInit<"rerun_async_task">): Promise> { + return this.client.requestOperation("rerun_async_task", "post", "/async_tasks/{task_id}/rerun", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/audit_logs.ts b/packages/ts-sdk/src/resources/generated/audit_logs.ts new file mode 100644 index 0000000..6edddb5 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/audit_logs.ts @@ -0,0 +1,71 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class AuditLogsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Get Audit Log for a Code Container */ + async 
get_code_container_audit_log(init?: OperationInit<"get_code_container_audit_log">): Promise> { + return this.client.requestOperation("get_code_container_audit_log", "get", "/code_containers/{code_container_id}/audit_log", init); + } + + /** Get Audit Log for a Data Credential */ + async get_data_credential_audit_log(init?: OperationInit<"get_data_credential_audit_log">): Promise> { + return this.client.requestOperation("get_data_credential_audit_log", "get", "/data_credentials/{credential_id}/audit_log", init); + } + + /** Get Audit Log for a Data Map */ + async get_data_map_audit_log(init?: OperationInit<"get_data_map_audit_log">): Promise> { + return this.client.requestOperation("get_data_map_audit_log", "get", "/data_maps/{data_map_id}/audit_log", init); + } + + /** Get Audit Log for a Data Schema */ + async get_data_schema_audit_log(init?: OperationInit<"get_data_schema_audit_log">): Promise> { + return this.client.requestOperation("get_data_schema_audit_log", "get", "/data_schemas/{schema_id}/audit_log", init); + } + + /** Get Audit Log for a Data Sink */ + async get_data_sink_audit_log(init?: OperationInit<"get_data_sink_audit_log">): Promise> { + return this.client.requestOperation("get_data_sink_audit_log", "get", "/data_sinks/{sink_id}/audit_log", init); + } + + /** Get Audit Log for a Data Source */ + async get_data_source_audit_log(init?: OperationInit<"get_data_source_audit_log">): Promise> { + return this.client.requestOperation("get_data_source_audit_log", "get", "/data_sources/{source_id}/audit_log", init); + } + + /** Get Audit Log for a Document */ + async get_doc_container_audit_log(init?: OperationInit<"get_doc_container_audit_log">): Promise> { + return this.client.requestOperation("get_doc_container_audit_log", "get", "/doc_containers/{doc_container_id}/audit_log", init); + } + + /** Get Audit Log for a Nexset */ + async get_nexset_audit_log(init?: OperationInit<"get_nexset_audit_log">): Promise> { + return 
this.client.requestOperation("get_nexset_audit_log", "get", "/data_sets/{set_id}/audit_log", init); + } + + /** Get Audit Log for an Organization */ + async get_org_audit_log(init?: OperationInit<"get_org_audit_log">): Promise> { + return this.client.requestOperation("get_org_audit_log", "get", "/orgs/{org_id}/audit_log", init); + } + + /** Get Audit Log for a Project */ + async get_project_audit_log(init?: OperationInit<"get_project_audit_log">): Promise> { + return this.client.requestOperation("get_project_audit_log", "get", "/projects/{project_id}/audit_log", init); + } + + /** Get Audit Log for a Team */ + async get_team_audit_log(init?: OperationInit<"get_team_audit_log">): Promise> { + return this.client.requestOperation("get_team_audit_log", "get", "/teams/{team_id}/audit_log", init); + } + + /** Get Audit Log for a User */ + async get_user_audit_log(init?: OperationInit<"get_user_audit_log">): Promise> { + return this.client.requestOperation("get_user_audit_log", "get", "/users/{user_id}/audit_log", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/code_containers.ts b/packages/ts-sdk/src/resources/generated/code_containers.ts new file mode 100644 index 0000000..b6fd815 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/code_containers.ts @@ -0,0 +1,66 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class CodeContainersResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_code_containers">): Promise> { + return this.client.requestOperation("get_code_containers", "get", "/code_containers", init); + } + + async create(init?: OperationInit<"create_code_container">): Promise> { + return this.client.requestOperation("create_code_container", "post", 
"/code_containers", init); + } + + async get(init?: OperationInit<"get_code_container">): Promise> { + return this.client.requestOperation("get_code_container", "get", "/code_containers/{code_container_id}", init); + } + + async update(init?: OperationInit<"update_code_container">): Promise> { + return this.client.requestOperation("update_code_container", "put", "/code_containers/{code_container_id}", init); + } + + async delete(init?: OperationInit<"delete_code_container">): Promise> { + return this.client.requestOperation("delete_code_container", "delete", "/code_containers/{code_container_id}", init); + } + + /** Copy a Code Container */ + async copy_code_container(init?: OperationInit<"copy_code_container">): Promise> { + return this.client.requestOperation("copy_code_container", "post", "/code_containers/{code_container_id}/copy", init); + } + + /** Create a Code Container */ + async create_code_container(init?: OperationInit<"create_code_container">): Promise> { + return this.client.requestOperation("create_code_container", "post", "/code_containers", init); + } + + /** Delete a Code Container */ + async delete_code_container(init?: OperationInit<"delete_code_container">): Promise> { + return this.client.requestOperation("delete_code_container", "delete", "/code_containers/{code_container_id}", init); + } + + /** Get Code Container by ID */ + async get_code_container(init?: OperationInit<"get_code_container">): Promise> { + return this.client.requestOperation("get_code_container", "get", "/code_containers/{code_container_id}", init); + } + + /** Get all Code Containers */ + async get_code_containers(init?: OperationInit<"get_code_containers">): Promise> { + return this.client.requestOperation("get_code_containers", "get", "/code_containers", init); + } + + /** Get all Public Code Containers */ + async get_public_code_containers(init?: OperationInit<"get_public_code_containers">): Promise> { + return this.client.requestOperation("get_public_code_containers", 
"get", "/code_containers/public", init); + } + + /** Update a Code Container */ + async update_code_container(init?: OperationInit<"update_code_container">): Promise> { + return this.client.requestOperation("update_code_container", "put", "/code_containers/{code_container_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/credentials.ts b/packages/ts-sdk/src/resources/generated/credentials.ts new file mode 100644 index 0000000..ea4b539 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/credentials.ts @@ -0,0 +1,76 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class CredentialsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_data_credentials">): Promise> { + return this.client.requestOperation("get_data_credentials", "get", "/data_credentials", init); + } + + async create(init?: OperationInit<"create_data_credential">): Promise> { + return this.client.requestOperation("create_data_credential", "post", "/data_credentials", init); + } + + async get(init?: OperationInit<"get_data_credential">): Promise> { + return this.client.requestOperation("get_data_credential", "get", "/data_credentials/{credential_id}", init); + } + + async update(init?: OperationInit<"update_data_credential">): Promise> { + return this.client.requestOperation("update_data_credential", "put", "/data_credentials/{credential_id}", init); + } + + async delete(init?: OperationInit<"delete_data_credential">): Promise> { + return this.client.requestOperation("delete_data_credential", "delete", "/data_credentials/{credential_id}", init); + } + + /** Create a Credential */ + async create_data_credential(init?: OperationInit<"create_data_credential">): Promise> { + return 
this.client.requestOperation("create_data_credential", "post", "/data_credentials", init); + } + + /** Test credential validity */ + async data_credential_probe(init?: OperationInit<"data_credential_probe">): Promise> { + return this.client.requestOperation("data_credential_probe", "get", "/data_credentials/{credential_id}/probe", init); + } + + /** Delete a Credential */ + async delete_data_credential(init?: OperationInit<"delete_data_credential">): Promise> { + return this.client.requestOperation("delete_data_credential", "delete", "/data_credentials/{credential_id}", init); + } + + /** Get Credential by ID */ + async get_data_credential(init?: OperationInit<"get_data_credential">): Promise> { + return this.client.requestOperation("get_data_credential", "get", "/data_credentials/{credential_id}", init); + } + + /** Get Credential by ID with expanded references */ + async get_data_credential_expanded(init?: OperationInit<"get_data_credential_expanded">): Promise> { + return this.client.requestOperation("get_data_credential_expanded", "get", "/data_credentials/{credential_id}?expand=1", init); + } + + /** Get All Credentials */ + async get_data_credentials(init?: OperationInit<"get_data_credentials">): Promise> { + return this.client.requestOperation("get_data_credentials", "get", "/data_credentials", init); + } + + /** Preview Connector Content */ + async preview_connector_content(init?: OperationInit<"preview_connector_content">): Promise> { + return this.client.requestOperation("preview_connector_content", "post", "/data_credentials/{credential_id}/probe/sample", init); + } + + /** Preview Storage Structure */ + async preview_storage_structure(init?: OperationInit<"preview_storage_structure">): Promise> { + return this.client.requestOperation("preview_storage_structure", "post", "/data_credentials/{credential_id}/probe/tree", init); + } + + /** Update Credential */ + async update_data_credential(init?: OperationInit<"update_data_credential">): Promise> { + 
return this.client.requestOperation("update_data_credential", "put", "/data_credentials/{credential_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/destinations.ts b/packages/ts-sdk/src/resources/generated/destinations.ts new file mode 100644 index 0000000..a79162e --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/destinations.ts @@ -0,0 +1,76 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class DestinationsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_data_sinks">): Promise> { + return this.client.requestOperation("get_data_sinks", "get", "/data_sinks", init); + } + + async create(init?: OperationInit<"create_data_sink">): Promise> { + return this.client.requestOperation("create_data_sink", "post", "/data_sinks", init); + } + + async get(init?: OperationInit<"get_data_sink">): Promise> { + return this.client.requestOperation("get_data_sink", "get", "/data_sinks/{sink_id}", init); + } + + async update(init?: OperationInit<"update_data_sink">): Promise> { + return this.client.requestOperation("update_data_sink", "put", "/data_sinks/{sink_id}", init); + } + + async delete(init?: OperationInit<"delete_data_sink">): Promise> { + return this.client.requestOperation("delete_data_sink", "delete", "/data_sinks/{sink_id}", init); + } + + /** Activate a Sink */ + async activate_data_sink(init?: OperationInit<"activate_data_sink">): Promise> { + return this.client.requestOperation("activate_data_sink", "put", "/data_sinks/{sink_id}/activate", init); + } + + /** Copy a Sink */ + async copy_data_sink_source(init?: OperationInit<"copy_data_sink_source">): Promise> { + return this.client.requestOperation("copy_data_sink_source", "post", 
"/data_sinks/{sink_id}/copy", init); + } + + /** Create a Sink */ + async create_data_sink(init?: OperationInit<"create_data_sink">): Promise> { + return this.client.requestOperation("create_data_sink", "post", "/data_sinks", init); + } + + /** Delete a Sink */ + async delete_data_sink(init?: OperationInit<"delete_data_sink">): Promise> { + return this.client.requestOperation("delete_data_sink", "delete", "/data_sinks/{sink_id}", init); + } + + /** Get Sink by ID */ + async get_data_sink(init?: OperationInit<"get_data_sink">): Promise> { + return this.client.requestOperation("get_data_sink", "get", "/data_sinks/{sink_id}", init); + } + + /** Get Sink by ID with Expanded References */ + async get_data_sink_expanded(init?: OperationInit<"get_data_sink_expanded">): Promise> { + return this.client.requestOperation("get_data_sink_expanded", "get", "/data_sinks/{sink_id}?expand=1", init); + } + + /** Get All Sinks */ + async get_data_sinks(init?: OperationInit<"get_data_sinks">): Promise> { + return this.client.requestOperation("get_data_sinks", "get", "/data_sinks", init); + } + + /** Pause a Sink */ + async pause_data_sink(init?: OperationInit<"pause_data_sink">): Promise> { + return this.client.requestOperation("pause_data_sink", "put", "/data_sinks/{sink_id}/pause", init); + } + + /** Update Sink */ + async update_data_sink(init?: OperationInit<"update_data_sink">): Promise> { + return this.client.requestOperation("update_data_sink", "put", "/data_sinks/{sink_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/flows.ts b/packages/ts-sdk/src/resources/generated/flows.ts new file mode 100644 index 0000000..32b7f21 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/flows.ts @@ -0,0 +1,78 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class FlowsResource 
{ + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_flows">): Promise> { + return this.client.requestOperation("get_flows", "get", "/flows", init); + } + + async get(init?: OperationInit<"get_flow_by_id">): Promise> { + return this.client.requestOperation("get_flow_by_id", "get", "/flows/{flow_id}", init); + } + + async delete(init?: OperationInit<"delete_flow">): Promise> { + return this.client.requestOperation("delete_flow", "delete", "/flows/{flow_id}", init); + } + + /** Delete a Flow */ + async delete_flow(init?: OperationInit<"delete_flow">): Promise> { + return this.client.requestOperation("delete_flow", "delete", "/flows/{flow_id}", init); + } + + /** Delete a Flow (by Resource ID) */ + async delete_flow_by_resource_id(init?: OperationInit<"delete_flow_by_resource_id">): Promise> { + return this.client.requestOperation("delete_flow_by_resource_id", "delete", "/{resource_type}/{resource_id}/flow", init); + } + + /** Activate a Flow */ + async flow_activate_with_flow_id(init?: OperationInit<"flow_activate_with_flow_id">): Promise> { + return this.client.requestOperation("flow_activate_with_flow_id", "put", "/flows/{flow_id}/activate", init); + } + + /** Activate a Flow (with Resource ID) */ + async flow_activate_with_resource_id(init?: OperationInit<"flow_activate_with_resource_id">): Promise> { + return this.client.requestOperation("flow_activate_with_resource_id", "put", "/{resource_type}/{resource_id}/activate", init); + } + + /** Copy a Flow */ + async flow_copy_with_flow_id(init?: OperationInit<"flow_copy_with_flow_id">): Promise> { + return this.client.requestOperation("flow_copy_with_flow_id", "post", "/flows/{flow_id}/copy", init); + } + + /** Generate an AI suggestion for flow documentation */ + async flow_docs_recommendation(init?: OperationInit<"flow_docs_recommendation">): Promise> { + return this.client.requestOperation("flow_docs_recommendation", 
"post", "/flows/{flow_id}/docs/recommendation", init); + } + + /** Pause a Flow */ + async flow_pause_with_flow_id(init?: OperationInit<"flow_pause_with_flow_id">): Promise> { + return this.client.requestOperation("flow_pause_with_flow_id", "put", "/flows/{flow_id}/pause", init); + } + + /** Pause a Flow (with Resource ID) */ + async flow_pause_with_resource_id(init?: OperationInit<"flow_pause_with_resource_id">): Promise> { + return this.client.requestOperation("flow_pause_with_resource_id", "put", "/{resource_type}/{resource_id}/pause", init); + } + + /** Get Flow by ID */ + async get_flow_by_id(init?: OperationInit<"get_flow_by_id">): Promise> { + return this.client.requestOperation("get_flow_by_id", "get", "/flows/{flow_id}", init); + } + + /** Get Flow (by Resource ID) */ + async get_flow_by_resource_id(init?: OperationInit<"get_flow_by_resource_id">): Promise> { + return this.client.requestOperation("get_flow_by_resource_id", "get", "/{resource_type}/{resource_id}/flow", init); + } + + /** Get All Flows */ + async get_flows(init?: OperationInit<"get_flows">): Promise> { + return this.client.requestOperation("get_flows", "get", "/flows", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/genai.ts b/packages/ts-sdk/src/resources/generated/genai.ts new file mode 100644 index 0000000..13235b1 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/genai.ts @@ -0,0 +1,61 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class GenaiResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Create a GenAI config */ + async create_gen_ai_config(init?: OperationInit<"create_gen_ai_config">): Promise> { + return this.client.requestOperation("create_gen_ai_config", "post", 
"/gen_ai_integration_configs", init); + } + + /** Create a binding of GenAI config for the org for specific usage. */ + async create_gen_ai_org_setting(init?: OperationInit<"create_gen_ai_org_setting">): Promise> { + return this.client.requestOperation("create_gen_ai_org_setting", "post", "/gen_ai_org_settings", init); + } + + /** Delete GenAI Integration Config */ + async delete_gen_ai_integration_config(init?: OperationInit<"delete_gen_ai_integration_config">): Promise> { + return this.client.requestOperation("delete_gen_ai_integration_config", "delete", "/gen_ai_integration_configs/{gen_ai_config_id}", init); + } + + /** Delete GenAI Config binding for org. */ + async delete_gen_ai_org_setting(init?: OperationInit<"delete_gen_ai_org_setting">): Promise> { + return this.client.requestOperation("delete_gen_ai_org_setting", "delete", "/gen_ai_org_settings/{gen_ai_org_setting_id}", init); + } + + /** Shows active GenAI Configuration for specific usage */ + async gen_ai_org_settings_show_active(init?: OperationInit<"gen_ai_org_settings_show_active">): Promise> { + return this.client.requestOperation("gen_ai_org_settings_show_active", "get", "/gen_ai_org_settings/active_config", init); + } + + /** Get all GenAI configs in org */ + async get_gen_ai_configs(init?: OperationInit<"get_gen_ai_configs">): Promise> { + return this.client.requestOperation("get_gen_ai_configs", "get", "/gen_ai_integration_configs", init); + } + + /** Get GenAI Integration Config */ + async get_gen_ai_integration_config(init?: OperationInit<"get_gen_ai_integration_config">): Promise> { + return this.client.requestOperation("get_gen_ai_integration_config", "get", "/gen_ai_integration_configs/{gen_ai_config_id}", init); + } + + /** Get Org GenAI binding */ + async get_gen_ai_org_setting(init?: OperationInit<"get_gen_ai_org_setting">): Promise> { + return this.client.requestOperation("get_gen_ai_org_setting", "get", "/gen_ai_org_settings/{gen_ai_org_setting_id}", init); + } + + /** Get all 
bindings of GenAI configs of the org for specified usages. */ + async get_gen_ai_org_settings(init?: OperationInit<"get_gen_ai_org_settings">): Promise> { + return this.client.requestOperation("get_gen_ai_org_settings", "get", "/gen_ai_org_settings", init); + } + + /** Update GenAI Integration Config */ + async update_gen_ai_integration_config(init?: OperationInit<"update_gen_ai_integration_config">): Promise> { + return this.client.requestOperation("update_gen_ai_integration_config", "put", "/gen_ai_integration_configs/{gen_ai_config_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/index.ts b/packages/ts-sdk/src/resources/generated/index.ts new file mode 100644 index 0000000..2093c81 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/index.ts @@ -0,0 +1,120 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import { AccessControlResource } from "./access_control.js"; +import { ApprovalRequestsResource } from "./approval_requests.js"; +import { AsyncTasksResource } from "./async_tasks.js"; +import { AuditLogsResource } from "./audit_logs.js"; +import { CodeContainersResource } from "./code_containers.js"; +import { CredentialsResource } from "./credentials.js"; +import { DestinationsResource } from "./destinations.js"; +import { FlowsResource } from "./flows.js"; +import { GenaiResource } from "./genai.js"; +import { LimitsResource } from "./limits.js"; +import { LookupsResource } from "./lookups.js"; +import { MarketplaceResource } from "./marketplace.js"; +import { MetricsResource } from "./metrics.js"; +import { NexsetsResource } from "./nexsets.js"; +import { NotificationsResource } from "./notifications.js"; +import { OrgAuthConfigsResource } from "./org_auth_configs.js"; +import { OrganizationsResource } from "./organizations.js"; +import { ProjectsResource } from "./projects.js"; +import { QuarantineSettingsResource } from "./quarantine_settings.js"; +import { RuntimesResource } 
from "./runtimes.js"; +import { SelfSignupResource } from "./self_signup.js"; +import { SelfSignupAdminResource } from "./self_signup_admin.js"; +import { SourcesResource } from "./sources.js"; +import { TeamsResource } from "./teams.js"; +import { TokensResource } from "./tokens.js"; +import { TransformsResource } from "./transforms.js"; +import { UserSettingsResource } from "./user_settings.js"; +import { UsersResource } from "./users.js"; + +export { AccessControlResource }; +export { ApprovalRequestsResource }; +export { AsyncTasksResource }; +export { AuditLogsResource }; +export { CodeContainersResource }; +export { CredentialsResource }; +export { DestinationsResource }; +export { FlowsResource }; +export { GenaiResource }; +export { LimitsResource }; +export { LookupsResource }; +export { MarketplaceResource }; +export { MetricsResource }; +export { NexsetsResource }; +export { NotificationsResource }; +export { OrgAuthConfigsResource }; +export { OrganizationsResource }; +export { ProjectsResource }; +export { QuarantineSettingsResource }; +export { RuntimesResource }; +export { SelfSignupResource }; +export { SelfSignupAdminResource }; +export { SourcesResource }; +export { TeamsResource }; +export { TokensResource }; +export { TransformsResource }; +export { UserSettingsResource }; +export { UsersResource }; + +export interface GeneratedResourceClients { + access_control: AccessControlResource; + approval_requests: ApprovalRequestsResource; + async_tasks: AsyncTasksResource; + audit_logs: AuditLogsResource; + code_containers: CodeContainersResource; + credentials: CredentialsResource; + destinations: DestinationsResource; + flows: FlowsResource; + genai: GenaiResource; + limits: LimitsResource; + lookups: LookupsResource; + marketplace: MarketplaceResource; + metrics: MetricsResource; + nexsets: NexsetsResource; + notifications: NotificationsResource; + org_auth_configs: OrgAuthConfigsResource; + organizations: OrganizationsResource; + projects: 
ProjectsResource; + quarantine_settings: QuarantineSettingsResource; + runtimes: RuntimesResource; + self_signup: SelfSignupResource; + self_signup_admin: SelfSignupAdminResource; + sources: SourcesResource; + teams: TeamsResource; + tokens: TokensResource; + transforms: TransformsResource; + user_settings: UserSettingsResource; + users: UsersResource; +} + +export const createGeneratedResources = (client: NexlaClient): GeneratedResourceClients => ({ + access_control: new AccessControlResource(client), + approval_requests: new ApprovalRequestsResource(client), + async_tasks: new AsyncTasksResource(client), + audit_logs: new AuditLogsResource(client), + code_containers: new CodeContainersResource(client), + credentials: new CredentialsResource(client), + destinations: new DestinationsResource(client), + flows: new FlowsResource(client), + genai: new GenaiResource(client), + limits: new LimitsResource(client), + lookups: new LookupsResource(client), + marketplace: new MarketplaceResource(client), + metrics: new MetricsResource(client), + nexsets: new NexsetsResource(client), + notifications: new NotificationsResource(client), + org_auth_configs: new OrgAuthConfigsResource(client), + organizations: new OrganizationsResource(client), + projects: new ProjectsResource(client), + quarantine_settings: new QuarantineSettingsResource(client), + runtimes: new RuntimesResource(client), + self_signup: new SelfSignupResource(client), + self_signup_admin: new SelfSignupAdminResource(client), + sources: new SourcesResource(client), + teams: new TeamsResource(client), + tokens: new TokensResource(client), + transforms: new TransformsResource(client), + user_settings: new UserSettingsResource(client), + users: new UsersResource(client), +}); \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/limits.ts b/packages/ts-sdk/src/resources/generated/limits.ts new file mode 100644 index 0000000..898464e --- /dev/null +++ 
b/packages/ts-sdk/src/resources/generated/limits.ts @@ -0,0 +1,16 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class LimitsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Get current rate limit and usage */ + async limits(init?: OperationInit<"limits">): Promise> { + return this.client.requestOperation("limits", "get", "/limits", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/lookups.ts b/packages/ts-sdk/src/resources/generated/lookups.ts new file mode 100644 index 0000000..039f450 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/lookups.ts @@ -0,0 +1,71 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class LookupsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_data_maps">): Promise> { + return this.client.requestOperation("get_data_maps", "get", "/data_maps", init); + } + + async create(init?: OperationInit<"create_static_data_map">): Promise> { + return this.client.requestOperation("create_static_data_map", "post", "/data_maps", init); + } + + async get(init?: OperationInit<"get_data_map">): Promise> { + return this.client.requestOperation("get_data_map", "get", "/data_maps/{data_map_id}", init); + } + + async update(init?: OperationInit<"update_data_map_metadata">): Promise> { + return this.client.requestOperation("update_data_map_metadata", "put", "/data_maps/{data_map_id}", init); + } + + async delete(init?: OperationInit<"delete_data_map">): Promise> { + return 
this.client.requestOperation("delete_data_map", "delete", "/data_maps/{data_map_id}", init); + } + + /** Check Data Map Entries */ + async check_data_map_entries(init?: OperationInit<"check_data_map_entries">): Promise> { + return this.client.requestOperation("check_data_map_entries", "get", "/data_maps/{data_map_id}/entries/{entry_keys}", init); + } + + /** Create a Static Data Map */ + async create_static_data_map(init?: OperationInit<"create_static_data_map">): Promise> { + return this.client.requestOperation("create_static_data_map", "post", "/data_maps", init); + } + + /** Delete a Data Map */ + async delete_data_map(init?: OperationInit<"delete_data_map">): Promise> { + return this.client.requestOperation("delete_data_map", "delete", "/data_maps/{data_map_id}", init); + } + + /** Delete Data Map Entries */ + async delete_data_map_entries(init?: OperationInit<"delete_data_map_entries">): Promise> { + return this.client.requestOperation("delete_data_map_entries", "delete", "/data_maps/{data_map_id}/entries/{entry_keys}", init); + } + + /** Get Data Map by ID */ + async get_data_map(init?: OperationInit<"get_data_map">): Promise> { + return this.client.requestOperation("get_data_map", "get", "/data_maps/{data_map_id}", init); + } + + /** Get all Data Maps */ + async get_data_maps(init?: OperationInit<"get_data_maps">): Promise> { + return this.client.requestOperation("get_data_maps", "get", "/data_maps", init); + } + + /** Update Data Map Metadata */ + async update_data_map_metadata(init?: OperationInit<"update_data_map_metadata">): Promise> { + return this.client.requestOperation("update_data_map_metadata", "put", "/data_maps/{data_map_id}", init); + } + + /** Upsert Static Data Map Entries */ + async upsert_data_map_entries(init?: OperationInit<"upsert_data_map_entries">): Promise> { + return this.client.requestOperation("upsert_data_map_entries", "put", "/data_maps/{data_map_id}/entries", init); + } +} \ No newline at end of file diff --git 
a/packages/ts-sdk/src/resources/generated/marketplace.ts b/packages/ts-sdk/src/resources/generated/marketplace.ts new file mode 100644 index 0000000..4b97f4a --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/marketplace.ts @@ -0,0 +1,76 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class MarketplaceResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Add custodians to a marketplace domain. */ + async add_domain_custodians(init?: OperationInit<"add_domain_custodians">): Promise> { + return this.client.requestOperation("add_domain_custodians", "post", "/marketplace/domains/{domain_id}/custodians", init); + } + + /** Create a single marketplace domain. */ + async create_domain(init?: OperationInit<"create_domain">): Promise> { + return this.client.requestOperation("create_domain", "post", "/marketplace/domains/{domain_id}", init); + } + + /** Create a marketplace item for a domain. */ + async create_domain_item(init?: OperationInit<"create_domain_item">): Promise> { + return this.client.requestOperation("create_domain_item", "post", "/marketplace/domains/{domain_id}/items", init); + } + + /** Create marketplace domains. */ + async create_domains(init?: OperationInit<"create_domains">): Promise> { + return this.client.requestOperation("create_domains", "post", "/marketplace/domains", init); + } + + /** Delete a single marketplace domain. */ + async delete_domain(init?: OperationInit<"delete_domain">): Promise> { + return this.client.requestOperation("delete_domain", "delete", "/marketplace/domains/{domain_id}", init); + } + + /** Get a single marketplace domain. 
*/ + async get_domain(init?: OperationInit<"get_domain">): Promise> { + return this.client.requestOperation("get_domain", "get", "/marketplace/domains/{domain_id}", init); + } + + /** Get custodians for a marketplace domain. */ + async get_domain_custodians(init?: OperationInit<"get_domain_custodians">): Promise> { + return this.client.requestOperation("get_domain_custodians", "get", "/marketplace/domains/{domain_id}/custodians", init); + } + + /** Get marketplace items for a domain. */ + async get_domain_items(init?: OperationInit<"get_domain_items">): Promise> { + return this.client.requestOperation("get_domain_items", "get", "/marketplace/domains/{domain_id}/items", init); + } + + /** Get marketplace domains. */ + async get_domains(init?: OperationInit<"get_domains">): Promise> { + return this.client.requestOperation("get_domains", "get", "/marketplace/domains", init); + } + + /** Get marketplace domains for organization. */ + async get_domains_for_org(init?: OperationInit<"get_domains_for_org">): Promise> { + return this.client.requestOperation("get_domains_for_org", "get", "/marketplace/domains/for_org", init); + } + + /** Remove custodians from a marketplace domain. */ + async remove_domain_custodians(init?: OperationInit<"remove_domain_custodians">): Promise> { + return this.client.requestOperation("remove_domain_custodians", "delete", "/marketplace/domains/{domain_id}/custodians", init); + } + + /** Update a single marketplace domain. */ + async update_domain(init?: OperationInit<"update_domain">): Promise> { + return this.client.requestOperation("update_domain", "put", "/marketplace/domains/{domain_id}", init); + } + + /** Update custodians for a marketplace domain. 
*/ + async update_domain_custodians(init?: OperationInit<"update_domain_custodians">): Promise> { + return this.client.requestOperation("update_domain_custodians", "put", "/marketplace/domains/{domain_id}/custodians", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/metrics.ts b/packages/ts-sdk/src/resources/generated/metrics.ts new file mode 100644 index 0000000..1546add --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/metrics.ts @@ -0,0 +1,51 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class MetricsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Get Flow Execution Logs for Run ID of a Flow */ + async get_flow_logs_for_run_id(init?: OperationInit<"get_flow_logs_for_run_id">): Promise> { + return this.client.requestOperation("get_flow_logs_for_run_id", "get", "/data_flows/{resource_type}/{resource_id}/logs", init); + } + + /** Get Metrics for a Flow */ + async get_flow_metrics(init?: OperationInit<"get_flow_metrics">): Promise> { + return this.client.requestOperation("get_flow_metrics", "get", "/data_flows/{resource_type}/{resource_id}/metrics", init); + } + + /** Get Metrics By Run ID for a Resource of a Flow */ + async get_resource_metrics_by_run(init?: OperationInit<"get_resource_metrics_by_run">): Promise> { + return this.client.requestOperation("get_resource_metrics_by_run", "get", "/{resource_type}/{resource_id}/metrics/run_summary", init); + } + + /** Get Daily Metrics for a Resource of a Flow */ + async get_resource_metrics_daily(init?: OperationInit<"get_resource_metrics_daily">): Promise> { + return this.client.requestOperation("get_resource_metrics_daily", "get", "/{resource_type}/{resource_id}/metrics", init); + } + + /** Get Total Account Metrics for An 
Organization */ + async org_account_metrics_total(init?: OperationInit<"org_account_metrics_total">): Promise> { + return this.client.requestOperation("org_account_metrics_total", "get", "/orgs/{org_id}/flows/account_metrics", init); + } + + /** Get 24 Hour Flow Stats for a User */ + async user_24_hour_flow_stats(init?: OperationInit<"user_24_hour_flow_stats">): Promise> { + return this.client.requestOperation("user_24_hour_flow_stats", "get", "/users/{user_id}/flows/dashboard", init); + } + + /** Get Total Account Metrics for a User */ + async user_account_metrics_total(init?: OperationInit<"user_account_metrics_total">): Promise> { + return this.client.requestOperation("user_account_metrics_total", "get", "/users/{user_id}/flows/account_metrics", init); + } + + /** Get Daily Data Processing Metrics for a User */ + async user_metrics_daily(init?: OperationInit<"user_metrics_daily">): Promise> { + return this.client.requestOperation("user_metrics_daily", "get", "/users/{user_id}/metrics", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/nexsets.ts b/packages/ts-sdk/src/resources/generated/nexsets.ts new file mode 100644 index 0000000..12a30e3 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/nexsets.ts @@ -0,0 +1,81 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class NexsetsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_nexsets">): Promise> { + return this.client.requestOperation("get_nexsets", "get", "/data_sets", init); + } + + async create(init?: OperationInit<"create_nexset">): Promise> { + return this.client.requestOperation("create_nexset", "post", "/data_sets", init); + } + + async get(init?: OperationInit<"get_nexset">): Promise> { 
+ return this.client.requestOperation("get_nexset", "get", "/data_sets/{set_id}", init); + } + + async update(init?: OperationInit<"update_nexset">): Promise> { + return this.client.requestOperation("update_nexset", "put", "/data_sets/{set_id}", init); + } + + async delete(init?: OperationInit<"delete_nexset">): Promise> { + return this.client.requestOperation("delete_nexset", "delete", "/data_sets/{set_id}", init); + } + + /** Activate Nexset */ + async activate_nexset(init?: OperationInit<"activate_nexset">): Promise> { + return this.client.requestOperation("activate_nexset", "put", "/data_sets/{set_id}/activate", init); + } + + /** Copy Nexset */ + async copy_nexset(init?: OperationInit<"copy_nexset">): Promise> { + return this.client.requestOperation("copy_nexset", "post", "/data_sets/{set_id}/copy", init); + } + + /** Create a Nexset */ + async create_nexset(init?: OperationInit<"create_nexset">): Promise> { + return this.client.requestOperation("create_nexset", "post", "/data_sets", init); + } + + /** Generate an AI suggestion for Nexset documentation */ + async data_set_docs_recommendation(init?: OperationInit<"data_set_docs_recommendation">): Promise> { + return this.client.requestOperation("data_set_docs_recommendation", "post", "/data_sets/{data_set_id}/docs/recommendation", init); + } + + /** Delete a Nexset */ + async delete_nexset(init?: OperationInit<"delete_nexset">): Promise> { + return this.client.requestOperation("delete_nexset", "delete", "/data_sets/{set_id}", init); + } + + /** Get a Nexset */ + async get_nexset(init?: OperationInit<"get_nexset">): Promise> { + return this.client.requestOperation("get_nexset", "get", "/data_sets/{set_id}", init); + } + + /** Get Nexset Samples */ + async get_nexset_samples(init?: OperationInit<"get_nexset_samples">): Promise> { + return this.client.requestOperation("get_nexset_samples", "get", "/data_sets/{set_id}/samples", init); + } + + /** Get All Nexsets */ + async get_nexsets(init?: 
OperationInit<"get_nexsets">): Promise> { + return this.client.requestOperation("get_nexsets", "get", "/data_sets", init); + } + + /** Pause Nexset */ + async pause_nexset(init?: OperationInit<"pause_nexset">): Promise> { + return this.client.requestOperation("pause_nexset", "put", "/data_sets/{set_id}/pause", init); + } + + /** Update a Nexset */ + async update_nexset(init?: OperationInit<"update_nexset">): Promise> { + return this.client.requestOperation("update_nexset", "put", "/data_sets/{set_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/notifications.ts b/packages/ts-sdk/src/resources/generated/notifications.ts new file mode 100644 index 0000000..2b26f9b --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/notifications.ts @@ -0,0 +1,128 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class NotificationsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_notifications">): Promise> { + return this.client.requestOperation("get_notifications", "get", "/notifications", init); + } + + async get(init?: OperationInit<"get_notification">): Promise> { + return this.client.requestOperation("get_notification", "get", "/notifications/{notification_id}", init); + } + + async delete(init?: OperationInit<"delete_notifications">): Promise> { + return this.client.requestOperation("delete_notifications", "delete", "/notifications/{notification_id}", init); + } + + /** Create a Notification Channel Setting */ + async create_notification_channel_setting(init?: OperationInit<"create_notification_channel_setting">): Promise> { + return this.client.requestOperation("create_notification_channel_setting", "post", "/notification_channel_settings", init); + } + + 
/** Create a Notification Setting */ + async create_notification_setting(init?: OperationInit<"create_notification_setting">): Promise> { + return this.client.requestOperation("create_notification_setting", "post", "/notification_settings", init); + } + + /** Delete All Notifications */ + async delete_all_notifications(init?: OperationInit<"delete_all_notifications">): Promise> { + return this.client.requestOperation("delete_all_notifications", "delete", "/notifications/all", init); + } + + /** Delete a Notification Channel Setting */ + async delete_notification_channel_setting(init?: OperationInit<"delete_notification_channel_setting">): Promise> { + return this.client.requestOperation("delete_notification_channel_setting", "delete", "/notification_channel_settings/{notification_channel_setting_id}", init); + } + + /** Delete a Notification Setting */ + async delete_notification_setting(init?: OperationInit<"delete_notification_setting">): Promise> { + return this.client.requestOperation("delete_notification_setting", "delete", "/notification_settings/{notification_setting_id}", init); + } + + /** Delete a Notification */ + async delete_notifications(init?: OperationInit<"delete_notifications">): Promise> { + return this.client.requestOperation("delete_notifications", "delete", "/notifications/{notification_id}", init); + } + + /** Get a Notification */ + async get_notification(init?: OperationInit<"get_notification">): Promise> { + return this.client.requestOperation("get_notification", "get", "/notifications/{notification_id}", init); + } + + /** Get a Notification Channel Setting */ + async get_notification_channel_setting(init?: OperationInit<"get_notification_channel_setting">): Promise> { + return this.client.requestOperation("get_notification_channel_setting", "get", "/notification_channel_settings/{notification_channel_setting_id}", init); + } + + /** Get Notifications Count */ + async get_notification_count(init?: OperationInit<"get_notification_count">): 
Promise> { + return this.client.requestOperation("get_notification_count", "get", "/notifications/count", init); + } + + /** Get a Notification Setting */ + async get_notification_setting(init?: OperationInit<"get_notification_setting">): Promise> { + return this.client.requestOperation("get_notification_setting", "get", "/notification_settings/{notification_setting_id}", init); + } + + /** Get All Notification Types */ + async get_notification_types(init?: OperationInit<"get_notification_types">): Promise> { + return this.client.requestOperation("get_notification_types", "get", "/notification_types", init); + } + + /** Get All Notifications */ + async get_notifications(init?: OperationInit<"get_notifications">): Promise> { + return this.client.requestOperation("get_notifications", "get", "/notifications", init); + } + + /** List Notification Channel Settings */ + async list_notification_channel_settings(init?: OperationInit<"list_notification_channel_settings">): Promise> { + return this.client.requestOperation("list_notification_channel_settings", "get", "/notification_channel_settings", init); + } + + /** List Notification Settings */ + async list_notification_settings(init?: OperationInit<"list_notification_settings">): Promise> { + return this.client.requestOperation("list_notification_settings", "get", "/notification_settings", init); + } + + /** Get Notification Settings for an Event */ + async list_notification_settings_by_type(init?: OperationInit<"list_notification_settings_by_type">): Promise> { + return this.client.requestOperation("list_notification_settings_by_type", "get", "/notification_settings/notification_types/{notification_type_id}", init); + } + + /** Get One Notification Type */ + async list_notification_type(init?: OperationInit<"list_notification_type">): Promise> { + return this.client.requestOperation("list_notification_type", "get", "/notification_types/list", init); + } + + /** Get Notification Settings For a Resource */ + async 
list_resource_notification_settings(init?: OperationInit<"list_resource_notification_settings">): Promise> { + return this.client.requestOperation("list_resource_notification_settings", "get", "/notification_settings/{resource_type}/{resource_id}", init); + } + + /** Mark Notification Read */ + async notifications_mark_read(init?: OperationInit<"notifications_mark_read">): Promise> { + return this.client.requestOperation("notifications_mark_read", "put", "/notifications/mark_read", init); + } + + /** Mark Notification Unread */ + async notifications_mark_unread(init?: OperationInit<"notifications_mark_unread">): Promise> { + return this.client.requestOperation("notifications_mark_unread", "put", "/notifications/mark_unread", init); + } + + /** Update a Notification Channel Setting */ + async update_notification_channel_setting(init?: OperationInit<"update_notification_channel_setting">): Promise> { + return this.client.requestOperation("update_notification_channel_setting", "put", "/notification_channel_settings/{notification_channel_setting_id}", init); + } + + /** Modify a Notification Setting */ + async update_notification_setting(init?: OperationInit<"update_notification_setting">): Promise> { + return this.client.requestOperation("update_notification_setting", "put", "/notification_settings/{notification_setting_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/org_auth_configs.ts b/packages/ts-sdk/src/resources/generated/org_auth_configs.ts new file mode 100644 index 0000000..4bbe5ed --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/org_auth_configs.ts @@ -0,0 +1,61 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class OrgAuthConfigsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = 
client; + } + + async list(init?: OperationInit<"get_api_auth_configs">): Promise> { + return this.client.requestOperation("get_api_auth_configs", "get", "/api_auth_configs", init); + } + + async create(init?: OperationInit<"create_api_auth_config">): Promise> { + return this.client.requestOperation("create_api_auth_config", "post", "/api_auth_configs", init); + } + + async get(init?: OperationInit<"get_api_auth_configs">): Promise> { + return this.client.requestOperation("get_api_auth_configs", "get", "/api_auth_configs/{auth_config_id}", init); + } + + async update(init?: OperationInit<"update_api_auth_config">): Promise> { + return this.client.requestOperation("update_api_auth_config", "put", "/api_auth_configs/{auth_config_id}", init); + } + + async delete(init?: OperationInit<"delete_api_auth_config">): Promise> { + return this.client.requestOperation("delete_api_auth_config", "delete", "/api_auth_configs/{auth_config_id}", init); + } + + /** Create auth config. */ + async create_api_auth_config(init?: OperationInit<"create_api_auth_config">): Promise> { + return this.client.requestOperation("create_api_auth_config", "post", "/api_auth_configs", init); + } + + /** Delete auth config. */ + async delete_api_auth_config(init?: OperationInit<"delete_api_auth_config">): Promise> { + return this.client.requestOperation("delete_api_auth_config", "delete", "/api_auth_configs/{auth_config_id}", init); + } + + /** Get all auth configs. */ + async get_all_api_auth_configs(init?: OperationInit<"get_all_api_auth_configs">): Promise> { + return this.client.requestOperation("get_all_api_auth_configs", "get", "/api_auth_configs/all", init); + } + + /** Get auth configs. */ + async get_api_auth_configs(init?: OperationInit<"get_api_auth_configs">): Promise> { + return this.client.requestOperation("get_api_auth_configs", "get", "/api_auth_configs", init); + } + + /** Get auth settings for org. 
*/ + async get_api_auth_settings(init?: OperationInit<"get_api_auth_settings">): Promise> { + return this.client.requestOperation("get_api_auth_settings", "get", "/orgs/{org_id}/auth_settings", init); + } + + /** Update auth config. */ + async update_api_auth_config(init?: OperationInit<"update_api_auth_config">): Promise> { + return this.client.requestOperation("update_api_auth_config", "put", "/api_auth_configs/{auth_config_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/organizations.ts b/packages/ts-sdk/src/resources/generated/organizations.ts new file mode 100644 index 0000000..ab3117f --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/organizations.ts @@ -0,0 +1,73 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class OrganizationsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_orgs">): Promise> { + return this.client.requestOperation("get_orgs", "get", "/orgs", init); + } + + async get(init?: OperationInit<"get_org">): Promise> { + return this.client.requestOperation("get_org", "get", "/orgs/{org_id}", init); + } + + async update(init?: OperationInit<"update_org">): Promise> { + return this.client.requestOperation("update_org", "put", "/orgs/{org_id}", init); + } + + /** Add organization custodians. */ + async add_org_custodians(init?: OperationInit<"add_org_custodians">): Promise> { + return this.client.requestOperation("add_org_custodians", "post", "/orgs/{org_id}/custodians", init); + } + + /** Remove Members from an Organization. 
*/ + async delete_org_members(init?: OperationInit<"delete_org_members">): Promise> { + return this.client.requestOperation("delete_org_members", "delete", "/orgs/{org_id}/members", init); + } + + /** Get Organization by ID */ + async get_org(init?: OperationInit<"get_org">): Promise> { + return this.client.requestOperation("get_org", "get", "/orgs/{org_id}", init); + } + + /** Get organization custodians. */ + async get_org_custodians(init?: OperationInit<"get_org_custodians">): Promise> { + return this.client.requestOperation("get_org_custodians", "get", "/orgs/{org_id}/custodians", init); + } + + /** Get All Members in Organization */ + async get_org_members(init?: OperationInit<"get_org_members">): Promise> { + return this.client.requestOperation("get_org_members", "get", "/orgs/{org_id}/members", init); + } + + /** Get all Organizations */ + async get_orgs(init?: OperationInit<"get_orgs">): Promise> { + return this.client.requestOperation("get_orgs", "get", "/orgs", init); + } + + /** Remove organization custodians. */ + async remove_org_custodians(init?: OperationInit<"remove_org_custodians">): Promise> { + return this.client.requestOperation("remove_org_custodians", "delete", "/orgs/{org_id}/custodians", init); + } + + /** Update an Organization */ + async update_org(init?: OperationInit<"update_org">): Promise> { + return this.client.requestOperation("update_org", "put", "/orgs/{org_id}", init); + } + + /** Update organization custodians. 
*/ + async update_org_custodians(init?: OperationInit<"update_org_custodians">): Promise> { + return this.client.requestOperation("update_org_custodians", "put", "/orgs/{org_id}/custodians", init); + } + + /** Update Organization Members */ + async update_org_members(init?: OperationInit<"update_org_members">): Promise> { + return this.client.requestOperation("update_org_members", "put", "/orgs/{org_id}/members", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/projects.ts b/packages/ts-sdk/src/resources/generated/projects.ts new file mode 100644 index 0000000..9d5623d --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/projects.ts @@ -0,0 +1,96 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class ProjectsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_projects">): Promise> { + return this.client.requestOperation("get_projects", "get", "/projects", init); + } + + async create(init?: OperationInit<"create_project">): Promise> { + return this.client.requestOperation("create_project", "post", "/projects", init); + } + + async get(init?: OperationInit<"get_project">): Promise> { + return this.client.requestOperation("get_project", "get", "/projects/{project_id}", init); + } + + async update(init?: OperationInit<"update_project">): Promise> { + return this.client.requestOperation("update_project", "put", "/projects/{project_id}", init); + } + + async delete(init?: OperationInit<"delete_project">): Promise> { + return this.client.requestOperation("delete_project", "delete", "/projects/{project_id}", init); + } + + /** Add Flows to Project */ + async add_project_flows(init?: OperationInit<"add_project_flows">): Promise> { + return 
this.client.requestOperation("add_project_flows", "put", "/projects/{project_id}/flows", init); + } + + /** Add Flows to Project (Deprecated) */ + async ["add_project_flows_(deprecated)"](init?: OperationInit<"add_project_flows_(deprecated)">): Promise> { + return this.client.requestOperation("add_project_flows_(deprecated)", "put", "/projects/{project_id}/data_flows", init); + } + + /** Create a project */ + async create_project(init?: OperationInit<"create_project">): Promise> { + return this.client.requestOperation("create_project", "post", "/projects", init); + } + + /** Delete Project by ID */ + async delete_project(init?: OperationInit<"delete_project">): Promise> { + return this.client.requestOperation("delete_project", "delete", "/projects/{project_id}", init); + } + + /** Get Project by ID */ + async get_project(init?: OperationInit<"get_project">): Promise> { + return this.client.requestOperation("get_project", "get", "/projects/{project_id}", init); + } + + /** Get Project Flows */ + async get_project_flows(init?: OperationInit<"get_project_flows">): Promise> { + return this.client.requestOperation("get_project_flows", "get", "/projects/{project_id}/flows", init); + } + + /** Get Project Flows (Deprecated) */ + async ["get_project_flows_(deprecated)"](init?: OperationInit<"get_project_flows_(deprecated)">): Promise> { + return this.client.requestOperation("get_project_flows_(deprecated)", "get", "/projects/{project_id}/data_flows", init); + } + + /** Get all Projects */ + async get_projects(init?: OperationInit<"get_projects">): Promise> { + return this.client.requestOperation("get_projects", "get", "/projects", init); + } + + /** Remove Flows From A Project */ + async remove_project_flows(init?: OperationInit<"remove_project_flows">): Promise> { + return this.client.requestOperation("remove_project_flows", "delete", "/projects/{project_id}/flows", init); + } + + /** Remove Flows From A Project (Deprecated) */ + async 
["remove_project_flows_(deprecated)"](init?: OperationInit<"remove_project_flows_(deprecated)">): Promise> { + return this.client.requestOperation("remove_project_flows_(deprecated)", "delete", "/projects/{project_id}/data_flows", init); + } + + /** Replace Project Flows List */ + async replace_project_flows(init?: OperationInit<"replace_project_flows">): Promise> { + return this.client.requestOperation("replace_project_flows", "post", "/projects/{project_id}/flows", init); + } + + /** Replace Project Flows List (Deprecated) */ + async ["replace_project_flows_(deprecated)"](init?: OperationInit<"replace_project_flows_(deprecated)">): Promise> { + return this.client.requestOperation("replace_project_flows_(deprecated)", "post", "/projects/{project_id}/data_flows", init); + } + + /** Modify a Project */ + async update_project(init?: OperationInit<"update_project">): Promise> { + return this.client.requestOperation("update_project", "put", "/projects/{project_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/quarantine_settings.ts b/packages/ts-sdk/src/resources/generated/quarantine_settings.ts new file mode 100644 index 0000000..eb0885a --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/quarantine_settings.ts @@ -0,0 +1,31 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class QuarantineSettingsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Set Quarantine Data Export Settings for A User */ + async create_quarantine_data_export_settings(init?: OperationInit<"create_quarantine_data_export_settings">): Promise> { + return this.client.requestOperation("create_quarantine_data_export_settings", "post", "/users/{user_id}/quarantine_settings", init); + } + + /** Delete Quarantine 
Data Export Settings for A User */ + async delete_user_quarantine_data_export_settings(init?: OperationInit<"delete_user_quarantine_data_export_settings">): Promise> { + return this.client.requestOperation("delete_user_quarantine_data_export_settings", "delete", "/users/{user_id}/quarantine_settings", init); + } + + /** Get Quarantine Data Export Settings for A User */ + async get_user_quarantine_data_export_settings(init?: OperationInit<"get_user_quarantine_data_export_settings">): Promise> { + return this.client.requestOperation("get_user_quarantine_data_export_settings", "get", "/users/{user_id}/quarantine_settings", init); + } + + /** Update Quarantine Data Export Settings for A User */ + async update_user_quarantine_data_export_settings(init?: OperationInit<"update_user_quarantine_data_export_settings">): Promise> { + return this.client.requestOperation("update_user_quarantine_data_export_settings", "put", "/users/{user_id}/quarantine_settings", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/runtimes.ts b/packages/ts-sdk/src/resources/generated/runtimes.ts new file mode 100644 index 0000000..5729fb3 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/runtimes.ts @@ -0,0 +1,66 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class RuntimesResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_runtimes">): Promise> { + return this.client.requestOperation("get_runtimes", "get", "/runtimes", init); + } + + async create(init?: OperationInit<"create_runtime">): Promise> { + return this.client.requestOperation("create_runtime", "post", "/runtimes", init); + } + + async get(init?: OperationInit<"get_runtime">): Promise> { + return 
this.client.requestOperation("get_runtime", "get", "/runtimes/{runtime_id}", init); + } + + async update(init?: OperationInit<"update_runtime">): Promise> { + return this.client.requestOperation("update_runtime", "put", "/runtimes/{runtime_id}", init); + } + + async delete(init?: OperationInit<"delete_runtime">): Promise> { + return this.client.requestOperation("delete_runtime", "delete", "/runtimes/{runtime_id}", init); + } + + /** Activate a Custom Runtime */ + async activate_runtime(init?: OperationInit<"activate_runtime">): Promise> { + return this.client.requestOperation("activate_runtime", "put", "/runtimes/{runtime_id}/activate", init); + } + + /** Create a Custom Runtime */ + async create_runtime(init?: OperationInit<"create_runtime">): Promise> { + return this.client.requestOperation("create_runtime", "post", "/runtimes", init); + } + + /** Delete a Custom Runtime */ + async delete_runtime(init?: OperationInit<"delete_runtime">): Promise> { + return this.client.requestOperation("delete_runtime", "delete", "/runtimes/{runtime_id}", init); + } + + /** Get a custom runtime by ID */ + async get_runtime(init?: OperationInit<"get_runtime">): Promise> { + return this.client.requestOperation("get_runtime", "get", "/runtimes/{runtime_id}", init); + } + + /** Get all Custom Runtimes */ + async get_runtimes(init?: OperationInit<"get_runtimes">): Promise> { + return this.client.requestOperation("get_runtimes", "get", "/runtimes", init); + } + + /** Pause a Custom Runtime */ + async pause_runtime(init?: OperationInit<"pause_runtime">): Promise> { + return this.client.requestOperation("pause_runtime", "put", "/runtimes/{runtime_id}/pause", init); + } + + /** Update a Custom Runtime */ + async update_runtime(init?: OperationInit<"update_runtime">): Promise> { + return this.client.requestOperation("update_runtime", "put", "/runtimes/{runtime_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/self_signup.ts 
b/packages/ts-sdk/src/resources/generated/self_signup.ts new file mode 100644 index 0000000..644fd9d --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/self_signup.ts @@ -0,0 +1,21 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class SelfSignupResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Sign Up */ + async self_sign_up(init?: OperationInit<"self_sign_up">): Promise> { + return this.client.requestOperation("self_sign_up", "post", "/signup", init); + } + + /** Verify Email */ + async verify_email(init?: OperationInit<"verify_email">): Promise> { + return this.client.requestOperation("verify_email", "get", "/signup/verify_email", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/self_signup_admin.ts b/packages/ts-sdk/src/resources/generated/self_signup_admin.ts new file mode 100644 index 0000000..8f08840 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/self_signup_admin.ts @@ -0,0 +1,41 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class SelfSignupAdminResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Add self-sign-up blocked domain for admins. 
*/ + async add_self_signup_blocked_domain(init?: OperationInit<"add_self_signup_blocked_domain">): Promise> { + return this.client.requestOperation("add_self_signup_blocked_domain", "post", "/self_signup_blocked_domains", init); + } + + /** Approve Self Sign Up Request */ + async approve_self_sign_up_request(init?: OperationInit<"approve_self_sign_up_request">): Promise> { + return this.client.requestOperation("approve_self_sign_up_request", "put", "/self_signup_requests/{request_id}/approve", init); + } + + /** Delete self-sign-up blocked domain for admins. */ + async delete_self_signup_blocked_domain(init?: OperationInit<"delete_self_signup_blocked_domain">): Promise> { + return this.client.requestOperation("delete_self_signup_blocked_domain", "delete", "/self_signup_blocked_domains/{domain_id}", init); + } + + /** List self-sign-up blocked domains for admins. */ + async get_self_signup_blocked_domains(init?: OperationInit<"get_self_signup_blocked_domains">): Promise> { + return this.client.requestOperation("get_self_signup_blocked_domains", "get", "/self_signup_blocked_domains", init); + } + + /** List Self Sign Up Requests */ + async get_self_signup_requests(init?: OperationInit<"get_self_signup_requests">): Promise> { + return this.client.requestOperation("get_self_signup_requests", "get", "/self_signup_requests", init); + } + + /** Update self-sign-up blocked domain for admins. 
*/ + async update_self_signup_blocked_domain(init?: OperationInit<"update_self_signup_blocked_domain">): Promise> { + return this.client.requestOperation("update_self_signup_blocked_domain", "put", "/self_signup_blocked_domains/{domain_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/sources.ts b/packages/ts-sdk/src/resources/generated/sources.ts new file mode 100644 index 0000000..6b3ead9 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/sources.ts @@ -0,0 +1,76 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class SourcesResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_data_sources">): Promise> { + return this.client.requestOperation("get_data_sources", "get", "/data_sources", init); + } + + async create(init?: OperationInit<"create_data_source">): Promise> { + return this.client.requestOperation("create_data_source", "post", "/data_sources", init); + } + + async get(init?: OperationInit<"get_data_source">): Promise> { + return this.client.requestOperation("get_data_source", "get", "/data_sources/{source_id}", init); + } + + async update(init?: OperationInit<"update_data_source">): Promise> { + return this.client.requestOperation("update_data_source", "put", "/data_sources/{source_id}", init); + } + + async delete(init?: OperationInit<"delete_data_source">): Promise> { + return this.client.requestOperation("delete_data_source", "delete", "/data_sources/{source_id}", init); + } + + /** Activate a Source */ + async activate_source(init?: OperationInit<"activate_source">): Promise> { + return this.client.requestOperation("activate_source", "put", "/data_sources/{source_id}/activate", init); + } + + /** Copy a Source */ + async 
copy_source(init?: OperationInit<"copy_source">): Promise> { + return this.client.requestOperation("copy_source", "post", "/data_sources/{source_id}/copy", init); + } + + /** Create a Source */ + async create_data_source(init?: OperationInit<"create_data_source">): Promise> { + return this.client.requestOperation("create_data_source", "post", "/data_sources", init); + } + + /** Delete a Source */ + async delete_data_source(init?: OperationInit<"delete_data_source">): Promise> { + return this.client.requestOperation("delete_data_source", "delete", "/data_sources/{source_id}", init); + } + + /** Get Source by ID */ + async get_data_source(init?: OperationInit<"get_data_source">): Promise> { + return this.client.requestOperation("get_data_source", "get", "/data_sources/{source_id}", init); + } + + /** Get Source by ID with Expanded References */ + async get_data_source_expanded(init?: OperationInit<"get_data_source_expanded">): Promise> { + return this.client.requestOperation("get_data_source_expanded", "get", "/data_sources/{source_id}?expand=1", init); + } + + /** Get All Sources */ + async get_data_sources(init?: OperationInit<"get_data_sources">): Promise> { + return this.client.requestOperation("get_data_sources", "get", "/data_sources", init); + } + + /** Pause a Source */ + async pause_source(init?: OperationInit<"pause_source">): Promise> { + return this.client.requestOperation("pause_source", "put", "/data_sources/{source_id}/pause", init); + } + + /** Update a Source */ + async update_data_source(init?: OperationInit<"update_data_source">): Promise> { + return this.client.requestOperation("update_data_source", "put", "/data_sources/{source_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/teams.ts b/packages/ts-sdk/src/resources/generated/teams.ts new file mode 100644 index 0000000..0baee7b --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/teams.ts @@ -0,0 +1,76 @@ +import type { NexlaClient } from 
"../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class TeamsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_teams">): Promise> { + return this.client.requestOperation("get_teams", "get", "/teams", init); + } + + async create(init?: OperationInit<"create_team">): Promise> { + return this.client.requestOperation("create_team", "post", "/teams", init); + } + + async get(init?: OperationInit<"get_team">): Promise> { + return this.client.requestOperation("get_team", "get", "/teams/{team_id}", init); + } + + async update(init?: OperationInit<"update_team">): Promise> { + return this.client.requestOperation("update_team", "put", "/teams/{team_id}", init); + } + + async delete(init?: OperationInit<"delete_team">): Promise> { + return this.client.requestOperation("delete_team", "delete", "/teams/{team_id}", init); + } + + /** Add Members to A Team */ + async add_team_members(init?: OperationInit<"add_team_members">): Promise> { + return this.client.requestOperation("add_team_members", "put", "/teams/{team_id}/members", init); + } + + /** Create a team */ + async create_team(init?: OperationInit<"create_team">): Promise> { + return this.client.requestOperation("create_team", "post", "/teams", init); + } + + /** Delete Team by ID */ + async delete_team(init?: OperationInit<"delete_team">): Promise> { + return this.client.requestOperation("delete_team", "delete", "/teams/{team_id}", init); + } + + /** Remove Team Members */ + async delete_team_members(init?: OperationInit<"delete_team_members">): Promise> { + return this.client.requestOperation("delete_team_members", "delete", "/teams/{team_id}/members", init); + } + + /** Get Team by ID */ + async get_team(init?: OperationInit<"get_team">): Promise> { + return 
this.client.requestOperation("get_team", "get", "/teams/{team_id}", init); + } + + /** Get Team Members */ + async get_team_members(init?: OperationInit<"get_team_members">): Promise> { + return this.client.requestOperation("get_team_members", "get", "/teams/{team_id}/members", init); + } + + /** Get all Teams */ + async get_teams(init?: OperationInit<"get_teams">): Promise> { + return this.client.requestOperation("get_teams", "get", "/teams", init); + } + + /** Replace Team Members List */ + async replace_team_members(init?: OperationInit<"replace_team_members">): Promise> { + return this.client.requestOperation("replace_team_members", "post", "/teams/{team_id}/members", init); + } + + /** Modify a Team */ + async update_team(init?: OperationInit<"update_team">): Promise> { + return this.client.requestOperation("update_team", "put", "/teams/{team_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/tokens.ts b/packages/ts-sdk/src/resources/generated/tokens.ts new file mode 100644 index 0000000..d532d1f --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/tokens.ts @@ -0,0 +1,26 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class TokensResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + /** Get info on current user */ + async get_current_user(init?: OperationInit<"get_current_user">): Promise> { + return this.client.requestOperation("get_current_user", "get", "/users/current", init); + } + + /** Login with Basic Authentication */ + async login_with_basic_auth(init?: OperationInit<"login_with_basic_auth">): Promise> { + return this.client.requestOperation("login_with_basic_auth", "post", "/token", withSkipAuth(init)); + } + + /** Logout */ + async logout(init?: 
OperationInit<"logout">): Promise> { + return this.client.requestOperation("logout", "post", "/token/logout", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/transforms.ts b/packages/ts-sdk/src/resources/generated/transforms.ts new file mode 100644 index 0000000..fe2581f --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/transforms.ts @@ -0,0 +1,96 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class TransformsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_reusable_record_transforms">): Promise> { + return this.client.requestOperation("get_reusable_record_transforms", "get", "/transforms", init); + } + + async create(init?: OperationInit<"create_reusable_record_transform">): Promise> { + return this.client.requestOperation("create_reusable_record_transform", "post", "/transforms", init); + } + + async get(init?: OperationInit<"get_reusable_record_transform">): Promise> { + return this.client.requestOperation("get_reusable_record_transform", "get", "/transforms/{transform_id}", init); + } + + async update(init?: OperationInit<"update_reusable_record_transform">): Promise> { + return this.client.requestOperation("update_reusable_record_transform", "put", "/transforms/{transform_id}", init); + } + + async delete(init?: OperationInit<"delete_reusable_record_transform">): Promise> { + return this.client.requestOperation("delete_reusable_record_transform", "delete", "/transforms/{transform_id}", init); + } + + /** Copy a Reusable Record Transform */ + async copy_transform(init?: OperationInit<"copy_transform">): Promise> { + return this.client.requestOperation("copy_transform", "post", "/transforms/{transform_id}/copy", init); + } + + /** Create 
an Attribute Transform */ + async create_attribute_transform(init?: OperationInit<"create_attribute_transform">): Promise> { + return this.client.requestOperation("create_attribute_transform", "post", "/attribute_transforms", init); + } + + /** Create a Reusable Record Transform */ + async create_reusable_record_transform(init?: OperationInit<"create_reusable_record_transform">): Promise> { + return this.client.requestOperation("create_reusable_record_transform", "post", "/transforms", init); + } + + /** Delete an Attribute Transform */ + async delete_attribute_transform(init?: OperationInit<"delete_attribute_transform">): Promise> { + return this.client.requestOperation("delete_attribute_transform", "delete", "/attribute_transforms/{attribute_transform_id}", init); + } + + /** Delete a Reusable Record Transform */ + async delete_reusable_record_transform(init?: OperationInit<"delete_reusable_record_transform">): Promise> { + return this.client.requestOperation("delete_reusable_record_transform", "delete", "/transforms/{transform_id}", init); + } + + /** Get Attribute Transform by ID */ + async get_attribute_transform(init?: OperationInit<"get_attribute_transform">): Promise> { + return this.client.requestOperation("get_attribute_transform", "get", "/attribute_transforms/{attribute_transform_id}", init); + } + + /** Get all Attribute Transforms */ + async get_attribute_transforms(init?: OperationInit<"get_attribute_transforms">): Promise> { + return this.client.requestOperation("get_attribute_transforms", "get", "/attribute_transforms", init); + } + + /** Get all Public Attribute Transforms */ + async get_public_attribute_transforms(init?: OperationInit<"get_public_attribute_transforms">): Promise> { + return this.client.requestOperation("get_public_attribute_transforms", "get", "/attribute_transforms/public", init); + } + + /** Get all Public Reusable Record Transforms */ + async get_public_reusable_record_transforms(init?: 
OperationInit<"get_public_reusable_record_transforms">): Promise> { + return this.client.requestOperation("get_public_reusable_record_transforms", "get", "/transforms/public", init); + } + + /** Get A Reusable Record Transform */ + async get_reusable_record_transform(init?: OperationInit<"get_reusable_record_transform">): Promise> { + return this.client.requestOperation("get_reusable_record_transform", "get", "/transforms/{transform_id}", init); + } + + /** Get all Reusable Record Transforms */ + async get_reusable_record_transforms(init?: OperationInit<"get_reusable_record_transforms">): Promise> { + return this.client.requestOperation("get_reusable_record_transforms", "get", "/transforms", init); + } + + /** Update Attribute Transform */ + async update_attribute_transform(init?: OperationInit<"update_attribute_transform">): Promise> { + return this.client.requestOperation("update_attribute_transform", "put", "/attribute_transforms/{attribute_transform_id}", init); + } + + /** Update Reusable Record Transform */ + async update_reusable_record_transform(init?: OperationInit<"update_reusable_record_transform">): Promise> { + return this.client.requestOperation("update_reusable_record_transform", "put", "/transforms/{transform_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/user_settings.ts b/packages/ts-sdk/src/resources/generated/user_settings.ts new file mode 100644 index 0000000..f839867 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/user_settings.ts @@ -0,0 +1,20 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class UserSettingsResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_user_settings">): Promise> { + return 
this.client.requestOperation("get_user_settings", "get", "/user_settings", init); + } + + /** Get the current user's settings */ + async get_user_settings(init?: OperationInit<"get_user_settings">): Promise> { + return this.client.requestOperation("get_user_settings", "get", "/user_settings", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/users.ts b/packages/ts-sdk/src/resources/generated/users.ts new file mode 100644 index 0000000..fd2eb7e --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/users.ts @@ -0,0 +1,57 @@ +import type { NexlaClient } from "../../client/nexla-client.js"; +import type { OperationData, OperationInit } from "../../client/operation-types.js"; +import { withSkipAuth } from "./utils.js"; + +export class UsersResource { + private readonly client: NexlaClient; + + constructor(client: NexlaClient) { + this.client = client; + } + + async list(init?: OperationInit<"get_users">): Promise> { + return this.client.requestOperation("get_users", "get", "/users", init); + } + + async create(init?: OperationInit<"create_user">): Promise> { + return this.client.requestOperation("create_user", "post", "/users", init); + } + + async get(init?: OperationInit<"get_user">): Promise> { + return this.client.requestOperation("get_user", "get", "/users/{user_id}", init); + } + + async update(init?: OperationInit<"update_user">): Promise> { + return this.client.requestOperation("update_user", "put", "/users/{user_id}", init); + } + + /** Create a User */ + async create_user(init?: OperationInit<"create_user">): Promise> { + return this.client.requestOperation("create_user", "post", "/users", init); + } + + /** Get User by ID */ + async get_user(init?: OperationInit<"get_user">): Promise> { + return this.client.requestOperation("get_user", "get", "/users/{user_id}", init); + } + + /** Get User by ID with Expanded References */ + async get_user_expand(init?: OperationInit<"get_user_expand">): Promise> { + return 
this.client.requestOperation("get_user_expand", "get", "/users/{user_id}?expand=1", init); + } + + /** Get All Users */ + async get_users(init?: OperationInit<"get_users">): Promise> { + return this.client.requestOperation("get_users", "get", "/users", init); + } + + /** Get All Users with Expanded References */ + async get_users_expand(init?: OperationInit<"get_users_expand">): Promise> { + return this.client.requestOperation("get_users_expand", "get", "/users?expand=1", init); + } + + /** Modify a User */ + async update_user(init?: OperationInit<"update_user">): Promise> { + return this.client.requestOperation("update_user", "put", "/users/{user_id}", init); + } +} \ No newline at end of file diff --git a/packages/ts-sdk/src/resources/generated/utils.ts b/packages/ts-sdk/src/resources/generated/utils.ts new file mode 100644 index 0000000..6308a04 --- /dev/null +++ b/packages/ts-sdk/src/resources/generated/utils.ts @@ -0,0 +1,40 @@ +/** + * Auto-generated helpers for resource clients. + * Do not edit manually. 
+ */ + +import type { HeadersOptions } from "openapi-fetch"; + +type HeaderCarrier = { headers?: HeadersOptions }; + +const normalizeHeaders = (headers?: HeadersOptions): Record => { + if (!headers) return {}; + if (headers instanceof Headers) { + const record: Record = {}; + headers.forEach((value, key) => { + record[key] = value; + }); + return record; + } + if (Array.isArray(headers)) { + return Object.fromEntries(headers); + } + const record: Record = {}; + for (const [key, value] of Object.entries(headers)) { + if (value === null || value === undefined) continue; + if (Array.isArray(value)) { + record[key] = value.map((item) => String(item)).join(", "); + } else { + record[key] = String(value); + } + } + return record; +}; + +export const withSkipAuth = (init?: T): T => { + if (!init) { + return { headers: { "x-nexla-skip-auth": "true" } } as unknown as T; + } + const headers = { ...normalizeHeaders(init.headers), "x-nexla-skip-auth": "true" }; + return { ...(init as HeaderCarrier), headers } as unknown as T; +}; diff --git a/packages/ts-sdk/src/resources/index.ts b/packages/ts-sdk/src/resources/index.ts new file mode 100644 index 0000000..32a2907 --- /dev/null +++ b/packages/ts-sdk/src/resources/index.ts @@ -0,0 +1,4 @@ +export { ResourceClient } from "./resource-client.js"; +export type { ResourceKey } from "./resource-client.js"; + +export * from "./generated/index.js"; diff --git a/packages/ts-sdk/src/resources/resource-client.ts b/packages/ts-sdk/src/resources/resource-client.ts new file mode 100644 index 0000000..e789b95 --- /dev/null +++ b/packages/ts-sdk/src/resources/resource-client.ts @@ -0,0 +1,58 @@ +import type { HttpMethod, PathsWithMethod } from "openapi-typescript-helpers"; +import type { paths } from "../generated/schema.js"; +import { resourceMap } from "../generated/resource-map.js"; +import type { NexlaClient } from "../client/nexla-client.js"; +import { NexlaError } from "../errors.js"; +import type { RequestOptions } from 
"../client/types.js"; + +export type ResourceKey = keyof typeof resourceMap; + +type ResourceEntry = { + path: string; + method: string; +}; + +type ResourceAction = "list" | "get" | "create" | "update" | "delete"; + +export class ResourceClient { + private readonly client: NexlaClient; + private readonly key: ResourceKey; + + constructor(client: NexlaClient, key: ResourceKey) { + this.client = client; + this.key = key; + } + + async list(init?: Record): Promise { + return this.call("list", init); + } + + async get(init?: Record): Promise { + return this.call("get", init); + } + + async create(init?: Record): Promise { + return this.call("create", init); + } + + async update(init?: Record): Promise { + return this.call("update", init); + } + + async delete(init?: Record): Promise { + return this.call("delete", init); + } + + private async call(action: ResourceAction, init?: Record): Promise { + const entry = (resourceMap[this.key] as Record)[action]; + + if (!entry) { + throw new NexlaError(`Action '${action}' is not available for resource '${this.key}'.`); + } + + const method = entry.method.toLowerCase() as HttpMethod; + const path = entry.path as PathsWithMethod; + + return this.client.request(method, path, init as RequestOptions); + } +} diff --git a/packages/ts-sdk/src/webhooks/index.ts b/packages/ts-sdk/src/webhooks/index.ts new file mode 100644 index 0000000..9475bc1 --- /dev/null +++ b/packages/ts-sdk/src/webhooks/index.ts @@ -0,0 +1,96 @@ +import { NexlaError, isRecord } from "../errors.js"; + +export interface WebhookSendOptions { + includeHeaders?: boolean; + includeUrlParams?: boolean; + forceSchemaDetection?: boolean; + authMethod?: "query" | "header"; +} + +export interface WebhooksClientOptions { + apiKey: string; + fetch?: typeof fetch; +} + +export class WebhooksClient { + private readonly apiKey: string; + private readonly fetchFn: typeof fetch; + + constructor(options: WebhooksClientOptions) { + this.apiKey = options.apiKey; + this.fetchFn = 
options.fetch ?? globalThis.fetch; + } + + async sendOneRecord( + webhookUrl: string, + record: Record, + options?: WebhookSendOptions + ): Promise { + return this.makeRequest(webhookUrl, record, options); + } + + async sendManyRecords( + webhookUrl: string, + records: Record[], + options?: WebhookSendOptions + ): Promise { + return this.makeRequest(webhookUrl, records, options); + } + + private async makeRequest( + webhookUrl: string, + body: Record | Record[], + options?: WebhookSendOptions + ): Promise { + const authMethod = options?.authMethod ?? "query"; + const url = new URL(webhookUrl); + + if (authMethod === "query") { + url.searchParams.set("api_key", this.apiKey); + } + if (options?.includeHeaders) url.searchParams.set("include_headers", "true"); + if (options?.includeUrlParams) url.searchParams.set("include_url_params", "true"); + if (options?.forceSchemaDetection) url.searchParams.set("force_schema_detection", "true"); + + const headers: Record = { "Content-Type": "application/json" }; + if (authMethod === "header") { + headers["Authorization"] = `Basic ${Buffer.from(this.apiKey).toString("base64")}`; + } + + const response = await this.fetchFn(url.toString(), { + method: "POST", + headers, + body: JSON.stringify(body) + }); + + if (!response.ok) { + const errorBody = await safeParseJson(response); + const message = extractErrorMessage(errorBody) ?? response.statusText ?? "Webhook request failed"; + throw new NexlaError(message, { statusCode: response.status, response: errorBody }); + } + + return safeParseJson(response); + } +} + +const safeParseJson = async (response: Response): Promise => { + const contentType = response.headers.get("content-type")?.toLowerCase() ?? 
""; + if (!contentType.includes("application/json")) { + return undefined; + } + try { + return await response.json(); + } catch { + return undefined; + } +}; + +const extractErrorMessage = (body: unknown): string | undefined => { + if (isRecord(body)) { + const message = body["message"]; + if (typeof message === "string") return message; + const error = body["error"]; + if (typeof error === "string") return error; + } + return undefined; +}; diff --git a/packages/ts-sdk/tests/access-control/accessors.test.ts b/packages/ts-sdk/tests/access-control/accessors.test.ts new file mode 100644 index 0000000..98c9842 --- /dev/null +++ b/packages/ts-sdk/tests/access-control/accessors.test.ts @@ -0,0 +1,238 @@ +/** + * Unit tests for accessor management operations. + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createAccessor, createAccessorList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("AccessorManagement", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("get accessors", () => { + it("fetches accessors for a source", async () => { + const accessors = createAccessorList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: accessors }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.get_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + }); + + expect(result).toEqual(accessors); + expect(calls[1]?.url).toContain("/data_sources/123/accessors"); + }); + + it("fetches accessors for a credential", async () => { + const accessors = createAccessorList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: 
"token", expires_in: 7200 } }, + { status: 200, body: accessors }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.get_data_credential_accessors({ + params: { path: { data_credential_id: 456 } }, + }); + + expect(result).toEqual(accessors); + expect(calls[1]?.url).toContain("/data_credentials/456/accessors"); + }); + + it("returns empty list when no accessors", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.get_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + }); + + expect(result).toEqual([]); + }); + }); + + describe("add accessors", () => { + it("adds user accessor with collaborator role", async () => { + const newAccessor = createAccessor({ type: "USER", access_roles: ["collaborator"] }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [newAccessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "collaborator" }] }, + }); + + expect(result).toEqual([newAccessor]); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/data_sources/123/accessors"); + }); + + it("adds team accessor", async () => { + const teamAccessor = createAccessor({ type: "TEAM", access_roles: ["collaborator"] }); + const { fetchFn } = createMockFetch([ + { status: 
200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [teamAccessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 789, type: "team", access_role: "collaborator" }] }, + }); + + expect(result).toEqual([teamAccessor]); + }); + + it("adds multiple accessors at once", async () => { + const accessors = createAccessorList(3); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: accessors }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { + accessors: [ + { id: 1, type: "user", access_role: "collaborator" }, + { id: 2, type: "team", access_role: "admin" }, + { id: 3, type: "user", access_role: "operator" }, + ], + }, + }); + + expect(result.length).toBe(3); + }); + }); + + describe("replace accessors", () => { + it("replaces entire accessor list", async () => { + const newAccessors = [createAccessor({ type: "USER", access_roles: ["owner"] })]; + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: newAccessors }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.replace_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "owner" }] }, + }); + + expect(result).toEqual(newAccessors); + 
expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("delete accessors", () => { + it("removes specific accessors", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.delete_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user" }] }, + }); + + expect(result).toEqual([]); + expect(calls[1]?.method).toBe("DELETE"); + }); + }); + + describe("error handling", () => { + it("handles 404 for non-existent resource", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Resource not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.access_control.get_data_source_accessors({ + params: { path: { data_source_id: 99999 } }, + }) + ).rejects.toThrow(); + }); + + it("handles 403 for insufficient permissions", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 403, body: { message: "Forbidden" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "admin" }] }, + }) + ).rejects.toThrow(); + }); + }); +}); diff --git a/packages/ts-sdk/tests/access-control/role-limitations.test.ts b/packages/ts-sdk/tests/access-control/role-limitations.test.ts new file 
mode 100644 index 0000000..aca28f8 --- /dev/null +++ b/packages/ts-sdk/tests/access-control/role-limitations.test.ts @@ -0,0 +1,297 @@ +/** + * Unit tests for access role limitations. + * Based on admin-api spec/access_control/role_limitations_spec.rb + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createAccessor, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("AccessRoleLimitations", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("collaborator role", () => { + it("is supported for sources", async () => { + const accessor = createAccessor({ access_roles: ["collaborator"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "collaborator" }] }, + }); + + expect(result[0].access_roles).toContain("collaborator"); + }); + + it("is supported for credentials", async () => { + const accessor = createAccessor({ access_roles: ["collaborator"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_credential_accessors({ + params: { path: { data_credential_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "collaborator" }] }, + }); + + 
expect(result[0].access_roles).toContain("collaborator"); + }); + + it("is supported for nexsets", async () => { + const accessor = createAccessor({ access_roles: ["collaborator"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_nexset_accessors({ + params: { path: { data_set_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "collaborator" }] }, + }); + + expect(result[0].access_roles).toContain("collaborator"); + }); + }); + + describe("admin role", () => { + it("is supported for sources", async () => { + const accessor = createAccessor({ access_roles: ["admin"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "admin" }] }, + }); + + expect(result[0].access_roles).toContain("admin"); + }); + + it("is supported for teams", async () => { + const accessor = createAccessor({ access_roles: ["admin"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_team_accessors({ + params: { path: { team_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: 
"admin" }] }, + }); + + expect(result[0].access_roles).toContain("admin"); + }); + }); + + describe("operator role", () => { + it("is supported for sources", async () => { + const accessor = createAccessor({ access_roles: ["operator"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "operator" }] }, + }); + + expect(result[0].access_roles).toContain("operator"); + }); + + it("is supported for nexsets", async () => { + const accessor = createAccessor({ access_roles: ["operator"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_nexset_accessors({ + params: { path: { data_set_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "operator" }] }, + }); + + expect(result[0].access_roles).toContain("operator"); + }); + }); + + describe("sharer role", () => { + it("is supported for nexsets (data_sets)", async () => { + const accessor = createAccessor({ access_roles: ["sharer"] }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_nexset_accessors({ + params: { path: { 
data_set_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "sharer" }] }, + }); + + expect(result[0].access_roles).toContain("sharer"); + }); + + it("returns error for sources (sharer not allowed)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 400, body: { message: "Invalid access role 'sharer' for data_source" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "sharer" }] }, + }) + ).rejects.toThrow(); + }); + + it("returns error for teams (sharer not allowed)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 400, body: { message: "Invalid access role 'sharer' for team" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.access_control.add_team_accessors({ + params: { path: { team_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "sharer" }] }, + }) + ).rejects.toThrow(); + }); + }); + + describe("invalid role", () => { + it("returns bad request for unknown role", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 400, body: { message: "Invalid access role 'unknown_role'" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", 
access_role: "unknown_role" }] }, + }) + ).rejects.toThrow(); + }); + }); + + describe("accessor type", () => { + it("supports USER type", async () => { + const accessor = createAccessor({ type: "USER" }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 456, type: "user", access_role: "collaborator" }] }, + }); + + expect(result[0].type).toBe("USER"); + }); + + it("supports TEAM type", async () => { + const accessor = createAccessor({ type: "TEAM" }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [accessor] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.access_control.add_data_source_accessors({ + params: { path: { data_source_id: 123 } }, + body: { accessors: [{ id: 789, type: "team", access_role: "collaborator" }] }, + }); + + expect(result[0].type).toBe("TEAM"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/access-token.test.ts b/packages/ts-sdk/tests/access-token.test.ts new file mode 100644 index 0000000..1a64c1c --- /dev/null +++ b/packages/ts-sdk/tests/access-token.test.ts @@ -0,0 +1,15 @@ +import { describe, expect, it } from "vitest"; +import { AccessTokenAuthProvider } from "../src/auth/access-token.js"; +import { AuthenticationError } from "../src/errors.js"; + +describe("AccessTokenAuthProvider", () => { + it("returns the provided access token", async () => { + const provider = new AccessTokenAuthProvider("token"); + await 
expect(provider.getAccessToken()).resolves.toBe("token"); + }); + + it("throws on refresh attempts", async () => { + const provider = new AccessTokenAuthProvider("token"); + await expect(provider.refreshAccessToken()).rejects.toBeInstanceOf(AuthenticationError); + }); +}); diff --git a/packages/ts-sdk/tests/auth.test.ts b/packages/ts-sdk/tests/auth.test.ts new file mode 100644 index 0000000..aa7c203 --- /dev/null +++ b/packages/ts-sdk/tests/auth.test.ts @@ -0,0 +1,80 @@ +import { describe, expect, it } from "vitest"; +import { ServiceKeyAuthProvider } from "../src/auth/service-key.js"; +import { AuthenticationError } from "../src/errors.js"; + +const createMockFetch = (responses: Array<{ status: number; body?: unknown }>) => { + const calls: Request[] = []; + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + const next = responses.shift() ?? { status: 200, body: {} }; + const body = JSON.stringify(next.body ?? 
{}); + return new Response(body, { + status: next.status, + headers: { "content-type": "application/json" } + }); + }; + return { fetchFn, calls }; +}; + +describe("ServiceKeyAuthProvider", () => { + it("obtains and caches access tokens", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token-1", expires_in: 7200 } } + ]); + + const provider = new ServiceKeyAuthProvider({ + serviceKey: "svc-key", + baseUrl: "https://example.com", + apiVersion: "v1", + tokenRefreshMargin: 60, + fetchFn + }); + + const token1 = await provider.getAccessToken(); + const token2 = await provider.getAccessToken(); + + expect(token1).toBe("token-1"); + expect(token2).toBe("token-1"); + expect(calls.length).toBe(1); + }); + + it("refreshes token when near expiry", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token-1", expires_in: 1 } }, + { status: 200, body: { access_token: "token-2", expires_in: 7200 } } + ]); + + const provider = new ServiceKeyAuthProvider({ + serviceKey: "svc-key", + baseUrl: "https://example.com", + apiVersion: "v1", + tokenRefreshMargin: 3600, + fetchFn + }); + + const token1 = await provider.getAccessToken(); + const token2 = await provider.getAccessToken(); + + expect(token1).toBe("token-1"); + expect(token2).toBe("token-2"); + expect(calls.length).toBe(2); + }); + + it("throws AuthenticationError on 401", async () => { + const { fetchFn } = createMockFetch([ + { status: 401, body: { message: "Invalid service key" } } + ]); + + const provider = new ServiceKeyAuthProvider({ + serviceKey: "svc-key", + baseUrl: "https://example.com", + apiVersion: "v1", + tokenRefreshMargin: 60, + fetchFn + }); + + await expect(provider.getAccessToken()).rejects.toBeInstanceOf(AuthenticationError); + }); +}); diff --git a/packages/ts-sdk/tests/client.test.ts b/packages/ts-sdk/tests/client.test.ts new file mode 100644 index 0000000..d05b939 --- /dev/null +++ 
b/packages/ts-sdk/tests/client.test.ts @@ -0,0 +1,39 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../src/client/nexla-client.js"; + +const createMockFetch = () => { + const calls: Request[] = []; + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + const body = JSON.stringify([{ id: 1, name: "flow-1" }]); + return new Response(body, { + status: 200, + headers: { "content-type": "application/json" } + }); + }; + return { fetchFn, calls }; +}; + +describe("NexlaClient", () => { + it("injects authorization and accept headers", async () => { + const { fetchFn, calls } = createMockFetch(); + + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn, + userAgent: "nexla-sdk-test" + }); + + await client.request("get", "/flows"); + + expect(calls.length).toBe(1); + const request = calls[0]; + if (!request) throw new Error("No request captured"); + expect(request.headers.get("Authorization")).toBe("Bearer access-token"); + expect(request.headers.get("Accept")).toBe("application/vnd.nexla.api.v1+json"); + expect(request.headers.get("User-Agent")).toBe("nexla-sdk-test"); + }); +}); diff --git a/packages/ts-sdk/tests/coverage-branches.test.ts b/packages/ts-sdk/tests/coverage-branches.test.ts new file mode 100644 index 0000000..4f7b4de --- /dev/null +++ b/packages/ts-sdk/tests/coverage-branches.test.ts @@ -0,0 +1,57 @@ +import { describe, expect, it } from "vitest"; +import { createFetchWithRetry } from "../src/client/http.js"; +import { ServiceKeyAuthProvider } from "../src/auth/service-key.js"; +import { NexlaClient } from "../src/client/nexla-client.js"; + +const createResponse = (status: number, contentType = "application/json") => + new Response("{}", { status, headers: { "content-type": contentType } }); + +describe("branch coverage extras", () => { + it("does not retry 
on non-retryable status", async () => { + let calls = 0; + const baseFetch: typeof fetch = async () => { + calls += 1; + return createResponse(400); + }; + + const fetchWithRetry = createFetchWithRetry(baseFetch, { maxRetries: 2, backoffMs: 1, maxBackoffMs: 5 }); + const response = await fetchWithRetry(new Request("https://example.com")); + + expect(response.status).toBe(400); + expect(calls).toBe(1); + }); + + it("handles non-json responses in service key auth", async () => { + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + if (request.url.endsWith("/token")) { + return createResponse(200, "text/plain"); + } + return createResponse(200); + }; + + const provider = new ServiceKeyAuthProvider({ + serviceKey: "svc", + baseUrl: "https://example.com", + apiVersion: "v1", + tokenRefreshMargin: 1, + fetchFn + }); + + await expect(provider.getAccessToken()).rejects.toThrow(); + }); + + it("defaults error message when response body is not JSON", async () => { + const fetchFn: typeof fetch = async () => { + return new Response("no-json", { status: 500, headers: { "content-type": "text/plain" } }); + }; + + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await expect(client.request("get", "/flows")).rejects.toThrow(); + }); +}); diff --git a/packages/ts-sdk/tests/coverage-nexla-client.test.ts b/packages/ts-sdk/tests/coverage-nexla-client.test.ts new file mode 100644 index 0000000..d612a3d --- /dev/null +++ b/packages/ts-sdk/tests/coverage-nexla-client.test.ts @@ -0,0 +1,84 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../src/client/nexla-client.js"; +import { AuthenticationError, RateLimitError, ServerError } from "../src/errors.js"; + +const createFetchSequence = (responses: Array<{ status: number; body?: unknown; headers?: Record }>) => { + const calls: Request[] = []; + const 
fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + const url = request.url; + if (url.endsWith("/token")) { + const body = JSON.stringify({ access_token: "token-1", expires_in: 3600 }); + return new Response(body, { status: 200, headers: { "content-type": "application/json" } }); + } + + const next = responses.shift() ?? { status: 200, body: {} }; + return new Response(JSON.stringify(next.body ?? {}), { + status: next.status, + headers: { "content-type": "application/json", ...(next.headers ?? {}) } + }); + }; + return { fetchFn, calls }; +}; + +describe("NexlaClient internal flows", () => { + it("refreshes auth on 401 for service key", async () => { + const { fetchFn, calls } = createFetchSequence([ + { status: 401, body: { message: "expired" } }, + { status: 200, body: [{ id: 1 }] } + ]); + + const client = new NexlaClient({ + serviceKey: "svc", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + const data = await client.request("get", "/flows"); + expect(Array.isArray(data)).toBe(true); + expect(calls.length).toBeGreaterThanOrEqual(2); + }); + + it("maps rate limit errors with retry-after", async () => { + const { fetchFn } = createFetchSequence([ + { status: 429, body: { message: "slow down" }, headers: { "retry-after": "5" } } + ]); + + const client = new NexlaClient({ + serviceKey: "svc", + baseUrl: "https://example.com", + fetch: fetchFn, + retry: { maxRetries: 0 } + }); + + await expect(client.request("get", "/flows")).rejects.toBeInstanceOf(RateLimitError); + }); + + it("maps 500 errors to ServerError", async () => { + const { fetchFn } = createFetchSequence([ + { status: 500, body: { error: "boom" } } + ]); + + const client = new NexlaClient({ + serviceKey: "svc", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await expect(client.request("get", "/flows")).rejects.toBeInstanceOf(ServerError); + }); + + it("throws 
AuthenticationError when using expired access token", async () => { + const { fetchFn } = createFetchSequence([{ status: 401, body: { message: "invalid" } }]); + + const client = new NexlaClient({ + accessToken: "access", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await expect(client.request("get", "/flows")).rejects.toBeInstanceOf(AuthenticationError); + }); +}); diff --git a/packages/ts-sdk/tests/errors.test.ts b/packages/ts-sdk/tests/errors.test.ts new file mode 100644 index 0000000..a34ef31 --- /dev/null +++ b/packages/ts-sdk/tests/errors.test.ts @@ -0,0 +1,17 @@ +import { describe, expect, it } from "vitest"; +import { NexlaError, RateLimitError } from "../src/errors.js"; + +describe("NexlaError", () => { + it("captures summary metadata", () => { + const err = new NexlaError("boom", { statusCode: 400, response: { message: "bad" } }); + const summary = err.getErrorSummary(); + + expect(summary.message).toBe("boom"); + expect(summary.status_code).toBe(400); + }); + + it("includes retryAfter for rate limit errors", () => { + const err = new RateLimitError("slow down", { retryAfter: 30 }); + expect(err.retryAfter).toBe(30); + }); +}); diff --git a/packages/ts-sdk/tests/http-retry-headers.test.ts b/packages/ts-sdk/tests/http-retry-headers.test.ts new file mode 100644 index 0000000..b066ab9 --- /dev/null +++ b/packages/ts-sdk/tests/http-retry-headers.test.ts @@ -0,0 +1,28 @@ +import { describe, expect, it } from "vitest"; +import { createFetchWithRetry } from "../src/client/http.js"; + +const createResponse = (status: number, retryAfter?: string) => + new Response("{}", { + status, + headers: { + "content-type": "application/json", + ...(retryAfter ? 
{ "retry-after": retryAfter } : {}) + } + }); + +describe("createFetchWithRetry", () => { + it("respects retry-after header", async () => { + let calls = 0; + const baseFetch: typeof fetch = async () => { + calls += 1; + if (calls === 1) return createResponse(429, "0"); + return createResponse(200); + }; + + const fetchWithRetry = createFetchWithRetry(baseFetch, { maxRetries: 2, backoffMs: 1, maxBackoffMs: 5 }); + const response = await fetchWithRetry(new Request("https://example.com")); + + expect(response.status).toBe(200); + expect(calls).toBe(2); + }); +}); diff --git a/packages/ts-sdk/tests/integration/live-api.test.ts b/packages/ts-sdk/tests/integration/live-api.test.ts new file mode 100644 index 0000000..8b1acdd --- /dev/null +++ b/packages/ts-sdk/tests/integration/live-api.test.ts @@ -0,0 +1,44 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +const serviceKey = process.env.NEXLA_SERVICE_KEY; +const accessToken = process.env.NEXLA_ACCESS_TOKEN; +const hasAuth = Boolean(serviceKey || accessToken); + +const describeIf = hasAuth ? 
describe : describe.skip; + +const createClient = (): NexlaClient => { + const options: { + serviceKey?: string; + accessToken?: string; + baseUrl?: string; + } = {}; + + if (serviceKey) { + options.serviceKey = serviceKey; + } else if (accessToken) { + options.accessToken = accessToken; + } + + if (process.env.NEXLA_API_URL) { + options.baseUrl = process.env.NEXLA_API_URL; + } + + return new NexlaClient(options); +}; + +describeIf("Live API integration", () => { + it("can list flows with real credentials", async () => { + const client = createClient(); + const result = await client.flows.list(); + + expect(result).toBeDefined(); + }); + + it("can perform typed raw GET calls", async () => { + const client = createClient(); + const { data } = await client.raw.GET("/flows"); + + expect(data).toBeDefined(); + }); +}); diff --git a/packages/ts-sdk/tests/logout.test.ts b/packages/ts-sdk/tests/logout.test.ts new file mode 100644 index 0000000..f372052 --- /dev/null +++ b/packages/ts-sdk/tests/logout.test.ts @@ -0,0 +1,40 @@ +import { describe, expect, it } from "vitest"; +import { ServiceKeyAuthProvider } from "../src/auth/service-key.js"; + +const createMockFetch = () => { + const calls: Request[] = []; + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? 
input : new Request(input, init); + calls.push(request); + + if (request.url.endsWith("/token")) { + return new Response(JSON.stringify({ access_token: "token-1", expires_in: 1 }), { + status: 200, + headers: { "content-type": "application/json" } + }); + } + + return new Response("", { status: 204 }); + }; + return { fetchFn, calls }; +}; + +describe("ServiceKeyAuthProvider logout", () => { + it("calls /token/logout and clears token", async () => { + const { fetchFn, calls } = createMockFetch(); + + const provider = new ServiceKeyAuthProvider({ + serviceKey: "svc-key", + baseUrl: "https://example.com", + apiVersion: "v1", + tokenRefreshMargin: 1, + fetchFn + }); + + await provider.getAccessToken(); + await provider.logout(); + + const logoutCall = calls.find((req) => req.url.endsWith("/token/logout")); + expect(logoutCall).toBeTruthy(); + }); +}); diff --git a/packages/ts-sdk/tests/optional-auth.test.ts b/packages/ts-sdk/tests/optional-auth.test.ts new file mode 100644 index 0000000..2f7f0f7 --- /dev/null +++ b/packages/ts-sdk/tests/optional-auth.test.ts @@ -0,0 +1,32 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../src/client/nexla-client.js"; + +const createMockFetch = () => { + const calls: Request[] = []; + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? 
input : new Request(input, init); + calls.push(request); + return new Response(JSON.stringify({ ok: true }), { + status: 200, + headers: { "content-type": "application/json" } + }); + }; + return { fetchFn, calls }; +}; + +describe("NexlaClient auth bypass header", () => { + it("skips auth when x-nexla-skip-auth is set", async () => { + const { fetchFn, calls } = createMockFetch(); + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await client.request("get", "/flows", { headers: { "x-nexla-skip-auth": "true" } }); + + const request = calls[0]; + if (!request) throw new Error("No request captured"); + expect(request.headers.get("Authorization")).toBeNull(); + }); +}); diff --git a/packages/ts-sdk/tests/request-errors.test.ts b/packages/ts-sdk/tests/request-errors.test.ts new file mode 100644 index 0000000..2f407b8 --- /dev/null +++ b/packages/ts-sdk/tests/request-errors.test.ts @@ -0,0 +1,26 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../src/client/nexla-client.js"; +import { NotFoundError } from "../src/errors.js"; + +const createMockFetch = (status: number, body: unknown) => { + const fetchFn: typeof fetch = async () => { + return new Response(JSON.stringify(body), { + status, + headers: { "content-type": "application/json" } + }); + }; + return fetchFn; +}; + +describe("NexlaClient error mapping", () => { + it("maps 404 to NotFoundError", async () => { + const fetchFn = createMockFetch(404, { message: "not found" }); + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await expect(client.request("get", "/flows")).rejects.toBeInstanceOf(NotFoundError); + }); +}); diff --git a/packages/ts-sdk/tests/resource-client-actions.test.ts b/packages/ts-sdk/tests/resource-client-actions.test.ts new file mode 100644 index 0000000..c27d0ed --- /dev/null +++ 
b/packages/ts-sdk/tests/resource-client-actions.test.ts @@ -0,0 +1,23 @@ +import { describe, expect, it } from "vitest"; +import { ResourceClient } from "../src/resources/resource-client.js"; +import type { NexlaClient } from "../src/client/nexla-client.js"; + +class FakeClient { + public calls: Array<{ method: string; path: string }> = []; + + async request(method: string, path: string): Promise { + this.calls.push({ method, path }); + return { ok: true }; + } +} + +describe("ResourceClient actions", () => { + it("calls create on async_tasks", async () => { + const fake = new FakeClient(); + const resource = new ResourceClient(fake as unknown as NexlaClient, "async_tasks"); + + await resource.create({}); + + expect(fake.calls[0]).toEqual({ method: "post", path: "/async_tasks" }); + }); +}); diff --git a/packages/ts-sdk/tests/resource-client-error.test.ts b/packages/ts-sdk/tests/resource-client-error.test.ts new file mode 100644 index 0000000..cfefb50 --- /dev/null +++ b/packages/ts-sdk/tests/resource-client-error.test.ts @@ -0,0 +1,26 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../src/client/nexla-client.js"; + +const createMockFetch = () => { + const fetchFn: typeof fetch = async () => { + return new Response(JSON.stringify({ ok: true }), { + status: 200, + headers: { "content-type": "application/json" } + }); + }; + return fetchFn; +}; + +describe("ResourceClient errors", () => { + it("skips resources without OpenAPI operations", () => { + const fetchFn = createMockFetch(); + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + const apiKeys = (client as { api_keys?: unknown }).api_keys; + expect(apiKeys).toBeUndefined(); + }); +}); diff --git a/packages/ts-sdk/tests/resource-client.test.ts b/packages/ts-sdk/tests/resource-client.test.ts new file mode 100644 index 0000000..6cd5ef9 --- /dev/null +++ b/packages/ts-sdk/tests/resource-client.test.ts @@ -0,0 
+1,54 @@ +import { describe, expect, it } from "vitest"; +import { NexlaClient } from "../src/client/nexla-client.js"; + +const createMockFetch = () => { + const calls: Request[] = []; + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + const body = JSON.stringify([{ id: 1 }]); + return new Response(body, { + status: 200, + headers: { "content-type": "application/json" } + }); + }; + return { fetchFn, calls }; +}; + +describe("ResourceClient", () => { + it("calls list on flows resource", async () => { + const { fetchFn, calls } = createMockFetch(); + + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await client.flows.list({ params: { query: { page: 1 } } }); + + const request = calls[0]; + if (!request) throw new Error("No request captured"); + expect(request.url).toContain("/flows"); + }); + + it("calls operationId method on flows resource", async () => { + const { fetchFn, calls } = createMockFetch(); + + const client = new NexlaClient({ + accessToken: "access-token", + baseUrl: "https://example.com", + fetch: fetchFn + }); + + await client.flows.flow_activate_with_flow_id({ + params: { path: { flow_id: 123, all: 1 } } + }); + + const request = calls[0]; + if (!request) throw new Error("No request captured"); + expect(request.url).toContain("/flows/123/activate"); + expect(request.method).toBe("PUT"); + }); +}); diff --git a/packages/ts-sdk/tests/resources/credentials.test.ts b/packages/ts-sdk/tests/resources/credentials.test.ts new file mode 100644 index 0000000..285060b --- /dev/null +++ b/packages/ts-sdk/tests/resources/credentials.test.ts @@ -0,0 +1,273 @@ +/** + * Unit tests for Credentials resource operations. 
+ */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createCredential, createCredentialList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("CredentialsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all credentials", async () => { + const credentials = createCredentialList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: credentials }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.list(); + + expect(result).toEqual(credentials); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/data_credentials"); + }); + + it("filters by credentials_type", async () => { + const credentials = createCredentialList(2, { credentials_type: "s3" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: credentials }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.credentials.list({ params: { query: { credentials_type: "s3" } } }); + + const requestUrl = calls[1]?.url ?? 
""; + expect(requestUrl).toContain("credentials_type=s3"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches credential by ID", async () => { + const credential = createCredential({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: credential }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.get({ + params: { path: { credential_id: 123 } }, + }); + + expect(result).toEqual(credential); + expect(calls[1]?.url).toContain("/data_credentials/123"); + }); + + it("handles credential not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Credential not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.credentials.get({ params: { path: { credential_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("create operations", () => { + it("creates a new credential", async () => { + const newCredential = createCredential({ name: "My New Credential" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newCredential }, + ]); + + const client = new NexlaClient({ + serviceKey: 
"test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.create({ + body: { + name: "My New Credential", + credentials_type: "postgres", + config: { host: "localhost" }, + }, + }); + + expect(result).toEqual(newCredential); + expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("update operations", () => { + it("updates existing credential", async () => { + const updatedCredential = createCredential({ id: 123, name: "Updated Credential" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedCredential }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.update({ + params: { path: { credential_id: 123 } }, + body: { name: "Updated Credential" }, + }); + + expect(result).toEqual(updatedCredential); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/data_credentials/123"); + }); + }); + + describe("delete operations", () => { + it("deletes credential by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.credentials.delete({ params: { path: { credential_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/data_credentials/123"); + }); + }); + + describe("probe operations", () => { + it("probes credential validity", async () => { + const probeResult = { status: "ok", connection_verified: true }; + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { 
status: 200, body: probeResult }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.data_credential_probe({ + params: { path: { credential_id: 123 } }, + }); + + expect(result).toEqual(probeResult); + expect(calls[1]?.url).toContain("/data_credentials/123/probe"); + }); + + it("returns tree structure", async () => { + const treeResult = { + status: "ok", + object: { tree: [{ name: "folder1", type: "folder" }] }, + }; + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: treeResult }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.preview_storage_structure({ + params: { path: { credential_id: 123 } }, + }); + + expect(result).toEqual(treeResult); + expect(calls[1]?.url).toContain("/data_credentials/123/probe/tree"); + }); + }); + + describe("verification status", () => { + it("returns VERIFIED status", async () => { + const credential = createCredential({ verified_status: "VERIFIED" }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: credential }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.get({ + params: { path: { credential_id: 123 } }, + }); + + expect(result.verified_status).toBe("VERIFIED"); + }); + + it("returns FAILED status", async () => { + const credential = createCredential({ verified_status: "FAILED" }); + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: credential }, + ]); + + const client = new 
NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.credentials.get({ + params: { path: { credential_id: 123 } }, + }); + + expect(result.verified_status).toBe("FAILED"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/destinations.test.ts b/packages/ts-sdk/tests/resources/destinations.test.ts new file mode 100644 index 0000000..89d4aff --- /dev/null +++ b/packages/ts-sdk/tests/resources/destinations.test.ts @@ -0,0 +1,245 @@ +/** + * Unit tests for Destinations resource operations. + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createDestination, createDestinationList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("DestinationsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all destinations", async () => { + const destinations = createDestinationList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: destinations }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.list(); + + expect(result).toEqual(destinations); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/data_sinks"); + }); + + it("passes query parameters correctly", async () => { + const destinations = createDestinationList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: destinations }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + 
await client.destinations.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? ""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches destination by ID", async () => { + const destination = createDestination({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: destination }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.get({ params: { path: { sink_id: 123 } } }); + + expect(result).toEqual(destination); + expect(calls[1]?.url).toContain("/data_sinks/123"); + }); + + it("handles destination not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Destination not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.destinations.get({ params: { path: { sink_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("create operations", () => { + it("creates a new destination", async () => { + const newDestination = createDestination({ name: "My New Destination" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { 
access_token: "token", expires_in: 7200 } }, + { status: 201, body: newDestination }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.create({ + body: { name: "My New Destination", connector_id: 1, data_credentials_id: 1 }, + }); + + expect(result).toEqual(newDestination); + expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("update operations", () => { + it("updates existing destination", async () => { + const updatedDestination = createDestination({ id: 123, name: "Updated Destination" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedDestination }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.update({ + params: { path: { sink_id: 123 } }, + body: { name: "Updated Destination" }, + }); + + expect(result).toEqual(updatedDestination); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/data_sinks/123"); + }); + }); + + describe("delete operations", () => { + it("deletes destination by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.destinations.delete({ params: { path: { sink_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/data_sinks/123"); + }); + }); + + describe("lifecycle operations", () => { + it("activates a paused destination", async () => { + const activatedDestination = createDestination({ id: 123, status: "ACTIVE" 
}); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: activatedDestination }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.activate_data_sink({ + params: { path: { sink_id: 123 } }, + }); + + expect(result).toEqual(activatedDestination); + expect(calls[1]?.url).toContain("/data_sinks/123/activate"); + }); + + it("pauses an active destination", async () => { + const pausedDestination = createDestination({ id: 123, status: "PAUSED" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: pausedDestination }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.pause_data_sink({ + params: { path: { sink_id: 123 } }, + }); + + expect(result).toEqual(pausedDestination); + expect(calls[1]?.url).toContain("/data_sinks/123/pause"); + }); + + it("copies a destination", async () => { + const copiedDestination = createDestination({ id: 456, name: "Copy of Destination" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: copiedDestination }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.destinations.copy_data_sink_source({ + params: { path: { sink_id: 123 } }, + }); + + expect(result).toEqual(copiedDestination); + expect(calls[1]?.url).toContain("/data_sinks/123/copy"); + expect(calls[1]?.method).toBe("POST"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/flows.test.ts 
b/packages/ts-sdk/tests/resources/flows.test.ts new file mode 100644 index 0000000..4218360 --- /dev/null +++ b/packages/ts-sdk/tests/resources/flows.test.ts @@ -0,0 +1,236 @@ +/** + * Unit tests for Flows resource operations. + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createFlow, createFlowList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("FlowsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all flows", async () => { + const flows = createFlowList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: flows }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.list(); + + expect(result).toEqual(flows); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/flows"); + }); + + it("passes query parameters correctly", async () => { + const flows = createFlowList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: flows }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.flows.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? 
""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches flow by ID", async () => { + const flow = createFlow({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: flow }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.get_flow_by_id({ params: { path: { flow_id: 123 } } }); + + expect(result).toEqual(flow); + expect(calls[1]?.url).toContain("/flows/123"); + }); + + it("fetches flow using get alias", async () => { + const flow = createFlow({ id: 456 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: flow }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.get({ params: { path: { flow_id: 456 } } }); + + expect(result).toEqual(flow); + expect(calls[1]?.url).toContain("/flows/456"); + }); + + it("handles flow not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Flow not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: 
fetchFn, + }); + + await expect( + client.flows.get_flow_by_id({ params: { path: { flow_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("delete operations", () => { + it("deletes flow by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.flows.delete_flow({ params: { path: { flow_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/flows/123"); + }); + + it("deletes flow using delete alias", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.flows.delete({ params: { path: { flow_id: 456 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/flows/456"); + }); + }); + + describe("lifecycle operations", () => { + it("activates a paused flow", async () => { + const activatedFlow = createFlow({ id: 123, status: "ACTIVE" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: activatedFlow }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.flow_activate_with_flow_id({ + params: { path: { flow_id: 123 } }, + }); + + expect(result).toEqual(activatedFlow); + expect(calls[1]?.url).toContain("/flows/123/activate"); + expect(calls[1]?.method).toBe("PUT"); + }); + + it("pauses an active flow", async () => { + 
const pausedFlow = createFlow({ id: 123, status: "PAUSED" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: pausedFlow }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.flow_pause_with_flow_id({ + params: { path: { flow_id: 123 } }, + }); + + expect(result).toEqual(pausedFlow); + expect(calls[1]?.url).toContain("/flows/123/pause"); + expect(calls[1]?.method).toBe("PUT"); + }); + + it("copies a flow", async () => { + const copiedFlow = createFlow({ id: 456, name: "Copy of Flow" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: copiedFlow }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.flows.flow_copy_with_flow_id({ + params: { path: { flow_id: 123 } }, + }); + + expect(result).toEqual(copiedFlow); + expect(calls[1]?.url).toContain("/flows/123/copy"); + expect(calls[1]?.method).toBe("POST"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/nexsets.test.ts b/packages/ts-sdk/tests/resources/nexsets.test.ts new file mode 100644 index 0000000..6e23e47 --- /dev/null +++ b/packages/ts-sdk/tests/resources/nexsets.test.ts @@ -0,0 +1,245 @@ +/** + * Unit tests for Nexsets resource operations. 
+ */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createNexset, createNexsetList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("NexsetsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all nexsets", async () => { + const nexsets = createNexsetList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: nexsets }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.nexsets.list(); + + expect(result).toEqual(nexsets); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/data_sets"); + }); + + it("passes query parameters correctly", async () => { + const nexsets = createNexsetList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: nexsets }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.nexsets.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? 
""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.nexsets.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches nexset by ID", async () => { + const nexset = createNexset({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: nexset }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.nexsets.get({ params: { path: { set_id: 123 } } }); + + expect(result).toEqual(nexset); + expect(calls[1]?.url).toContain("/data_sets/123"); + }); + + it("handles nexset not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Nexset not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.nexsets.get({ params: { path: { set_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("create operations", () => { + it("creates a new nexset", async () => { + const newNexset = createNexset({ name: "My New Nexset" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newNexset }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", 
+ fetch: fetchFn, + }); + + const result = await client.nexsets.create({ + body: { name: "My New Nexset", data_source_id: 1 }, + }); + + expect(result).toEqual(newNexset); + expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("update operations", () => { + it("updates existing nexset", async () => { + const updatedNexset = createNexset({ id: 123, name: "Updated Nexset" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedNexset }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.nexsets.update({ + params: { path: { set_id: 123 } }, + body: { name: "Updated Nexset" }, + }); + + expect(result).toEqual(updatedNexset); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/data_sets/123"); + }); + }); + + describe("delete operations", () => { + it("deletes nexset by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.nexsets.delete({ params: { path: { set_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/data_sets/123"); + }); + }); + + describe("lifecycle operations", () => { + it("activates a paused nexset", async () => { + const activatedNexset = createNexset({ id: 123, status: "ACTIVE" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: activatedNexset }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + 
const result = await client.nexsets.activate_nexset({ + params: { path: { set_id: 123 } }, + }); + + expect(result).toEqual(activatedNexset); + expect(calls[1]?.url).toContain("/data_sets/123/activate"); + }); + + it("pauses an active nexset", async () => { + const pausedNexset = createNexset({ id: 123, status: "PAUSED" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: pausedNexset }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.nexsets.pause_nexset({ + params: { path: { set_id: 123 } }, + }); + + expect(result).toEqual(pausedNexset); + expect(calls[1]?.url).toContain("/data_sets/123/pause"); + }); + + it("copies a nexset", async () => { + const copiedNexset = createNexset({ id: 456, name: "Copy of Nexset" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: copiedNexset }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.nexsets.copy_nexset({ + params: { path: { set_id: 123 } }, + }); + + expect(result).toEqual(copiedNexset); + expect(calls[1]?.url).toContain("/data_sets/123/copy"); + expect(calls[1]?.method).toBe("POST"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/organizations.test.ts b/packages/ts-sdk/tests/resources/organizations.test.ts new file mode 100644 index 0000000..f83c339 --- /dev/null +++ b/packages/ts-sdk/tests/resources/organizations.test.ts @@ -0,0 +1,136 @@ +/** + * Unit tests for Organizations resource operations. 
+ */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createOrganization, createOrganizationList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("OrganizationsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all organizations", async () => { + const organizations = createOrganizationList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: organizations }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.organizations.list(); + + expect(result).toEqual(organizations); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/orgs"); + }); + + it("passes query parameters correctly", async () => { + const organizations = createOrganizationList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: organizations }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.organizations.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? 
""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.organizations.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches organization by ID", async () => { + const organization = createOrganization({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: organization }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.organizations.get({ params: { path: { org_id: 123 } } }); + + expect(result).toEqual(organization); + expect(calls[1]?.url).toContain("/orgs/123"); + }); + + it("handles organization not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Organization not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.organizations.get({ params: { path: { org_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("update operations", () => { + it("updates existing organization", async () => { + const updatedOrganization = createOrganization({ id: 123, name: "Updated Organization" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedOrganization }, + ]); + 
+ const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.organizations.update({ + params: { path: { org_id: 123 } }, + body: { name: "Updated Organization" }, + }); + + expect(result).toEqual(updatedOrganization); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/orgs/123"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/projects.test.ts b/packages/ts-sdk/tests/resources/projects.test.ts new file mode 100644 index 0000000..4c25bf0 --- /dev/null +++ b/packages/ts-sdk/tests/resources/projects.test.ts @@ -0,0 +1,339 @@ +/** + * Unit tests for Projects resource operations. + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createProject, createProjectList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("ProjectsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all projects", async () => { + const projects = createProjectList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: projects }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.list(); + + expect(result).toEqual(projects); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/projects"); + }); + + it("passes query parameters correctly", async () => { + const projects = createProjectList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: projects }, + ]); + + const client = new NexlaClient({ + serviceKey: 
"test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.projects.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? ""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches project by ID", async () => { + const project = createProject({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: project }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.get({ params: { path: { project_id: 123 } } }); + + expect(result).toEqual(project); + expect(calls[1]?.url).toContain("/projects/123"); + }); + + it("handles project not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Project not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.projects.get({ params: { path: { project_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("create operations", () => { + it("creates a new project", async () => { + const newProject = createProject({ name: "My New Project" }); + const { fetchFn, calls } = 
createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.create({ + body: { name: "My New Project" }, + }); + + expect(result).toEqual(newProject); + expect(calls[1]?.method).toBe("POST"); + expect(calls[1]?.url).toContain("/projects"); + }); + + it("creates a project with description", async () => { + const newProject = createProject({ + name: "Project with Description", + description: "A detailed project description" + }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.create({ + body: { name: "Project with Description", description: "A detailed project description" }, + }); + + expect(result).toEqual(newProject); + expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("update operations", () => { + it("updates existing project", async () => { + const updatedProject = createProject({ id: 123, name: "Updated Project" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.update({ + params: { path: { project_id: 123 } }, + body: { name: "Updated Project" }, + }); + + expect(result).toEqual(updatedProject); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/projects/123"); + }); + + it("updates project description", async () 
=> { + const updatedProject = createProject({ + id: 123, + description: "New description" + }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.update({ + params: { path: { project_id: 123 } }, + body: { description: "New description" }, + }); + + expect(result).toEqual(updatedProject); + expect(calls[1]?.method).toBe("PUT"); + }); + }); + + describe("delete operations", () => { + it("deletes project by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.projects.delete({ params: { path: { project_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/projects/123"); + }); + + it("handles delete of non-existent project", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Project not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.projects.delete({ params: { path: { project_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("flow operations", () => { + it("gets project flows", async () => { + const flows = [{ id: 1, name: "Flow 1" }, { id: 2, name: "Flow 2" }]; + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: flows }, + 
]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.get_project_flows({ + params: { path: { project_id: 123 } }, + }); + + expect(result).toEqual(flows); + expect(calls[1]?.url).toContain("/projects/123/flows"); + expect(calls[1]?.method).toBe("GET"); + }); + + it("adds flows to project", async () => { + const updatedProject = createProject({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.add_project_flows({ + params: { path: { project_id: 123 } }, + body: { flow_ids: [1, 2, 3] }, + }); + + expect(result).toEqual(updatedProject); + expect(calls[1]?.url).toContain("/projects/123/flows"); + expect(calls[1]?.method).toBe("PUT"); + }); + + it("removes flows from project", async () => { + const updatedProject = createProject({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.remove_project_flows({ + params: { path: { project_id: 123 } }, + body: { flow_ids: [1, 2] }, + }); + + expect(result).toEqual(updatedProject); + expect(calls[1]?.url).toContain("/projects/123/flows"); + expect(calls[1]?.method).toBe("DELETE"); + }); + + it("replaces project flows list", async () => { + const updatedProject = createProject({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 
200, body: updatedProject }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.projects.replace_project_flows({ + params: { path: { project_id: 123 } }, + body: { flow_ids: [4, 5, 6] }, + }); + + expect(result).toEqual(updatedProject); + expect(calls[1]?.url).toContain("/projects/123/flows"); + expect(calls[1]?.method).toBe("POST"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/sources.test.ts b/packages/ts-sdk/tests/resources/sources.test.ts new file mode 100644 index 0000000..960505a --- /dev/null +++ b/packages/ts-sdk/tests/resources/sources.test.ts @@ -0,0 +1,245 @@ +/** + * Unit tests for Sources resource operations. + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createSource, createSourceList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("SourcesResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all sources", async () => { + const sources = createSourceList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: sources }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.list(); + + expect(result).toEqual(sources); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/data_sources"); + }); + + it("passes query parameters correctly", async () => { + const sources = createSourceList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: sources }, + ]); + + const client = 
new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.sources.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? ""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches source by ID", async () => { + const source = createSource({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: source }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.get({ params: { path: { source_id: 123 } } }); + + expect(result).toEqual(source); + expect(calls[1]?.url).toContain("/data_sources/123"); + }); + + it("handles source not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Source not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.sources.get({ params: { path: { source_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("create operations", () => { + it("creates a new source", async () => { + const newSource = createSource({ name: "My New Source" }); + const { fetchFn, 
calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newSource }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.create({ + body: { name: "My New Source", connector_id: 1, data_credentials_id: 1 }, + }); + + expect(result).toEqual(newSource); + expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("update operations", () => { + it("updates existing source", async () => { + const updatedSource = createSource({ id: 123, name: "Updated Source" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedSource }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.update({ + params: { path: { source_id: 123 } }, + body: { name: "Updated Source" }, + }); + + expect(result).toEqual(updatedSource); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/data_sources/123"); + }); + }); + + describe("delete operations", () => { + it("deletes source by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.sources.delete({ params: { path: { source_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/data_sources/123"); + }); + }); + + describe("lifecycle operations", () => { + it("activates a paused source", async () => { + const activatedSource = createSource({ id: 123, status: "ACTIVE" }); + const { fetchFn, 
calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: activatedSource }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.activate_source({ + params: { path: { source_id: 123 } }, + }); + + expect(result).toEqual(activatedSource); + expect(calls[1]?.url).toContain("/data_sources/123/activate"); + }); + + it("pauses an active source", async () => { + const pausedSource = createSource({ id: 123, status: "PAUSED" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: pausedSource }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.pause_source({ + params: { path: { source_id: 123 } }, + }); + + expect(result).toEqual(pausedSource); + expect(calls[1]?.url).toContain("/data_sources/123/pause"); + }); + + it("copies a source", async () => { + const copiedSource = createSource({ id: 456, name: "Copy of Source" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: copiedSource }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.sources.copy_source({ + params: { path: { source_id: 123 } }, + }); + + expect(result).toEqual(copiedSource); + expect(calls[1]?.url).toContain("/data_sources/123/copy"); + expect(calls[1]?.method).toBe("POST"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/teams.test.ts b/packages/ts-sdk/tests/resources/teams.test.ts new file mode 100644 index 0000000..3ea194d --- /dev/null +++ 
b/packages/ts-sdk/tests/resources/teams.test.ts @@ -0,0 +1,251 @@ +/** + * Unit tests for Teams resource operations. + */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createTeam, createTeamList, createTeamMember, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("TeamsResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all teams", async () => { + const teams = createTeamList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: teams }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.teams.list(); + + expect(result).toEqual(teams); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/teams"); + }); + + it("passes query parameters correctly", async () => { + const teams = createTeamList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: teams }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.teams.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? 
""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.teams.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches team by ID", async () => { + const team = createTeam({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: team }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.teams.get({ params: { path: { team_id: 123 } } }); + + expect(result).toEqual(team); + expect(calls[1]?.url).toContain("/teams/123"); + }); + + it("handles team not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Team not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.teams.get({ params: { path: { team_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); + + describe("create operations", () => { + it("creates a new team with name", async () => { + const newTeam = createTeam({ name: "My New Team" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newTeam }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + 
}); + + const result = await client.teams.create({ + body: { name: "My New Team" }, + }); + + expect(result).toEqual(newTeam); + expect(calls[1]?.method).toBe("POST"); + expect(calls[1]?.url).toContain("/teams"); + }); + + it("creates a team with members", async () => { + const members = [ + createTeamMember({ email: "member1@test.com", admin: false }), + createTeamMember({ email: "member2@test.com", admin: true }), + ]; + const newTeam = createTeam({ name: "Team with Members", members }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newTeam }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.teams.create({ + body: { + name: "Team with Members", + members: [ + { email: "member1@test.com", admin: false }, + { email: "member2@test.com", admin: true }, + ], + }, + }); + + expect(result).toEqual(newTeam); + expect(calls[1]?.method).toBe("POST"); + }); + }); + + describe("update operations", () => { + it("updates existing team", async () => { + const updatedTeam = createTeam({ id: 123, name: "Updated Team" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedTeam }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.teams.update({ + params: { path: { team_id: 123 } }, + body: { name: "Updated Team" }, + }); + + expect(result).toEqual(updatedTeam); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/teams/123"); + }); + + it("updates team description", async () => { + const updatedTeam = createTeam({ id: 456, description: "New description" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { 
access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedTeam }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.teams.update({ + params: { path: { team_id: 456 } }, + body: { description: "New description" }, + }); + + expect(result).toEqual(updatedTeam); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/teams/456"); + }); + }); + + describe("delete operations", () => { + it("deletes team by ID", async () => { + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: { status: "deleted" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.teams.delete({ params: { path: { team_id: 123 } } }); + + expect(calls[1]?.method).toBe("DELETE"); + expect(calls[1]?.url).toContain("/teams/123"); + }); + + it("handles delete of non-existent team", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "Team not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.teams.delete({ params: { path: { team_id: 99999 } } }) + ).rejects.toThrow(); + }); + }); +}); diff --git a/packages/ts-sdk/tests/resources/users.test.ts b/packages/ts-sdk/tests/resources/users.test.ts new file mode 100644 index 0000000..ff8d2eb --- /dev/null +++ b/packages/ts-sdk/tests/resources/users.test.ts @@ -0,0 +1,203 @@ +/** + * Unit tests for Users resource operations. 
+ */ + +import { describe, expect, it, beforeEach } from "vitest"; +import { createMockFetch } from "../utils/mock-fetch.js"; +import { createUser, createUserList, resetIdCounter } from "../utils/factories/index.js"; +import { NexlaClient } from "../../src/client/nexla-client.js"; + +describe("UsersResource", () => { + beforeEach(() => { + resetIdCounter(); + }); + + describe("list operations", () => { + it("fetches all users", async () => { + const users = createUserList(3); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: users }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.users.list(); + + expect(result).toEqual(users); + expect(calls.length).toBe(2); + expect(calls[1]?.url).toContain("/users"); + }); + + it("passes query parameters correctly", async () => { + const users = createUserList(2); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: users }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await client.users.list({ params: { query: { page: 2, per_page: 10 } } }); + + const requestUrl = calls[1]?.url ?? 
""; + expect(requestUrl).toContain("page=2"); + expect(requestUrl).toContain("per_page=10"); + }); + + it("handles empty results", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: [] }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.users.list(); + + expect(result).toEqual([]); + }); + }); + + describe("get operations", () => { + it("fetches user by ID", async () => { + const user = createUser({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: user }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.users.get({ params: { path: { user_id: 123 } } }); + + expect(result).toEqual(user); + expect(calls[1]?.url).toContain("/users/123"); + }); + + it("handles user not found (404)", async () => { + const { fetchFn } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 404, body: { message: "User not found" } }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + await expect( + client.users.get({ params: { path: { user_id: 99999 } } }) + ).rejects.toThrow(); + }); + + it("fetches user with expand parameter", async () => { + const user = createUser({ id: 123 }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: user }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.users.get_user_expand({ 
params: { path: { user_id: 123 } } }); + + expect(result).toEqual(user); + expect(calls[1]?.url).toContain("/users/123"); + expect(calls[1]?.url).toContain("expand=1"); + }); + }); + + describe("create operations", () => { + it("creates a new user", async () => { + const newUser = createUser({ email: "newuser@test.com", full_name: "New User" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 201, body: newUser }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.users.create({ + body: { email: "newuser@test.com", full_name: "New User" }, + }); + + expect(result).toEqual(newUser); + expect(calls[1]?.method).toBe("POST"); + expect(calls[1]?.url).toContain("/users"); + }); + }); + + describe("update operations", () => { + it("updates existing user", async () => { + const updatedUser = createUser({ id: 123, full_name: "Updated User" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedUser }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, + }); + + const result = await client.users.update({ + params: { path: { user_id: 123 } }, + body: { full_name: "Updated User" }, + }); + + expect(result).toEqual(updatedUser); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/users/123"); + }); + + it("updates user role", async () => { + const updatedUser = createUser({ id: 123, role: "admin" }); + const { fetchFn, calls } = createMockFetch([ + { status: 200, body: { access_token: "token", expires_in: 7200 } }, + { status: 200, body: updatedUser }, + ]); + + const client = new NexlaClient({ + serviceKey: "test-key", + baseUrl: "https://test.nexla.io/nexla-api", + fetch: fetchFn, 
+ }); + + const result = await client.users.update_user({ + params: { path: { user_id: 123 } }, + body: { role: "admin" }, + }); + + expect(result).toEqual(updatedUser); + expect(calls[1]?.method).toBe("PUT"); + expect(calls[1]?.url).toContain("/users/123"); + }); + }); +}); diff --git a/packages/ts-sdk/tests/retry.test.ts b/packages/ts-sdk/tests/retry.test.ts new file mode 100644 index 0000000..fcb2c40 --- /dev/null +++ b/packages/ts-sdk/tests/retry.test.ts @@ -0,0 +1,22 @@ +import { describe, expect, it } from "vitest"; +import { createFetchWithRetry } from "../src/client/http.js"; + +const createResponse = (status: number) => + new Response("{}", { status, headers: { "content-type": "application/json" } }); + +describe("createFetchWithRetry", () => { + it("retries on retryable status codes", async () => { + let calls = 0; + const baseFetch: typeof fetch = async () => { + calls += 1; + if (calls < 3) return createResponse(503); + return createResponse(200); + }; + + const fetchWithRetry = createFetchWithRetry(baseFetch, { maxRetries: 3, backoffMs: 1, maxBackoffMs: 5 }); + const response = await fetchWithRetry(new Request("https://example.com")); + + expect(response.status).toBe(200); + expect(calls).toBe(3); + }); +}); diff --git a/packages/ts-sdk/tests/utils/factories/accessors.ts b/packages/ts-sdk/tests/utils/factories/accessors.ts new file mode 100644 index 0000000..8109c6c --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/accessors.ts @@ -0,0 +1,81 @@ +/** + * Accessor test data factory. 
+ */ + +import { generateId } from "./common.js"; + +export interface AccessorResponse { + id: number; + type: "USER" | "TEAM"; + access_roles: string[]; + org_id: number; + email?: string; + full_name?: string; + user_id?: number; + name?: string; + team_id?: number; +} + +export interface AccessorFactoryOptions { + id?: number; + type?: AccessorResponse["type"]; + access_roles?: string[]; + org_id?: number; + email?: string; + full_name?: string; + user_id?: number; + name?: string; + team_id?: number; +} + +/** + * Create a mock accessor response. + */ +export function createAccessor(options: AccessorFactoryOptions = {}): AccessorResponse { + const id = options.id ?? generateId(); + const type = options.type ?? "USER"; + + const base: AccessorResponse = { + id, + type, + access_roles: options.access_roles ?? ["collaborator"], + org_id: options.org_id ?? 1, + }; + + if (type === "USER") { + base.email = options.email ?? `user${id}@test.com`; + base.full_name = options.full_name ?? `Test User ${id}`; + base.user_id = options.user_id ?? id; + } else { + base.name = options.name ?? `Test Team ${id}`; + base.team_id = options.team_id ?? id; + } + + return base; +} + +/** + * Create a list of mock accessors. + */ +export function createAccessorList(count: number, options?: AccessorFactoryOptions): AccessorResponse[] { + return Array.from({ length: count }, (_, i) => { + // Alternate between USER and TEAM types + const type = i % 2 === 0 ? "USER" : "TEAM"; + return createAccessor({ ...options, type: options?.type ?? type }); + }); +} + +/** + * Create accessor request payload for adding/updating accessors. + */ +export function createAccessorRequest(options: { + id: number; + type: "USER" | "TEAM"; + access_roles?: string[]; +}): { id: number; type: string; access_roles: string[] } { + return { + id: options.id, + type: options.type, + access_roles: options.access_roles ?? 
["collaborator"], + }; +} diff --git a/packages/ts-sdk/tests/utils/factories/common.ts b/packages/ts-sdk/tests/utils/factories/common.ts new file mode 100644 index 0000000..a378a54 --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/common.ts @@ -0,0 +1,61 @@ +/** + * Common factory utilities and shared types. + */ + +let idCounter = 1; + +/** + * Generate a unique ID for test data. + */ +export function generateId(): number { + return idCounter++; +} + +/** + * Reset the ID counter (useful between test suites). + */ +export function resetIdCounter(): void { + idCounter = 1; +} + +/** + * Create a mock owner object. + */ +export function createOwner(overrides: Partial = {}): Owner { + return { + id: generateId(), + full_name: `Test User ${idCounter}`, + email: `user${idCounter}@test.com`, + ...overrides, + }; +} + +/** + * Create a mock organization object. + */ +export function createOrg(overrides: Partial = {}): Org { + return { + id: generateId(), + name: `Test Org ${idCounter}`, + ...overrides, + }; +} + +/** + * Create a timestamp string. + */ +export function createTimestamp(): string { + return new Date().toISOString(); +} + +// Type definitions +export interface Owner { + id: number; + full_name: string; + email: string; +} + +export interface Org { + id: number; + name: string; +} diff --git a/packages/ts-sdk/tests/utils/factories/credentials.ts b/packages/ts-sdk/tests/utils/factories/credentials.ts new file mode 100644 index 0000000..b317d86 --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/credentials.ts @@ -0,0 +1,64 @@ +/** + * Credential test data factory. 
+ */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface CredentialResponse { + id: number; + name: string; + description?: string; + credentials_type: string; + credentials_version: string; + verified_status: "VERIFIED" | "UNVERIFIED" | "FAILED"; + owner: Owner; + org: Org; + access_roles: string[]; + managed: boolean; + tags: string[]; + created_at: string; + updated_at: string; +} + +export interface CredentialFactoryOptions { + id?: number; + name?: string; + description?: string; + credentials_type?: string; + credentials_version?: string; + verified_status?: CredentialResponse["verified_status"]; + owner?: Partial; + org?: Partial; + access_roles?: string[]; + managed?: boolean; + tags?: string[]; +} + +/** + * Create a mock credential response. + */ +export function createCredential(options: CredentialFactoryOptions = {}): CredentialResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Credential ${id}`, + description: options.description, + credentials_type: options.credentials_type ?? "postgres", + credentials_version: options.credentials_version ?? "1", + verified_status: options.verified_status ?? "VERIFIED", + owner: createOwner(options.owner), + org: createOrg(options.org), + access_roles: options.access_roles ?? ["owner"], + managed: options.managed ?? false, + tags: options.tags ?? [], + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock credentials. 
+ */ +export function createCredentialList(count: number, options?: CredentialFactoryOptions): CredentialResponse[] { + return Array.from({ length: count }, () => createCredential(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/destinations.ts b/packages/ts-sdk/tests/utils/factories/destinations.ts new file mode 100644 index 0000000..dcaa48b --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/destinations.ts @@ -0,0 +1,70 @@ +/** + * Destination test data factory. + */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface DestinationResponse { + id: number; + name: string; + description?: string; + status: "ACTIVE" | "PAUSED" | "DRAFT" | "DELETED" | "ERROR" | "INIT"; + sink_type: string; + managed: boolean; + auto_generated: boolean; + in_memory: boolean; + owner: Owner; + org: Org; + access_roles: string[]; + data_set_id?: number; + tags: string[]; + created_at: string; + updated_at: string; +} + +export interface DestinationFactoryOptions { + id?: number; + name?: string; + description?: string; + status?: DestinationResponse["status"]; + sink_type?: string; + managed?: boolean; + auto_generated?: boolean; + in_memory?: boolean; + owner?: Partial; + org?: Partial; + access_roles?: string[]; + data_set_id?: number; + tags?: string[]; +} + +/** + * Create a mock destination response. + */ +export function createDestination(options: DestinationFactoryOptions = {}): DestinationResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Destination ${id}`, + description: options.description, + status: options.status ?? "ACTIVE", + sink_type: options.sink_type ?? "postgres", + managed: options.managed ?? false, + auto_generated: options.auto_generated ?? false, + in_memory: options.in_memory ?? false, + owner: createOwner(options.owner), + org: createOrg(options.org), + access_roles: options.access_roles ?? 
["owner"], + data_set_id: options.data_set_id, + tags: options.tags ?? [], + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock destinations. + */ +export function createDestinationList(count: number, options?: DestinationFactoryOptions): DestinationResponse[] { + return Array.from({ length: count }, () => createDestination(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/flows.ts b/packages/ts-sdk/tests/utils/factories/flows.ts new file mode 100644 index 0000000..8f7098a --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/flows.ts @@ -0,0 +1,104 @@ +/** + * Flow test data factory. + */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface FlowNode { + id: number; + node_type: "SOURCE" | "TRANSFORM" | "DESTINATION" | "NEXSET"; + resource_id: number; + resource_type: string; + status: "ACTIVE" | "PAUSED" | "ERROR" | "INIT"; + upstream_node_ids: number[]; + downstream_node_ids: number[]; +} + +export interface FlowResponse { + id: number; + name: string; + description?: string; + status: "ACTIVE" | "PAUSED" | "DRAFT" | "DELETED" | "ERROR" | "INIT"; + flow_type: "DATA_FLOW" | "INGESTION" | "TRANSFORMATION" | "DISTRIBUTION"; + owner: Owner; + org: Org; + nodes: FlowNode[]; + origin_node_id: number | null; + data_sources: unknown[]; + data_sets: unknown[]; + data_sinks: unknown[]; + access_roles: string[]; + created_at: string; + updated_at: string; +} + +export interface FlowNodeFactoryOptions { + id?: number; + node_type?: FlowNode["node_type"]; + resource_id?: number; + resource_type?: string; + status?: FlowNode["status"]; + upstream_node_ids?: number[]; + downstream_node_ids?: number[]; +} + +export interface FlowFactoryOptions { + id?: number; + name?: string; + description?: string; + status?: FlowResponse["status"]; + flow_type?: FlowResponse["flow_type"]; + owner?: Partial; + org?: Partial; + nodes?: FlowNode[]; + 
origin_node_id?: number | null; + access_roles?: string[]; +} + +/** + * Create a mock flow node. + */ +export function createFlowNode(options: FlowNodeFactoryOptions = {}): FlowNode { + const id = options.id ?? generateId(); + return { + id, + node_type: options.node_type ?? "SOURCE", + resource_id: options.resource_id ?? generateId(), + resource_type: options.resource_type ?? "data_source", + status: options.status ?? "ACTIVE", + upstream_node_ids: options.upstream_node_ids ?? [], + downstream_node_ids: options.downstream_node_ids ?? [], + }; +} + +/** + * Create a mock flow response. + */ +export function createFlow(options: FlowFactoryOptions = {}): FlowResponse { + const id = options.id ?? generateId(); + const nodes = options.nodes ?? [createFlowNode()]; + return { + id, + name: options.name ?? `Test Flow ${id}`, + description: options.description, + status: options.status ?? "ACTIVE", + flow_type: options.flow_type ?? "DATA_FLOW", + owner: createOwner(options.owner), + org: createOrg(options.org), + nodes, + origin_node_id: options.origin_node_id ?? (nodes.length > 0 ? nodes[0]!.id : null), + data_sources: [], + data_sets: [], + data_sinks: [], + access_roles: options.access_roles ?? ["owner"], + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock flows. + */ +export function createFlowList(count: number, options?: FlowFactoryOptions): FlowResponse[] { + return Array.from({ length: count }, () => createFlow(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/index.ts b/packages/ts-sdk/tests/utils/factories/index.ts new file mode 100644 index 0000000..e9aab02 --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/index.ts @@ -0,0 +1,15 @@ +/** + * Test data factories for creating realistic mock data. 
+ */ + +export * from "./sources.js"; +export * from "./credentials.js"; +export * from "./accessors.js"; +export * from "./destinations.js"; +export * from "./nexsets.js"; +export * from "./teams.js"; +export * from "./flows.js"; +export * from "./organizations.js"; +export * from "./users.js"; +export * from "./projects.js"; +export * from "./common.js"; diff --git a/packages/ts-sdk/tests/utils/factories/nexsets.ts b/packages/ts-sdk/tests/utils/factories/nexsets.ts new file mode 100644 index 0000000..e4fca40 --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/nexsets.ts @@ -0,0 +1,73 @@ +/** + * Nexset test data factory. + */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface NexsetResponse { + id: number; + name: string; + description?: string; + status: "ACTIVE" | "PAUSED" | "DRAFT" | "DELETED" | "ERROR" | "INIT"; + data_format: string; + managed: boolean; + auto_generated: boolean; + owner: Owner; + org: Org; + access_roles: string[]; + data_source_id?: number; + tags: string[]; + input_schema?: unknown; + output_schema?: unknown; + created_at: string; + updated_at: string; +} + +export interface NexsetFactoryOptions { + id?: number; + name?: string; + description?: string; + status?: NexsetResponse["status"]; + data_format?: string; + managed?: boolean; + auto_generated?: boolean; + owner?: Partial; + org?: Partial; + access_roles?: string[]; + data_source_id?: number; + tags?: string[]; + input_schema?: unknown; + output_schema?: unknown; +} + +/** + * Create a mock nexset response. + */ +export function createNexset(options: NexsetFactoryOptions = {}): NexsetResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Nexset ${id}`, + description: options.description, + status: options.status ?? "ACTIVE", + data_format: options.data_format ?? "JSON", + managed: options.managed ?? false, + auto_generated: options.auto_generated ?? 
false, + owner: createOwner(options.owner), + org: createOrg(options.org), + access_roles: options.access_roles ?? ["owner"], + data_source_id: options.data_source_id, + tags: options.tags ?? [], + input_schema: options.input_schema, + output_schema: options.output_schema, + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock nexsets. + */ +export function createNexsetList(count: number, options?: NexsetFactoryOptions): NexsetResponse[] { + return Array.from({ length: count }, () => createNexset(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/organizations.ts b/packages/ts-sdk/tests/utils/factories/organizations.ts new file mode 100644 index 0000000..88311c5 --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/organizations.ts @@ -0,0 +1,82 @@ +/** + * Organization test data factory. + */ + +import { createOwner, createTimestamp, generateId, type Owner } from "./common.js"; + +export interface OrganizationMember { + id: number; + email: string; + full_name: string; + role: "owner" | "admin" | "member"; +} + +export interface OrganizationResponse { + id: number; + name: string; + description?: string; + email_domain?: string; + owner: Owner; + members: OrganizationMember[]; + custodians: OrganizationMember[]; + status: "ACTIVE" | "INACTIVE" | "SUSPENDED"; + created_at: string; + updated_at: string; +} + +export interface OrganizationMemberOptions { + id?: number; + email?: string; + full_name?: string; + role?: OrganizationMember["role"]; +} + +export interface OrganizationFactoryOptions { + id?: number; + name?: string; + description?: string; + email_domain?: string; + owner?: Partial; + members?: OrganizationMemberOptions[]; + custodians?: OrganizationMemberOptions[]; + status?: OrganizationResponse["status"]; +} + +/** + * Create a mock organization member. 
+ */ +export function createOrganizationMember(options: OrganizationMemberOptions = {}): OrganizationMember { + const id = options.id ?? generateId(); + return { + id, + email: options.email ?? `member${id}@test.com`, + full_name: options.full_name ?? `Test Member ${id}`, + role: options.role ?? "member", + }; +} + +/** + * Create a mock organization response. + */ +export function createOrganization(options: OrganizationFactoryOptions = {}): OrganizationResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Organization ${id}`, + description: options.description, + email_domain: options.email_domain, + owner: createOwner(options.owner), + members: options.members?.map(createOrganizationMember) ?? [], + custodians: options.custodians?.map(createOrganizationMember) ?? [], + status: options.status ?? "ACTIVE", + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock organizations. + */ +export function createOrganizationList(count: number, options?: OrganizationFactoryOptions): OrganizationResponse[] { + return Array.from({ length: count }, () => createOrganization(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/projects.ts b/packages/ts-sdk/tests/utils/factories/projects.ts new file mode 100644 index 0000000..652a99c --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/projects.ts @@ -0,0 +1,74 @@ +/** + * Project test data factory. 
+ */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface FlowReference { + id: number; + name: string; +} + +export interface ProjectResponse { + id: number; + name: string; + description?: string; + owner: Owner; + org: Org; + flows: FlowReference[]; + data_flows: FlowReference[]; + access_roles: string[]; + copied_from_id?: number; + created_at: string; + updated_at: string; +} + +export interface ProjectFactoryOptions { + id?: number; + name?: string; + description?: string; + owner?: Partial; + org?: Partial; + flows?: FlowReference[]; + data_flows?: FlowReference[]; + access_roles?: string[]; + copied_from_id?: number; +} + +/** + * Create a mock flow reference. + */ +export function createFlowReference(overrides: Partial = {}): FlowReference { + const id = overrides.id ?? generateId(); + return { + id, + name: overrides.name ?? `Test Flow ${id}`, + }; +} + +/** + * Create a mock project response. + */ +export function createProject(options: ProjectFactoryOptions = {}): ProjectResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Project ${id}`, + description: options.description, + owner: createOwner(options.owner), + org: createOrg(options.org), + flows: options.flows ?? [], + data_flows: options.data_flows ?? [], + access_roles: options.access_roles ?? ["owner"], + copied_from_id: options.copied_from_id, + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock projects. 
+ */ +export function createProjectList(count: number, options?: ProjectFactoryOptions): ProjectResponse[] { + return Array.from({ length: count }, () => createProject(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/sources.ts b/packages/ts-sdk/tests/utils/factories/sources.ts new file mode 100644 index 0000000..3633cad --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/sources.ts @@ -0,0 +1,76 @@ +/** + * Source test data factory. + */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface SourceResponse { + id: number; + name: string; + description?: string; + status: "ACTIVE" | "PAUSED" | "DRAFT" | "DELETED" | "ERROR" | "INIT"; + source_type: string; + ingest_method: string; + source_format: string; + managed: boolean; + auto_generated: boolean; + owner: Owner; + org: Org; + access_roles: string[]; + data_sets: unknown[]; + data_credentials?: unknown; + tags: string[]; + run_ids: unknown[]; + created_at: string; + updated_at: string; +} + +export interface SourceFactoryOptions { + id?: number; + name?: string; + description?: string; + status?: SourceResponse["status"]; + source_type?: string; + ingest_method?: string; + source_format?: string; + managed?: boolean; + auto_generated?: boolean; + owner?: Partial; + org?: Partial; + access_roles?: string[]; + tags?: string[]; +} + +/** + * Create a mock source response. + */ +export function createSource(options: SourceFactoryOptions = {}): SourceResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Source ${id}`, + description: options.description, + status: options.status ?? "ACTIVE", + source_type: options.source_type ?? "postgres", + ingest_method: options.ingest_method ?? "POLL", + source_format: options.source_format ?? "JSON", + managed: options.managed ?? false, + auto_generated: options.auto_generated ?? 
false, + owner: createOwner(options.owner), + org: createOrg(options.org), + access_roles: options.access_roles ?? ["owner"], + data_sets: [], + data_credentials: undefined, + tags: options.tags ?? [], + run_ids: [], + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock sources. + */ +export function createSourceList(count: number, options?: SourceFactoryOptions): SourceResponse[] { + return Array.from({ length: count }, () => createSource(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/teams.ts b/packages/ts-sdk/tests/utils/factories/teams.ts new file mode 100644 index 0000000..44741bd --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/teams.ts @@ -0,0 +1,82 @@ +/** + * Team test data factory. + */ + +import { createOwner, createOrg, createTimestamp, generateId, type Owner, type Org } from "./common.js"; + +export interface TeamMember { + id: number; + email: string; + full_name: string; + admin: boolean; +} + +export interface TeamResponse { + id: number; + name: string; + description?: string; + owner: Owner; + org: Org; + members: TeamMember[]; + member: boolean; + access_roles: string[]; + created_at: string; + updated_at: string; +} + +export interface TeamMemberFactoryOptions { + id?: number; + email?: string; + full_name?: string; + admin?: boolean; +} + +export interface TeamFactoryOptions { + id?: number; + name?: string; + description?: string; + owner?: Partial; + org?: Partial; + members?: TeamMember[]; + member?: boolean; + access_roles?: string[]; +} + +/** + * Create a mock team member. + */ +export function createTeamMember(options: TeamMemberFactoryOptions = {}): TeamMember { + const id = options.id ?? generateId(); + return { + id, + email: options.email ?? `member${id}@test.com`, + full_name: options.full_name ?? `Test Member ${id}`, + admin: options.admin ?? false, + }; +} + +/** + * Create a mock team response. 
+ */ +export function createTeam(options: TeamFactoryOptions = {}): TeamResponse { + const id = options.id ?? generateId(); + return { + id, + name: options.name ?? `Test Team ${id}`, + description: options.description, + owner: createOwner(options.owner), + org: createOrg(options.org), + members: options.members ?? [], + member: options.member ?? true, + access_roles: options.access_roles ?? ["owner"], + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock teams. + */ +export function createTeamList(count: number, options?: TeamFactoryOptions): TeamResponse[] { + return Array.from({ length: count }, () => createTeam(options)); +} diff --git a/packages/ts-sdk/tests/utils/factories/users.ts b/packages/ts-sdk/tests/utils/factories/users.ts new file mode 100644 index 0000000..13441ab --- /dev/null +++ b/packages/ts-sdk/tests/utils/factories/users.ts @@ -0,0 +1,72 @@ +/** + * User test data factory. + */ + +import { createOrg, createTimestamp, generateId, type Org } from "./common.js"; + +export interface UserTeam { + id: number; + name: string; +} + +export interface UserResponse { + id: number; + email: string; + full_name: string; + status: "ACTIVE" | "PENDING" | "INACTIVE" | "DELETED"; + org: Org; + orgs: Org[]; + teams: UserTeam[]; + role: "admin" | "member" | "viewer"; + created_at: string; + updated_at: string; +} + +export interface UserFactoryOptions { + id?: number; + email?: string; + full_name?: string; + status?: UserResponse["status"]; + org?: Partial; + orgs?: Partial[]; + teams?: Partial[]; + role?: UserResponse["role"]; +} + +/** + * Create a mock team object. + */ +function createTeam(overrides: Partial = {}): UserTeam { + const id = overrides.id ?? generateId(); + return { + id, + name: overrides.name ?? `Test Team ${id}`, + }; +} + +/** + * Create a mock user response. + */ +export function createUser(options: UserFactoryOptions = {}): UserResponse { + const id = options.id ?? 
generateId(); + const org = createOrg(options.org); + return { + id, + email: options.email ?? `user${id}@test.com`, + full_name: options.full_name ?? `Test User ${id}`, + status: options.status ?? "ACTIVE", + org, + orgs: options.orgs ? options.orgs.map((o) => createOrg(o)) : [org], + teams: options.teams ? options.teams.map((t) => createTeam(t)) : [], + role: options.role ?? "member", + created_at: createTimestamp(), + updated_at: createTimestamp(), + }; +} + +/** + * Create a list of mock users. + */ +export function createUserList(count: number, options?: UserFactoryOptions): UserResponse[] { + return Array.from({ length: count }, () => createUser(options)); +} diff --git a/packages/ts-sdk/tests/utils/index.ts b/packages/ts-sdk/tests/utils/index.ts new file mode 100644 index 0000000..ecd0c94 --- /dev/null +++ b/packages/ts-sdk/tests/utils/index.ts @@ -0,0 +1,7 @@ +/** + * Test utilities index. + */ + +export * from "./mock-fetch.js"; +export * from "./test-client.js"; +export * from "./factories/index.js"; diff --git a/packages/ts-sdk/tests/utils/mock-fetch.ts b/packages/ts-sdk/tests/utils/mock-fetch.ts new file mode 100644 index 0000000..226506d --- /dev/null +++ b/packages/ts-sdk/tests/utils/mock-fetch.ts @@ -0,0 +1,140 @@ +/** + * Mock fetch utilities for testing. + * Replaces duplicated createMockFetch patterns across test files. + */ + +export interface MockResponse { + status: number; + body?: unknown; + headers?: Record; +} + +export interface MockFetchResult { + fetchFn: typeof fetch; + calls: Request[]; + getCall(index: number): Request | undefined; + getLastCall(): Request | undefined; + clearCalls(): void; + getRequestBody(index: number): Promise; +} + +/** + * Create a mock fetch function that returns responses in order. + * Each call consumes the next response from the queue. 
+ */ +export function createMockFetch(responses: MockResponse[]): MockFetchResult { + const calls: Request[] = []; + const responseQueue = [...responses]; + + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + const next = responseQueue.shift() ?? { status: 200, body: {} }; + const body = JSON.stringify(next.body ?? {}); + const headers: Record = { + "content-type": "application/json", + ...(next.headers ?? {}), + }; + + return new Response(body, { + status: next.status, + headers, + }); + }; + + return { + fetchFn, + calls, + getCall(index: number): Request | undefined { + return calls[index]; + }, + getLastCall(): Request | undefined { + return calls[calls.length - 1]; + }, + clearCalls(): void { + calls.length = 0; + }, + async getRequestBody(index: number): Promise { + const request = calls[index]; + if (!request) return undefined; + try { + const cloned = request.clone(); + return await cloned.json(); + } catch { + return undefined; + } + }, + }; +} + +/** + * Create a mock fetch that uses a handler function for dynamic responses. + */ +export function createDynamicMockFetch( + handler: (request: Request) => MockResponse | Promise +): MockFetchResult { + const calls: Request[] = []; + + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + const response = await handler(request); + const body = JSON.stringify(response.body ?? {}); + const headers: Record = { + "content-type": "application/json", + ...(response.headers ?? 
{}), + }; + + return new Response(body, { + status: response.status, + headers, + }); + }; + + return { + fetchFn, + calls, + getCall(index: number): Request | undefined { + return calls[index]; + }, + getLastCall(): Request | undefined { + return calls[calls.length - 1]; + }, + clearCalls(): void { + calls.length = 0; + }, + async getRequestBody(index: number): Promise { + const request = calls[index]; + if (!request) return undefined; + try { + const cloned = request.clone(); + return await cloned.json(); + } catch { + return undefined; + } + }, + }; +} + +/** + * Create pagination response headers. + */ +export function paginationHeaders(options: { + pageCount?: number; + totalPageCount?: number; + currentPage?: number; +}): Record { + const headers: Record = {}; + if (options.pageCount !== undefined) { + headers["X-Page-Count"] = String(options.pageCount); + } + if (options.totalPageCount !== undefined) { + headers["X-Total-Page-Count"] = String(options.totalPageCount); + } + if (options.currentPage !== undefined) { + headers["X-Current-Page"] = String(options.currentPage); + } + return headers; +} diff --git a/packages/ts-sdk/tests/utils/test-client.ts b/packages/ts-sdk/tests/utils/test-client.ts new file mode 100644 index 0000000..7d791de --- /dev/null +++ b/packages/ts-sdk/tests/utils/test-client.ts @@ -0,0 +1,65 @@ +/** + * Test client setup utilities. + */ + +import { NexlaClient } from "../../src/client/nexla-client.js"; +import { createMockFetch, type MockFetchResult, type MockResponse } from "./mock-fetch.js"; + +export interface TestClientOptions { + mockResponses?: MockResponse[]; + accessToken?: string; + baseUrl?: string; +} + +export interface TestClientResult { + client: NexlaClient; + mockFetch: MockFetchResult; +} + +/** + * Create a test client with mocked fetch. + * Automatically includes a token response for service key auth. 
+ */ +export function createTestClient(options: TestClientOptions = {}): TestClientResult { + const { + mockResponses = [], + accessToken, + baseUrl = "https://test.nexla.io/nexla-api", + } = options; + + // If using service key auth, prepend token response + const allResponses: MockResponse[] = accessToken + ? [...mockResponses] + : [ + { status: 200, body: { access_token: "test-token", expires_in: 7200 } }, + ...mockResponses, + ]; + + const mockFetch = createMockFetch(allResponses); + + const client = accessToken + ? new NexlaClient({ + accessToken, + baseUrl, + fetch: mockFetch.fetchFn, + }) + : new NexlaClient({ + serviceKey: "test-service-key", + baseUrl, + fetch: mockFetch.fetchFn, + }); + + return { client, mockFetch }; +} + +/** + * Create a test client for direct token auth (no token refresh). + */ +export function createDirectTokenClient( + mockResponses: MockResponse[] = [] +): TestClientResult { + return createTestClient({ + mockResponses, + accessToken: "direct-test-token", + }); +} diff --git a/packages/ts-sdk/tests/webhooks-error.test.ts b/packages/ts-sdk/tests/webhooks-error.test.ts new file mode 100644 index 0000000..c90cb5a --- /dev/null +++ b/packages/ts-sdk/tests/webhooks-error.test.ts @@ -0,0 +1,24 @@ +import { describe, expect, it } from "vitest"; +import { WebhooksClient } from "../src/webhooks/index.js"; +import { NexlaError } from "../src/errors.js"; + +const createMockFetch = (status: number, body?: unknown) => { + const fetchFn: typeof fetch = async () => { + return new Response(JSON.stringify(body ?? 
{}), { + status, + headers: { "content-type": "application/json" } + }); + }; + return fetchFn; +}; + +describe("WebhooksClient errors", () => { + it("throws NexlaError on non-200", async () => { + const fetchFn = createMockFetch(500, { error: "boom" }); + const client = new WebhooksClient({ apiKey: "abc", fetch: fetchFn }); + + await expect( + client.sendOneRecord("https://example.com/webhook", { id: 1 }) + ).rejects.toBeInstanceOf(NexlaError); + }); +}); diff --git a/packages/ts-sdk/tests/webhooks.test.ts b/packages/ts-sdk/tests/webhooks.test.ts new file mode 100644 index 0000000..600f9ba --- /dev/null +++ b/packages/ts-sdk/tests/webhooks.test.ts @@ -0,0 +1,41 @@ +import { describe, expect, it } from "vitest"; +import { WebhooksClient } from "../src/webhooks/index.js"; + +const createMockFetch = (status: number, body?: unknown) => { + const calls: Request[] = []; + const fetchFn: typeof fetch = async (input, init) => { + const request = input instanceof Request ? input : new Request(input, init); + calls.push(request); + + return new Response(JSON.stringify(body ?? 
{}), { + status, + headers: { "content-type": "application/json" } + }); + }; + return { fetchFn, calls }; +}; + +describe("WebhooksClient", () => { + it("sends record with api_key query param", async () => { + const { fetchFn, calls } = createMockFetch(200, { ok: true }); + const client = new WebhooksClient({ apiKey: "abc", fetch: fetchFn }); + + await client.sendOneRecord("https://example.com/webhook", { id: 1 }); + + const request = calls[0]; + if (!request) throw new Error("No request captured"); + const url = new URL(request.url); + expect(url.searchParams.get("api_key")).toBe("abc"); + }); + + it("sends record with basic auth header when authMethod=header", async () => { + const { fetchFn, calls } = createMockFetch(200, { ok: true }); + const client = new WebhooksClient({ apiKey: "abc", fetch: fetchFn }); + + await client.sendOneRecord("https://example.com/webhook", { id: 1 }, { authMethod: "header" }); + + const request = calls[0]; + if (!request) throw new Error("No request captured"); + expect(request.headers.get("Authorization")).toBe(`Basic ${Buffer.from("abc").toString("base64")}`); + }); +}); diff --git a/packages/ts-sdk/tsconfig.json b/packages/ts-sdk/tsconfig.json new file mode 100644 index 0000000..b4ed568 --- /dev/null +++ b/packages/ts-sdk/tsconfig.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "ESNext", + "moduleResolution": "Bundler", + "lib": ["ES2022", "DOM"], + "declaration": true, + "declarationMap": true, + "sourceMap": true, + "outDir": "dist", + "rootDir": ".", + "strict": true, + "noImplicitAny": true, + "noUncheckedIndexedAccess": true, + "exactOptionalPropertyTypes": true, + "noImplicitOverride": true, + "useUnknownInCatchVariables": true, + "noFallthroughCasesInSwitch": true, + "skipLibCheck": true, + "types": ["node"], + "esModuleInterop": true + }, + "include": ["src", "tests"], + "exclude": ["dist", "node_modules"] +} diff --git a/packages/ts-sdk/tsconfig.typecheck.json 
b/packages/ts-sdk/tsconfig.typecheck.json new file mode 100644 index 0000000..585c02f --- /dev/null +++ b/packages/ts-sdk/tsconfig.typecheck.json @@ -0,0 +1,5 @@ +{ + "extends": "./tsconfig.json", + "include": ["src"], + "exclude": ["dist", "node_modules", "tests"] +} diff --git a/packages/ts-sdk/tsup.config.ts b/packages/ts-sdk/tsup.config.ts new file mode 100644 index 0000000..a69d5ac --- /dev/null +++ b/packages/ts-sdk/tsup.config.ts @@ -0,0 +1,12 @@ +import { defineConfig } from "tsup"; + +export default defineConfig({ + entry: ["src/index.ts"], + format: ["esm", "cjs"], + dts: true, + sourcemap: true, + clean: true, + splitting: false, + outDir: "dist", + target: "node18" +}); diff --git a/packages/ts-sdk/vitest.config.ts b/packages/ts-sdk/vitest.config.ts new file mode 100644 index 0000000..f365a69 --- /dev/null +++ b/packages/ts-sdk/vitest.config.ts @@ -0,0 +1,31 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + environment: "node", + coverage: { + provider: "v8", + reportsDirectory: "./coverage", + reporter: ["text", "lcov"], + thresholds: { + lines: 80, + functions: 80, + statements: 80, + branches: 70 + }, + exclude: [ + "dist/**", + "coverage/**", + "tests/**", + "src/generated/**", + "src/resources/generated/**", + "scripts/**", + "src/index.ts", + "**/*.config.*", + "src/auth/types.ts", + "src/client/types.ts", + "src/resources/index.ts" + ] + } + } +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml new file mode 100644 index 0000000..741c494 --- /dev/null +++ b/pnpm-lock.yaml @@ -0,0 +1,15591 @@ +lockfileVersion: '9.0' + +settings: + autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + devDependencies: + '@changesets/cli': + specifier: ^2.27.9 + version: 2.29.8(@types/node@20.19.30) + turbo: + specifier: ^2.3.3 + version: 2.8.1 + + docs-site: + dependencies: + '@docusaurus/core': + specifier: ^3.8.1 + version: 
3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/preset-classic': + specifier: ^3.8.1 + version: 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-mermaid': + specifier: ^3.8.1 + version: 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@easyops-cn/docusaurus-search-local': + specifier: ^0.52.1 + version: 0.52.3(@docusaurus/theme-common@3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + clsx: + specifier: ^2.1.1 + version: 2.1.1 + dotenv: + specifier: ^16.4.5 + version: 16.6.1 + react: + specifier: ^18.3.1 + version: 18.3.1 + react-dom: + specifier: ^18.3.1 + version: 18.3.1(react@18.3.1) + devDependencies: + '@docusaurus/module-type-aliases': + specifier: ^3.8.1 + version: 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/node': + specifier: ^20.12.12 + version: 20.19.30 + '@types/react': + specifier: ^18.3.4 + version: 18.3.27 + '@types/react-dom': + specifier: ^18.3.0 + version: 18.3.7(@types/react@18.3.27) + markdownlint: + specifier: ^0.33.0 + version: 0.33.0 + prettier: + specifier: ^3.3.3 + version: 3.8.1 + typescript: + specifier: ^5.4.5 + version: 5.9.3 + + packages/ts-sdk: + dependencies: + openapi-fetch: + specifier: ^0.10.2 + version: 0.10.6 + devDependencies: + '@types/node': + specifier: ^20.11.30 + 
version: 20.19.30 + '@typescript-eslint/eslint-plugin': + specifier: ^8.25.0 + version: 8.54.0(@typescript-eslint/parser@8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/parser': + specifier: ^8.25.0 + version: 8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@vitest/coverage-v8': + specifier: ^2.1.8 + version: 2.1.9(vitest@2.1.9(@types/node@20.19.30)(terser@5.46.0)) + eslint: + specifier: ^9.18.0 + version: 9.39.2(jiti@1.21.7) + openapi-typescript: + specifier: ^6.7.6 + version: 6.7.6 + openapi-typescript-helpers: + specifier: ^0.0.11 + version: 0.0.11 + tsup: + specifier: ^8.3.5 + version: 8.5.1(jiti@1.21.7)(postcss@8.5.6)(typescript@5.9.3)(yaml@2.8.2) + typescript: + specifier: ^5.7.3 + version: 5.9.3 + vitest: + specifier: ^2.1.8 + version: 2.1.9(@types/node@20.19.30)(terser@5.46.0) + yaml: + specifier: ^2.4.5 + version: 2.8.2 + +packages: + + '@algolia/abtesting@1.13.0': + resolution: {integrity: sha512-Zrqam12iorp3FjiKMXSTpedGYznZ3hTEOAr2oCxI8tbF8bS1kQHClyDYNq/eV0ewMNLyFkgZVWjaS+8spsOYiQ==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-abtesting@5.47.0': + resolution: {integrity: sha512-aOpsdlgS9xTEvz47+nXmw8m0NtUiQbvGWNuSEb7fA46iPL5FxOmOUZkh8PREBJpZ0/H8fclSc7BMJCVr+Dn72w==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-analytics@5.47.0': + resolution: {integrity: sha512-EcF4w7IvIk1sowrO7Pdy4Ako7x/S8+nuCgdk6En+u5jsaNQM4rTT09zjBPA+WQphXkA2mLrsMwge96rf6i7Mow==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-common@5.47.0': + resolution: {integrity: sha512-Wzg5Me2FqgRDj0lFuPWFK05UOWccSMsIBL2YqmTmaOzxVlLZ+oUqvKbsUSOE5ud8Fo1JU7JyiLmEXBtgDKzTwg==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-insights@5.47.0': + resolution: {integrity: sha512-Ci+cn/FDIsDxSKMRBEiyKrqybblbk8xugo6ujDN1GSTv9RIZxwxqZYuHfdLnLEwLlX7GB8pqVyqrUSlRnR+sJA==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-personalization@5.47.0': + resolution: {integrity: 
sha512-gsLnHPZmWcX0T3IigkDL2imCNtsQ7dR5xfnwiFsb+uTHCuYQt+IwSNjsd8tok6HLGLzZrliSaXtB5mfGBtYZvQ==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-query-suggestions@5.47.0': + resolution: {integrity: sha512-PDOw0s8WSlR2fWFjPQldEpmm/gAoUgLigvC3k/jCSi/DzigdGX6RdC0Gh1RR1P8Cbk5KOWYDuL3TNzdYwkfDyA==} + engines: {node: '>= 14.0.0'} + + '@algolia/client-search@5.47.0': + resolution: {integrity: sha512-b5hlU69CuhnS2Rqgsz7uSW0t4VqrLMLTPbUpEl0QVz56rsSwr1Sugyogrjb493sWDA+XU1FU5m9eB8uH7MoI0g==} + engines: {node: '>= 14.0.0'} + + '@algolia/events@4.0.1': + resolution: {integrity: sha512-FQzvOCgoFXAbf5Y6mYozw2aj5KCJoA3m4heImceldzPSMbdyS4atVjJzXKMsfX3wnZTFYwkkt8/z8UesLHlSBQ==} + + '@algolia/ingestion@1.47.0': + resolution: {integrity: sha512-WvwwXp5+LqIGISK3zHRApLT1xkuEk320/EGeD7uYy+K8WwDd5OjXnhjuXRhYr1685KnkvWkq1rQ/ihCJjOfHpQ==} + engines: {node: '>= 14.0.0'} + + '@algolia/monitoring@1.47.0': + resolution: {integrity: sha512-j2EUFKAlzM0TE4GRfkDE3IDfkVeJdcbBANWzK16Tb3RHz87WuDfQ9oeEW6XiRE1/bEkq2xf4MvZesvSeQrZRDA==} + engines: {node: '>= 14.0.0'} + + '@algolia/recommend@5.47.0': + resolution: {integrity: sha512-+kTSE4aQ1ARj2feXyN+DMq0CIDHJwZw1kpxIunedkmpWUg8k3TzFwWsMCzJVkF2nu1UcFbl7xsIURz3Q3XwOXA==} + engines: {node: '>= 14.0.0'} + + '@algolia/requester-browser-xhr@5.47.0': + resolution: {integrity: sha512-Ja+zPoeSA2SDowPwCNRbm5Q2mzDvVV8oqxCQ4m6SNmbKmPlCfe30zPfrt9ho3kBHnsg37pGucwOedRIOIklCHw==} + engines: {node: '>= 14.0.0'} + + '@algolia/requester-fetch@5.47.0': + resolution: {integrity: sha512-N6nOvLbaR4Ge+oVm7T4W/ea1PqcSbsHR4O58FJ31XtZjFPtOyxmnhgCmGCzP9hsJI6+x0yxJjkW5BMK/XI8OvA==} + engines: {node: '>= 14.0.0'} + + '@algolia/requester-node-http@5.47.0': + resolution: {integrity: sha512-z1oyLq5/UVkohVXNDEY70mJbT/sv/t6HYtCvCwNrOri6pxBJDomP9R83KOlwcat+xqBQEdJHjbrPh36f1avmZA==} + engines: {node: '>= 14.0.0'} + + '@ampproject/remapping@2.3.0': + resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==} + engines: {node: 
'>=6.0.0'} + + '@antfu/install-pkg@1.1.0': + resolution: {integrity: sha512-MGQsmw10ZyI+EJo45CdSER4zEb+p31LpDAFp2Z3gkSd1yqVZGi0Ebx++YTEMonJy4oChEMLsxZ64j8FH6sSqtQ==} + + '@babel/code-frame@7.29.0': + resolution: {integrity: sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==} + engines: {node: '>=6.9.0'} + + '@babel/compat-data@7.29.0': + resolution: {integrity: sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==} + engines: {node: '>=6.9.0'} + + '@babel/core@7.29.0': + resolution: {integrity: sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==} + engines: {node: '>=6.9.0'} + + '@babel/generator@7.29.0': + resolution: {integrity: sha512-vSH118/wwM/pLR38g/Sgk05sNtro6TlTJKuiMXDaZqPUfjTFcudpCOt00IhOfj+1BFAX+UFAlzCU+6WXr3GLFQ==} + engines: {node: '>=6.9.0'} + + '@babel/helper-annotate-as-pure@7.27.3': + resolution: {integrity: sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-compilation-targets@7.28.6': + resolution: {integrity: sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-create-class-features-plugin@7.28.6': + resolution: {integrity: sha512-dTOdvsjnG3xNT9Y0AUg1wAl38y+4Rl4sf9caSQZOXdNqVn+H+HbbJ4IyyHaIqNR6SW9oJpA/RuRjsjCw2IdIow==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-create-regexp-features-plugin@7.28.5': + resolution: {integrity: sha512-N1EhvLtHzOvj7QQOUCCS3NrPJP8c5W6ZXCHDn7Yialuy1iu4r5EmIYkXlKNqT99Ciw+W0mDqWoR6HWMZlFP3hw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-define-polyfill-provider@0.6.6': + resolution: {integrity: sha512-mOAsxeeKkUKayvZR3HeTYD/fICpCPLJrU5ZjelT/PA6WHtNDBOE436YiaEUvHN454bRM3CebhDsIpieCc4texA==} + peerDependencies: + 
'@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 + + '@babel/helper-globals@7.28.0': + resolution: {integrity: sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-member-expression-to-functions@7.28.5': + resolution: {integrity: sha512-cwM7SBRZcPCLgl8a7cY0soT1SptSzAlMH39vwiRpOQkJlh53r5hdHwLSCZpQdVLT39sZt+CRpNwYG4Y2v77atg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-imports@7.28.6': + resolution: {integrity: sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-module-transforms@7.28.6': + resolution: {integrity: sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-optimise-call-expression@7.27.1': + resolution: {integrity: sha512-URMGH08NzYFhubNSGJrpUEphGKQwMQYBySzat5cAByY1/YgIRkULnIy3tAMeszlL/so2HbeilYloUmSpd7GdVw==} + engines: {node: '>=6.9.0'} + + '@babel/helper-plugin-utils@7.28.6': + resolution: {integrity: sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==} + engines: {node: '>=6.9.0'} + + '@babel/helper-remap-async-to-generator@7.27.1': + resolution: {integrity: sha512-7fiA521aVw8lSPeI4ZOD3vRFkoqkJcS+z4hFo82bFSH/2tNd6eJ5qCVMS5OzDmZh/kaHQeBaeyxK6wljcPtveA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-replace-supers@7.28.6': + resolution: {integrity: sha512-mq8e+laIk94/yFec3DxSjCRD2Z0TAjhVbEJY3UQrlwVo15Lmt7C2wAUbK4bjnTs4APkwsYLTahXRraQXhb1WCg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/helper-skip-transparent-expression-wrappers@7.27.1': + resolution: {integrity: sha512-Tub4ZKEXqbPjXgWLl2+3JpQAYBJ8+ikpQ2Ocj/q/r0LwE3UhENh7EUabyHjz2kCEsrRY83ew2DQdHluuiDQFzg==} + engines: {node: '>=6.9.0'} + + 
'@babel/helper-string-parser@7.27.1': + resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-identifier@7.28.5': + resolution: {integrity: sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==} + engines: {node: '>=6.9.0'} + + '@babel/helper-validator-option@7.27.1': + resolution: {integrity: sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==} + engines: {node: '>=6.9.0'} + + '@babel/helper-wrap-function@7.28.6': + resolution: {integrity: sha512-z+PwLziMNBeSQJonizz2AGnndLsP2DeGHIxDAn+wdHOGuo4Fo1x1HBPPXeE9TAOPHNNWQKCSlA2VZyYyyibDnQ==} + engines: {node: '>=6.9.0'} + + '@babel/helpers@7.28.6': + resolution: {integrity: sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==} + engines: {node: '>=6.9.0'} + + '@babel/parser@7.29.0': + resolution: {integrity: sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==} + engines: {node: '>=6.0.0'} + hasBin: true + + '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.28.5': + resolution: {integrity: sha512-87GDMS3tsmMSi/3bWOte1UblL+YUTFMV8SZPZ2eSEL17s74Cw/l63rR6NmGVKMYW2GYi85nE+/d6Hw5N0bEk2Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-bugfix-safari-class-field-initializer-scope@7.27.1': + resolution: {integrity: sha512-qNeq3bCKnGgLkEXUuFry6dPlGfCdQNZbn7yUAPCInwAJHMU7THJfrBSozkcWq5sNM6RcF3S8XyQL2A52KNR9IA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.27.1': + resolution: {integrity: sha512-g4L7OYun04N1WyqMNjldFwlfPCLVkgB54A/YCXICZYBsvJJE3kByKv9c9+R/nAfmIfjl2rKYLNyMHboYbZaWaA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + 
'@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.27.1': + resolution: {integrity: sha512-oO02gcONcD5O1iTLi/6frMJBIwWEHceWGSGqrpCmEL8nogiS6J9PBlE48CaK20/Jx1LuRml9aDftLgdjXT8+Cw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.13.0 + + '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.28.6': + resolution: {integrity: sha512-a0aBScVTlNaiUe35UtfxAN7A/tehvvG4/ByO6+46VPKTRSlfnAFsgKy0FUh+qAkQrDTmhDkT+IBOKlOoMUxQ0g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2': + resolution: {integrity: sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-dynamic-import@7.8.3': + resolution: {integrity: sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-assertions@7.28.6': + resolution: {integrity: sha512-pSJUpFHdx9z5nqTSirOCMtYVP2wFgoWhP0p3g8ONK/4IHhLIBd0B9NYqAvIUAhq+OkhO4VM1tENCt0cjlsNShw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-import-attributes@7.28.6': + resolution: {integrity: sha512-jiLC0ma9XkQT3TKJ9uYvlakm66Pamywo+qwL+oL8HJOvc6TWdZXVfhqJr8CCzbSGUAbDOzlGHJC1U+vRfLQDvw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-jsx@7.28.6': + resolution: {integrity: sha512-wgEmr06G6sIpqr8YDwA2dSRTE3bJ+V0IfpzfSY3Lfgd7YWOaAdlykvJi13ZKBt8cZHfgH1IXN+CL656W3uUa4w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-typescript@7.28.6': + resolution: {integrity: sha512-+nDNmQye7nlnuuHDboPbGm00Vqg3oO8niRRL27/4LYHUsHYh0zJ1xWOz0uRwNFmM1Avzk8wZbc6rdiYhomzv/A==} + engines: {node: '>=6.9.0'} + 
peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-syntax-unicode-sets-regex@7.18.6': + resolution: {integrity: sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-transform-arrow-functions@7.27.1': + resolution: {integrity: sha512-8Z4TGic6xW70FKThA5HYEKKyBpOOsucTOD1DjU3fZxDg+K3zBJcXMFnt/4yQiZnf5+MiOMSXQ9PaEK/Ilh1DeA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-async-generator-functions@7.29.0': + resolution: {integrity: sha512-va0VdWro4zlBr2JsXC+ofCPB2iG12wPtVGTWFx2WLDOM3nYQZZIGP82qku2eW/JR83sD+k2k+CsNtyEbUqhU6w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-async-to-generator@7.28.6': + resolution: {integrity: sha512-ilTRcmbuXjsMmcZ3HASTe4caH5Tpo93PkTxF9oG2VZsSWsahydmcEHhix9Ik122RcTnZnUzPbmux4wh1swfv7g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-block-scoped-functions@7.27.1': + resolution: {integrity: sha512-cnqkuOtZLapWYZUYM5rVIdv1nXYuFVIltZ6ZJ7nIj585QsjKM5dhL2Fu/lICXZ1OyIAFc7Qy+bvDAtTXqGrlhg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-block-scoping@7.28.6': + resolution: {integrity: sha512-tt/7wOtBmwHPNMPu7ax4pdPz6shjFrmHDghvNC+FG9Qvj7D6mJcoRQIF5dy4njmxR941l6rgtvfSB2zX3VlUIw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-class-properties@7.28.6': + resolution: {integrity: sha512-dY2wS3I2G7D697VHndN91TJr8/AAfXQNt5ynCTI/MpxMsSzHp+52uNivYT5wCPax3whc47DR8Ba7cmlQMg24bw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-class-static-block@7.28.6': + resolution: {integrity: 
sha512-rfQ++ghVwTWTqQ7w8qyDxL1XGihjBss4CmTgGRCTAC9RIbhVpyp4fOeZtta0Lbf+dTNIVJer6ych2ibHwkZqsQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.12.0 + + '@babel/plugin-transform-classes@7.28.6': + resolution: {integrity: sha512-EF5KONAqC5zAqT783iMGuM2ZtmEBy+mJMOKl2BCvPZ2lVrwvXnB6o+OBWCS+CoeCCpVRF2sA2RBKUxvT8tQT5Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-computed-properties@7.28.6': + resolution: {integrity: sha512-bcc3k0ijhHbc2lEfpFHgx7eYw9KNXqOerKWfzbxEHUGKnS3sz9C4CNL9OiFN1297bDNfUiSO7DaLzbvHQQQ1BQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-destructuring@7.28.5': + resolution: {integrity: sha512-Kl9Bc6D0zTUcFUvkNuQh4eGXPKKNDOJQXVyyM4ZAQPMveniJdxi8XMJwLo+xSoW3MIq81bD33lcUe9kZpl0MCw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-dotall-regex@7.28.6': + resolution: {integrity: sha512-SljjowuNKB7q5Oayv4FoPzeB74g3QgLt8IVJw9ADvWy3QnUb/01aw8I4AVv8wYnPvQz2GDDZ/g3GhcNyDBI4Bg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-duplicate-keys@7.27.1': + resolution: {integrity: sha512-MTyJk98sHvSs+cvZ4nOauwTTG1JeonDjSGvGGUNHreGQns+Mpt6WX/dVzWBHgg+dYZhkC4X+zTDfkTU+Vy9y7Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-duplicate-named-capturing-groups-regex@7.29.0': + resolution: {integrity: sha512-zBPcW2lFGxdiD8PUnPwJjag2J9otbcLQzvbiOzDxpYXyCuYX9agOwMPGn1prVH0a4qzhCKu24rlH4c1f7yA8rw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-transform-dynamic-import@7.27.1': + resolution: {integrity: sha512-MHzkWQcEmjzzVW9j2q8LGjwGWpG2mjwaaB0BNQwst3FIjqsg8Ct/mIZlvSPJvfi9y2AC8mi/ktxbFVL9pZ1I4A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + 
'@babel/plugin-transform-explicit-resource-management@7.28.6': + resolution: {integrity: sha512-Iao5Konzx2b6g7EPqTy40UZbcdXE126tTxVFr/nAIj+WItNxjKSYTEw3RC+A2/ZetmdJsgueL1KhaMCQHkLPIg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-exponentiation-operator@7.28.6': + resolution: {integrity: sha512-WitabqiGjV/vJ0aPOLSFfNY1u9U3R7W36B03r5I2KoNix+a3sOhJ3pKFB3R5It9/UiK78NiO0KE9P21cMhlPkw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-export-namespace-from@7.27.1': + resolution: {integrity: sha512-tQvHWSZ3/jH2xuq/vZDy0jNn+ZdXJeM8gHvX4lnJmsc3+50yPlWdZXIc5ay+umX+2/tJIqHqiEqcJvxlmIvRvQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-for-of@7.27.1': + resolution: {integrity: sha512-BfbWFFEJFQzLCQ5N8VocnCtA8J1CLkNTe2Ms2wocj75dd6VpiqS5Z5quTYcUoo4Yq+DN0rtikODccuv7RU81sw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-function-name@7.27.1': + resolution: {integrity: sha512-1bQeydJF9Nr1eBCMMbC+hdwmRlsv5XYOMu03YSWFwNs0HsAmtSxxF1fyuYPqemVldVyFmlCU7w8UE14LupUSZQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-json-strings@7.28.6': + resolution: {integrity: sha512-Nr+hEN+0geQkzhbdgQVPoqr47lZbm+5fCUmO70722xJZd0Mvb59+33QLImGj6F+DkK3xgDi1YVysP8whD6FQAw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-literals@7.27.1': + resolution: {integrity: sha512-0HCFSepIpLTkLcsi86GG3mTUzxV5jpmbv97hTETW3yzrAij8aqlD36toB1D0daVFJM8NK6GvKO0gslVQmm+zZA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-logical-assignment-operators@7.28.6': + resolution: {integrity: sha512-+anKKair6gpi8VsM/95kmomGNMD0eLz1NQ8+Pfw5sAwWH9fGYXT50E55ZpV0pHUHWf6IUTWPM+f/7AAff+wr9A==} + engines: {node: '>=6.9.0'} + 
peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-member-expression-literals@7.27.1': + resolution: {integrity: sha512-hqoBX4dcZ1I33jCSWcXrP+1Ku7kdqXf1oeah7ooKOIiAdKQ+uqftgCFNOSzA5AMS2XIHEYeGFg4cKRCdpxzVOQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-modules-amd@7.27.1': + resolution: {integrity: sha512-iCsytMg/N9/oFq6n+gFTvUYDZQOMK5kEdeYxmxt91fcJGycfxVP9CnrxoliM0oumFERba2i8ZtwRUCMhvP1LnA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-modules-commonjs@7.28.6': + resolution: {integrity: sha512-jppVbf8IV9iWWwWTQIxJMAJCWBuuKx71475wHwYytrRGQ2CWiDvYlADQno3tcYpS/T2UUWFQp3nVtYfK/YBQrA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-modules-systemjs@7.29.0': + resolution: {integrity: sha512-PrujnVFbOdUpw4UHiVwKvKRLMMic8+eC0CuNlxjsyZUiBjhFdPsewdXCkveh2KqBA9/waD0W1b4hXSOBQJezpQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-modules-umd@7.27.1': + resolution: {integrity: sha512-iQBE/xC5BV1OxJbp6WG7jq9IWiD+xxlZhLrdwpPkTX3ydmXdvoCpyfJN7acaIBZaOqTfr76pgzqBJflNbeRK+w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-named-capturing-groups-regex@7.29.0': + resolution: {integrity: sha512-1CZQA5KNAD6ZYQLPw7oi5ewtDNxH/2vuCh+6SmvgDfhumForvs8a1o9n0UrEoBD8HU4djO2yWngTQlXl1NDVEQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-transform-new-target@7.27.1': + resolution: {integrity: sha512-f6PiYeqXQ05lYq3TIfIDu/MtliKUbNwkGApPUvyo6+tc7uaR4cPjPe7DFPr15Uyycg2lZU6btZ575CuQoYh7MQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-nullish-coalescing-operator@7.28.6': + resolution: {integrity: 
sha512-3wKbRgmzYbw24mDJXT7N+ADXw8BC/imU9yo9c9X9NKaLF1fW+e5H1U5QjMUBe4Qo4Ox/o++IyUkl1sVCLgevKg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-numeric-separator@7.28.6': + resolution: {integrity: sha512-SJR8hPynj8outz+SlStQSwvziMN4+Bq99it4tMIf5/Caq+3iOc0JtKyse8puvyXkk3eFRIA5ID/XfunGgO5i6w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-object-rest-spread@7.28.6': + resolution: {integrity: sha512-5rh+JR4JBC4pGkXLAcYdLHZjXudVxWMXbB6u6+E9lRL5TrGVbHt1TjxGbZ8CkmYw9zjkB7jutzOROArsqtncEA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-object-super@7.27.1': + resolution: {integrity: sha512-SFy8S9plRPbIcxlJ8A6mT/CxFdJx/c04JEctz4jf8YZaVS2px34j7NXRrlGlHkN/M2gnpL37ZpGRGVFLd3l8Ng==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-optional-catch-binding@7.28.6': + resolution: {integrity: sha512-R8ja/Pyrv0OGAvAXQhSTmWyPJPml+0TMqXlO5w+AsMEiwb2fg3WkOvob7UxFSL3OIttFSGSRFKQsOhJ/X6HQdQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-optional-chaining@7.28.6': + resolution: {integrity: sha512-A4zobikRGJTsX9uqVFdafzGkqD30t26ck2LmOzAuLL8b2x6k3TIqRiT2xVvA9fNmFeTX484VpsdgmKNA0bS23w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-parameters@7.27.7': + resolution: {integrity: sha512-qBkYTYCb76RRxUM6CcZA5KRu8K4SM8ajzVeUgVdMVO9NN9uI/GaVmBg/WKJJGnNokV9SY8FxNOVWGXzqzUidBg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-private-methods@7.28.6': + resolution: {integrity: sha512-piiuapX9CRv7+0st8lmuUlRSmX6mBcVeNQ1b4AYzJxfCMuBfB0vBXDiGSmm03pKJw1v6cZ8KSeM+oUnM6yAExg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + 
'@babel/plugin-transform-private-property-in-object@7.28.6': + resolution: {integrity: sha512-b97jvNSOb5+ehyQmBpmhOCiUC5oVK4PMnpRvO7+ymFBoqYjeDHIU9jnrNUuwHOiL9RpGDoKBpSViarV+BU+eVA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-property-literals@7.27.1': + resolution: {integrity: sha512-oThy3BCuCha8kDZ8ZkgOg2exvPYUlprMukKQXI1r1pJ47NCvxfkEy8vK+r/hT9nF0Aa4H1WUPZZjHTFtAhGfmQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-react-constant-elements@7.27.1': + resolution: {integrity: sha512-edoidOjl/ZxvYo4lSBOQGDSyToYVkTAwyVoa2tkuYTSmjrB1+uAedoL5iROVLXkxH+vRgA7uP4tMg2pUJpZ3Ug==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-react-display-name@7.28.0': + resolution: {integrity: sha512-D6Eujc2zMxKjfa4Zxl4GHMsmhKKZ9VpcqIchJLvwTxad9zWIYulwYItBovpDOoNLISpcZSXoDJ5gaGbQUDqViA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-react-jsx-development@7.27.1': + resolution: {integrity: sha512-ykDdF5yI4f1WrAolLqeF3hmYU12j9ntLQl/AOG1HAS21jxyg1Q0/J/tpREuYLfatGdGmXp/3yS0ZA76kOlVq9Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-react-jsx@7.28.6': + resolution: {integrity: sha512-61bxqhiRfAACulXSLd/GxqmAedUSrRZIu/cbaT18T1CetkTmtDN15it7i80ru4DVqRK1WMxQhXs+Lf9kajm5Ow==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-react-pure-annotations@7.27.1': + resolution: {integrity: sha512-JfuinvDOsD9FVMTHpzA/pBLisxpv1aSf+OIV8lgH3MuWrks19R27e6a6DipIg4aX1Zm9Wpb04p8wljfKrVSnPA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-regenerator@7.29.0': + resolution: {integrity: sha512-FijqlqMA7DmRdg/aINBSs04y8XNTYw/lr1gJ2WsmBnnaNw1iS43EPkJW+zK7z65auG3AWRFXWj+NcTQwYptUog==} + engines: {node: '>=6.9.0'} + 
peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-regexp-modifiers@7.28.6': + resolution: {integrity: sha512-QGWAepm9qxpaIs7UM9FvUSnCGlb8Ua1RhyM4/veAxLwt3gMat/LSGrZixyuj4I6+Kn9iwvqCyPTtbdxanYoWYg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/plugin-transform-reserved-words@7.27.1': + resolution: {integrity: sha512-V2ABPHIJX4kC7HegLkYoDpfg9PVmuWy/i6vUM5eGK22bx4YVFD3M5F0QQnWQoDs6AGsUWTVOopBiMFQgHaSkVw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-runtime@7.29.0': + resolution: {integrity: sha512-jlaRT5dJtMaMCV6fAuLbsQMSwz/QkvaHOHOSXRitGGwSpR1blCY4KUKoyP2tYO8vJcqYe8cEj96cqSztv3uF9w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-shorthand-properties@7.27.1': + resolution: {integrity: sha512-N/wH1vcn4oYawbJ13Y/FxcQrWk63jhfNa7jef0ih7PHSIHX2LB7GWE1rkPrOnka9kwMxb6hMl19p7lidA+EHmQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-spread@7.28.6': + resolution: {integrity: sha512-9U4QObUC0FtJl05AsUcodau/RWDytrU6uKgkxu09mLR9HLDAtUMoPuuskm5huQsoktmsYpI+bGmq+iapDcriKA==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-sticky-regex@7.27.1': + resolution: {integrity: sha512-lhInBO5bi/Kowe2/aLdBAawijx+q1pQzicSgnkB6dUPc1+RC8QmJHKf2OjvU+NZWitguJHEaEmbV6VWEouT58g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-template-literals@7.27.1': + resolution: {integrity: sha512-fBJKiV7F2DxZUkg5EtHKXQdbsbURW3DZKQUWphDum0uRP6eHGGa/He9mc0mypL680pb+e/lDIthRohlv8NCHkg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-typeof-symbol@7.27.1': + resolution: {integrity: sha512-RiSILC+nRJM7FY5srIyc4/fGIwUhyDuuBSdWn4y6yT6gm652DpCHZjIipgn6B7MQ1ITOUnAKWixEUjQRIBIcLw==} + engines: {node: '>=6.9.0'} + 
peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-typescript@7.28.6': + resolution: {integrity: sha512-0YWL2RFxOqEm9Efk5PvreamxPME8OyY0wM5wh5lHjF+VtVhdneCWGzZeSqzOfiobVqQaNCd2z0tQvnI9DaPWPw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-unicode-escapes@7.27.1': + resolution: {integrity: sha512-Ysg4v6AmF26k9vpfFuTZg8HRfVWzsh1kVfowA23y9j/Gu6dOuahdUVhkLqpObp3JIv27MLSii6noRnuKN8H0Mg==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-unicode-property-regex@7.28.6': + resolution: {integrity: sha512-4Wlbdl/sIZjzi/8St0evF0gEZrgOswVO6aOzqxh1kDZOl9WmLrHq2HtGhnOJZmHZYKP8WZ1MDLCt5DAWwRo57A==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-unicode-regex@7.27.1': + resolution: {integrity: sha512-xvINq24TRojDuyt6JGtHmkVkrfVV3FPT16uytxImLeBZqW3/H52yN+kM1MGuyPkIQxrzKwPHs5U/MP3qKyzkGw==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/plugin-transform-unicode-sets-regex@7.28.6': + resolution: {integrity: sha512-/wHc/paTUmsDYN7SZkpWxogTOBNnlx7nBQYfy6JJlCT7G3mVhltk3e++N7zV0XfgGsrqBxd4rJQt9H16I21Y1Q==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0 + + '@babel/preset-env@7.29.0': + resolution: {integrity: sha512-fNEdfc0yi16lt6IZo2Qxk3knHVdfMYX33czNb4v8yWhemoBhibCpQK/uYHtSKIiO+p/zd3+8fYVXhQdOVV608w==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/preset-modules@0.1.6-no-external-plugins': + resolution: {integrity: sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==} + peerDependencies: + '@babel/core': ^7.0.0-0 || ^8.0.0-0 <8.0.0 + + '@babel/preset-react@7.28.5': + resolution: {integrity: sha512-Z3J8vhRq7CeLjdC58jLv4lnZ5RKFUJWqH5emvxmv9Hv3BD1T9R/Im713R4MTKwvFaV74ejZ3sM01LyEKk4ugNQ==} + engines: {node: '>=6.9.0'} + peerDependencies: + 
'@babel/core': ^7.0.0-0 + + '@babel/preset-typescript@7.28.5': + resolution: {integrity: sha512-+bQy5WOI2V6LJZpPVxY+yp66XdZ2yifu0Mc1aP5CQKgjn4QM5IN2i5fAZ4xKop47pr8rpVhiAeu+nDQa12C8+g==} + engines: {node: '>=6.9.0'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@babel/runtime-corejs3@7.29.0': + resolution: {integrity: sha512-TgUkdp71C9pIbBcHudc+gXZnihEDOjUAmXO1VO4HHGES7QLZcShR0stfKIxLSNIYx2fqhmJChOjm/wkF8wv4gA==} + engines: {node: '>=6.9.0'} + + '@babel/runtime@7.28.6': + resolution: {integrity: sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==} + engines: {node: '>=6.9.0'} + + '@babel/template@7.28.6': + resolution: {integrity: sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==} + engines: {node: '>=6.9.0'} + + '@babel/traverse@7.29.0': + resolution: {integrity: sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==} + engines: {node: '>=6.9.0'} + + '@babel/types@7.29.0': + resolution: {integrity: sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==} + engines: {node: '>=6.9.0'} + + '@bcoe/v8-coverage@0.2.3': + resolution: {integrity: sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==} + + '@braintree/sanitize-url@7.1.2': + resolution: {integrity: sha512-jigsZK+sMF/cuiB7sERuo9V7N9jx+dhmHHnQyDSVdpZwVutaBu7WvNYqMDLSgFgfB30n452TP3vjDAvFC973mA==} + + '@changesets/apply-release-plan@7.0.14': + resolution: {integrity: sha512-ddBvf9PHdy2YY0OUiEl3TV78mH9sckndJR14QAt87KLEbIov81XO0q0QAmvooBxXlqRRP8I9B7XOzZwQG7JkWA==} + + '@changesets/assemble-release-plan@6.0.9': + resolution: {integrity: sha512-tPgeeqCHIwNo8sypKlS3gOPmsS3wP0zHt67JDuL20P4QcXiw/O4Hl7oXiuLnP9yg+rXLQ2sScdV1Kkzde61iSQ==} + + '@changesets/changelog-git@0.2.1': + resolution: {integrity: sha512-x/xEleCFLH28c3bQeQIyeZf8lFXyDFVn1SgcBiR2Tw/r4IAWlk1fzxCEZ6NxQAjF2Nwtczoen3OA2qR+UawQ8Q==} + + 
'@changesets/cli@2.29.8': + resolution: {integrity: sha512-1weuGZpP63YWUYjay/E84qqwcnt5yJMM0tep10Up7Q5cS/DGe2IZ0Uj3HNMxGhCINZuR7aO9WBMdKnPit5ZDPA==} + hasBin: true + + '@changesets/config@3.1.2': + resolution: {integrity: sha512-CYiRhA4bWKemdYi/uwImjPxqWNpqGPNbEBdX1BdONALFIDK7MCUj6FPkzD+z9gJcvDFUQJn9aDVf4UG7OT6Kog==} + + '@changesets/errors@0.2.0': + resolution: {integrity: sha512-6BLOQUscTpZeGljvyQXlWOItQyU71kCdGz7Pi8H8zdw6BI0g3m43iL4xKUVPWtG+qrrL9DTjpdn8eYuCQSRpow==} + + '@changesets/get-dependents-graph@2.1.3': + resolution: {integrity: sha512-gphr+v0mv2I3Oxt19VdWRRUxq3sseyUpX9DaHpTUmLj92Y10AGy+XOtV+kbM6L/fDcpx7/ISDFK6T8A/P3lOdQ==} + + '@changesets/get-release-plan@4.0.14': + resolution: {integrity: sha512-yjZMHpUHgl4Xl5gRlolVuxDkm4HgSJqT93Ri1Uz8kGrQb+5iJ8dkXJ20M2j/Y4iV5QzS2c5SeTxVSKX+2eMI0g==} + + '@changesets/get-version-range-type@0.4.0': + resolution: {integrity: sha512-hwawtob9DryoGTpixy1D3ZXbGgJu1Rhr+ySH2PvTLHvkZuQ7sRT4oQwMh0hbqZH1weAooedEjRsbrWcGLCeyVQ==} + + '@changesets/git@3.0.4': + resolution: {integrity: sha512-BXANzRFkX+XcC1q/d27NKvlJ1yf7PSAgi8JG6dt8EfbHFHi4neau7mufcSca5zRhwOL8j9s6EqsxmT+s+/E6Sw==} + + '@changesets/logger@0.1.1': + resolution: {integrity: sha512-OQtR36ZlnuTxKqoW4Sv6x5YIhOmClRd5pWsjZsddYxpWs517R0HkyiefQPIytCVh4ZcC5x9XaG8KTdd5iRQUfg==} + + '@changesets/parse@0.4.2': + resolution: {integrity: sha512-Uo5MC5mfg4OM0jU3up66fmSn6/NE9INK+8/Vn/7sMVcdWg46zfbvvUSjD9EMonVqPi9fbrJH9SXHn48Tr1f2yA==} + + '@changesets/pre@2.0.2': + resolution: {integrity: sha512-HaL/gEyFVvkf9KFg6484wR9s0qjAXlZ8qWPDkTyKF6+zqjBe/I2mygg3MbpZ++hdi0ToqNUF8cjj7fBy0dg8Ug==} + + '@changesets/read@0.6.6': + resolution: {integrity: sha512-P5QaN9hJSQQKJShzzpBT13FzOSPyHbqdoIBUd2DJdgvnECCyO6LmAOWSV+O8se2TaZJVwSXjL+v9yhb+a9JeJg==} + + '@changesets/should-skip-package@0.1.2': + resolution: {integrity: sha512-qAK/WrqWLNCP22UDdBTMPH5f41elVDlsNyat180A33dWxuUDyNpg6fPi/FyTZwRriVjg0L8gnjJn2F9XAoF0qw==} + + '@changesets/types@4.1.0': + resolution: {integrity: 
sha512-LDQvVDv5Kb50ny2s25Fhm3d9QSZimsoUGBsUioj6MC3qbMUCuC8GPIvk/M6IvXx3lYhAs0lwWUQLb+VIEUCECw==} + + '@changesets/types@6.1.0': + resolution: {integrity: sha512-rKQcJ+o1nKNgeoYRHKOS07tAMNd3YSN0uHaJOZYjBAgxfV7TUE7JE+z4BzZdQwb5hKaYbayKN5KrYV7ODb2rAA==} + + '@changesets/write@0.4.0': + resolution: {integrity: sha512-CdTLvIOPiCNuH71pyDu3rA+Q0n65cmAbXnwWH84rKGiFumFzkmHNT8KHTMEchcxN+Kl8I54xGUhJ7l3E7X396Q==} + + '@chevrotain/cst-dts-gen@11.0.3': + resolution: {integrity: sha512-BvIKpRLeS/8UbfxXxgC33xOumsacaeCKAjAeLyOn7Pcp95HiRbrpl14S+9vaZLolnbssPIUuiUd8IvgkRyt6NQ==} + + '@chevrotain/gast@11.0.3': + resolution: {integrity: sha512-+qNfcoNk70PyS/uxmj3li5NiECO+2YKZZQMbmjTqRI3Qchu8Hig/Q9vgkHpI3alNjr7M+a2St5pw5w5F6NL5/Q==} + + '@chevrotain/regexp-to-ast@11.0.3': + resolution: {integrity: sha512-1fMHaBZxLFvWI067AVbGJav1eRY7N8DDvYCTwGBiE/ytKBgP8azTdgyrKyWZ9Mfh09eHWb5PgTSO8wi7U824RA==} + + '@chevrotain/types@11.0.3': + resolution: {integrity: sha512-gsiM3G8b58kZC2HaWR50gu6Y1440cHiJ+i3JUvcp/35JchYejb2+5MVeJK0iKThYpAa/P2PYFV4hoi44HD+aHQ==} + + '@chevrotain/utils@11.0.3': + resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==} + + '@colors/colors@1.5.0': + resolution: {integrity: sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==} + engines: {node: '>=0.1.90'} + + '@csstools/cascade-layer-name-parser@2.0.5': + resolution: {integrity: sha512-p1ko5eHgV+MgXFVa4STPKpvPxr6ReS8oS2jzTukjR74i5zJNyWO1ZM1m8YKBXnzDKWfBN1ztLYlHxbVemDD88A==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/color-helpers@5.1.0': + resolution: {integrity: sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==} + engines: {node: '>=18'} + + '@csstools/css-calc@2.1.4': + resolution: {integrity: 
sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-color-parser@3.1.0': + resolution: {integrity: sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-parser-algorithms@3.0.5': + resolution: {integrity: sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/css-tokenizer@3.0.4': + resolution: {integrity: sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==} + engines: {node: '>=18'} + + '@csstools/media-query-list-parser@4.0.3': + resolution: {integrity: sha512-HAYH7d3TLRHDOUQK4mZKf9k9Ph/m8Akstg66ywKR4SFAigjs3yBiUeZtFxywiTm5moZMAp/5W/ZuFnNXXYLuuQ==} + engines: {node: '>=18'} + peerDependencies: + '@csstools/css-parser-algorithms': ^3.0.5 + '@csstools/css-tokenizer': ^3.0.4 + + '@csstools/postcss-alpha-function@1.0.1': + resolution: {integrity: sha512-isfLLwksH3yHkFXfCI2Gcaqg7wGGHZZwunoJzEZk0yKYIokgre6hYVFibKL3SYAoR1kBXova8LB+JoO5vZzi9w==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-cascade-layers@5.0.2': + resolution: {integrity: sha512-nWBE08nhO8uWl6kSAeCx4im7QfVko3zLrtgWZY4/bP87zrSPpSyN/3W3TDqz1jJuH+kbKOHXg5rJnK+ZVYcFFg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-color-function-display-p3-linear@1.0.1': + resolution: {integrity: sha512-E5qusdzhlmO1TztYzDIi8XPdPoYOjoTY6HBYBCYSj+Gn4gQRBlvjgPQXzfzuPQqt8EhkC/SzPKObg4Mbn8/xMg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + 
'@csstools/postcss-color-function@4.0.12': + resolution: {integrity: sha512-yx3cljQKRaSBc2hfh8rMZFZzChaFgwmO2JfFgFr1vMcF3C/uyy5I4RFIBOIWGq1D+XbKCG789CGkG6zzkLpagA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-color-mix-function@3.0.12': + resolution: {integrity: sha512-4STERZfCP5Jcs13P1U5pTvI9SkgLgfMUMhdXW8IlJWkzOOOqhZIjcNhWtNJZes2nkBDsIKJ0CJtFtuaZ00moag==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-color-mix-variadic-function-arguments@1.0.2': + resolution: {integrity: sha512-rM67Gp9lRAkTo+X31DUqMEq+iK+EFqsidfecmhrteErxJZb6tUoJBVQca1Vn1GpDql1s1rD1pKcuYzMsg7Z1KQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-content-alt-text@2.0.8': + resolution: {integrity: sha512-9SfEW9QCxEpTlNMnpSqFaHyzsiRpZ5J5+KqCu1u5/eEJAWsMhzT40qf0FIbeeglEvrGRMdDzAxMIz3wqoGSb+Q==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-contrast-color-function@2.0.12': + resolution: {integrity: sha512-YbwWckjK3qwKjeYz/CijgcS7WDUCtKTd8ShLztm3/i5dhh4NaqzsbYnhm4bjrpFpnLZ31jVcbK8YL77z3GBPzA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-exponential-functions@2.0.9': + resolution: {integrity: sha512-abg2W/PI3HXwS/CZshSa79kNWNZHdJPMBXeZNyPQFbbj8sKO3jXxOt/wF7juJVjyDTc6JrvaUZYFcSBZBhaxjw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-font-format-keywords@4.0.0': + resolution: {integrity: sha512-usBzw9aCRDvchpok6C+4TXC57btc4bJtmKQWOHQxOVKen1ZfVqBUuCZ/wuqdX5GHsD0NRSr9XTP+5ID1ZZQBXw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-gamut-mapping@2.0.11': + resolution: {integrity: sha512-fCpCUgZNE2piVJKC76zFsgVW1apF6dpYsqGyH8SIeCcM4pTEsRTWTLCaJIMKFEundsCKwY1rwfhtrio04RJ4Dw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-gradients-interpolation-method@5.0.12': + resolution: {integrity: 
sha512-jugzjwkUY0wtNrZlFeyXzimUL3hN4xMvoPnIXxoZqxDvjZRiSh+itgHcVUWzJ2VwD/VAMEgCLvtaJHX+4Vj3Ow==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-hwb-function@4.0.12': + resolution: {integrity: sha512-mL/+88Z53KrE4JdePYFJAQWFrcADEqsLprExCM04GDNgHIztwFzj0Mbhd/yxMBngq0NIlz58VVxjt5abNs1VhA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-ic-unit@4.0.4': + resolution: {integrity: sha512-yQ4VmossuOAql65sCPppVO1yfb7hDscf4GseF0VCA/DTDaBc0Wtf8MTqVPfjGYlT5+2buokG0Gp7y0atYZpwjg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-initial@2.0.1': + resolution: {integrity: sha512-L1wLVMSAZ4wovznquK0xmC7QSctzO4D0Is590bxpGqhqjboLXYA16dWZpfwImkdOgACdQ9PqXsuRroW6qPlEsg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-is-pseudo-class@5.0.3': + resolution: {integrity: sha512-jS/TY4SpG4gszAtIg7Qnf3AS2pjcUM5SzxpApOrlndMeGhIbaTzWBzzP/IApXoNWEW7OhcjkRT48jnAUIFXhAQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-light-dark-function@2.0.11': + resolution: {integrity: sha512-fNJcKXJdPM3Lyrbmgw2OBbaioU7yuKZtiXClf4sGdQttitijYlZMD5K7HrC/eF83VRWRrYq6OZ0Lx92leV2LFA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-logical-float-and-clear@3.0.0': + resolution: {integrity: sha512-SEmaHMszwakI2rqKRJgE+8rpotFfne1ZS6bZqBoQIicFyV+xT1UF42eORPxJkVJVrH9C0ctUgwMSn3BLOIZldQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-logical-overflow@2.0.0': + resolution: {integrity: sha512-spzR1MInxPuXKEX2csMamshR4LRaSZ3UXVaRGjeQxl70ySxOhMpP2252RAFsg8QyyBXBzuVOOdx1+bVO5bPIzA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-logical-overscroll-behavior@2.0.0': + resolution: {integrity: sha512-e/webMjoGOSYfqLunyzByZj5KKe5oyVg/YSbie99VEaSDE2kimFm0q1f6t/6Jo+VVCQ/jbe2Xy+uX+C4xzWs4w==} + engines: {node: '>=18'} + 
peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-logical-resize@3.0.0': + resolution: {integrity: sha512-DFbHQOFW/+I+MY4Ycd/QN6Dg4Hcbb50elIJCfnwkRTCX05G11SwViI5BbBlg9iHRl4ytB7pmY5ieAFk3ws7yyg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-logical-viewport-units@3.0.4': + resolution: {integrity: sha512-q+eHV1haXA4w9xBwZLKjVKAWn3W2CMqmpNpZUk5kRprvSiBEGMgrNH3/sJZ8UA3JgyHaOt3jwT9uFa4wLX4EqQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-media-minmax@2.0.9': + resolution: {integrity: sha512-af9Qw3uS3JhYLnCbqtZ9crTvvkR+0Se+bBqSr7ykAnl9yKhk6895z9rf+2F4dClIDJWxgn0iZZ1PSdkhrbs2ig==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-media-queries-aspect-ratio-number-values@3.0.5': + resolution: {integrity: sha512-zhAe31xaaXOY2Px8IYfoVTB3wglbJUVigGphFLj6exb7cjZRH9A6adyE22XfFK3P2PzwRk0VDeTJmaxpluyrDg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-nested-calc@4.0.0': + resolution: {integrity: sha512-jMYDdqrQQxE7k9+KjstC3NbsmC063n1FTPLCgCRS2/qHUbHM0mNy9pIn4QIiQGs9I/Bg98vMqw7mJXBxa0N88A==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-normalize-display-values@4.0.1': + resolution: {integrity: sha512-TQUGBuRvxdc7TgNSTevYqrL8oItxiwPDixk20qCB5me/W8uF7BPbhRrAvFuhEoywQp/woRsUZ6SJ+sU5idZAIA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-oklab-function@4.0.12': + resolution: {integrity: sha512-HhlSmnE1NKBhXsTnNGjxvhryKtO7tJd1w42DKOGFD6jSHtYOrsJTQDKPMwvOfrzUAk8t7GcpIfRyM7ssqHpFjg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-position-area-property@1.0.0': + resolution: {integrity: sha512-fUP6KR8qV2NuUZV3Cw8itx0Ep90aRjAZxAEzC3vrl6yjFv+pFsQbR18UuQctEKmA72K9O27CoYiKEgXxkqjg8Q==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-progressive-custom-properties@4.2.1': + 
resolution: {integrity: sha512-uPiiXf7IEKtUQXsxu6uWtOlRMXd2QWWy5fhxHDnPdXKCQckPP3E34ZgDoZ62r2iT+UOgWsSbM4NvHE5m3mAEdw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-property-rule-prelude-list@1.0.0': + resolution: {integrity: sha512-IxuQjUXq19fobgmSSvUDO7fVwijDJaZMvWQugxfEUxmjBeDCVaDuMpsZ31MsTm5xbnhA+ElDi0+rQ7sQQGisFA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-random-function@2.0.1': + resolution: {integrity: sha512-q+FQaNiRBhnoSNo+GzqGOIBKoHQ43lYz0ICrV+UudfWnEF6ksS6DsBIJSISKQT2Bvu3g4k6r7t0zYrk5pDlo8w==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-relative-color-syntax@3.0.12': + resolution: {integrity: sha512-0RLIeONxu/mtxRtf3o41Lq2ghLimw0w9ByLWnnEVuy89exmEEq8bynveBxNW3nyHqLAFEeNtVEmC1QK9MZ8Huw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-scope-pseudo-class@4.0.1': + resolution: {integrity: sha512-IMi9FwtH6LMNuLea1bjVMQAsUhFxJnyLSgOp/cpv5hrzWmrUYU5fm0EguNDIIOHUqzXode8F/1qkC/tEo/qN8Q==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-sign-functions@1.1.4': + resolution: {integrity: sha512-P97h1XqRPcfcJndFdG95Gv/6ZzxUBBISem0IDqPZ7WMvc/wlO+yU0c5D/OCpZ5TJoTt63Ok3knGk64N+o6L2Pg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-stepped-value-functions@4.0.9': + resolution: {integrity: sha512-h9btycWrsex4dNLeQfyU3y3w40LMQooJWFMm/SK9lrKguHDcFl4VMkncKKoXi2z5rM9YGWbUQABI8BT2UydIcA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-syntax-descriptor-syntax-production@1.0.1': + resolution: {integrity: sha512-GneqQWefjM//f4hJ/Kbox0C6f2T7+pi4/fqTqOFGTL3EjnvOReTqO1qUQ30CaUjkwjYq9qZ41hzarrAxCc4gow==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-system-ui-font-family@1.0.0': + resolution: {integrity: 
sha512-s3xdBvfWYfoPSBsikDXbuorcMG1nN1M6GdU0qBsGfcmNR0A/qhloQZpTxjA3Xsyrk1VJvwb2pOfiOT3at/DuIQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-text-decoration-shorthand@4.0.3': + resolution: {integrity: sha512-KSkGgZfx0kQjRIYnpsD7X2Om9BUXX/Kii77VBifQW9Ih929hK0KNjVngHDH0bFB9GmfWcR9vJYJJRvw/NQjkrA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-trigonometric-functions@4.0.9': + resolution: {integrity: sha512-Hnh5zJUdpNrJqK9v1/E3BbrQhaDTj5YiX7P61TOvUhoDHnUmsNNxcDAgkQ32RrcWx9GVUvfUNPcUkn8R3vIX6A==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/postcss-unset-value@4.0.0': + resolution: {integrity: sha512-cBz3tOCI5Fw6NIFEwU3RiwK6mn3nKegjpJuzCndoGq3BZPkUjnsq7uQmIeMNeMbMk7YD2MfKcgCpZwX5jyXqCA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@csstools/selector-resolve-nested@3.1.0': + resolution: {integrity: sha512-mf1LEW0tJLKfWyvn5KdDrhpxHyuxpbNwTIwOYLIvsTffeyOf85j5oIzfG0yosxDgx/sswlqBnESYUcQH0vgZ0g==} + engines: {node: '>=18'} + peerDependencies: + postcss-selector-parser: ^7.0.0 + + '@csstools/selector-specificity@5.0.0': + resolution: {integrity: sha512-PCqQV3c4CoVm3kdPhyeZ07VmBRdH2EpMFA/pd9OASpOEC3aXNGoqPDAZ80D0cLpMBxnmk0+yNhGsEx31hq7Gtw==} + engines: {node: '>=18'} + peerDependencies: + postcss-selector-parser: ^7.0.0 + + '@csstools/utilities@2.0.0': + resolution: {integrity: sha512-5VdOr0Z71u+Yp3ozOx8T11N703wIFGVRgOWbOZMKgglPJsWA54MRIoMNVMa7shUToIhx5J8vX4sOZgD2XiihiQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + '@discoveryjs/json-ext@0.5.7': + resolution: {integrity: sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==} + engines: {node: '>=10.0.0'} + + '@docsearch/core@4.5.3': + resolution: {integrity: sha512-x/P5+HVzv9ALtbuJIfpkF8Eyc5RE8YCsFcOgLrrtWa9Ui+53ggZA5seIAanCRORbS4+m982lu7rZmebSiuMIcw==} + peerDependencies: + '@types/react': '>= 16.8.0 < 20.0.0' + react: '>= 
16.8.0 < 20.0.0' + react-dom: '>= 16.8.0 < 20.0.0' + peerDependenciesMeta: + '@types/react': + optional: true + react: + optional: true + react-dom: + optional: true + + '@docsearch/css@4.5.3': + resolution: {integrity: sha512-kUpHaxn0AgI3LQfyzTYkNUuaFY4uEz/Ym9/N/FvyDE+PzSgZsCyDH9jE49B6N6f1eLCm9Yp64J9wENd6vypdxA==} + + '@docsearch/react@4.5.3': + resolution: {integrity: sha512-Hm3Lg/FD9HXV57WshhWOHOprbcObF5ptLzcjA5zdgJDzYOMwEN+AvY8heQ5YMTWyC6kW2d+Qk25AVlHnDWMSvA==} + peerDependencies: + '@types/react': '>= 16.8.0 < 20.0.0' + react: '>= 16.8.0 < 20.0.0' + react-dom: '>= 16.8.0 < 20.0.0' + search-insights: '>= 1 < 3' + peerDependenciesMeta: + '@types/react': + optional: true + react: + optional: true + react-dom: + optional: true + search-insights: + optional: true + + '@docusaurus/babel@3.9.2': + resolution: {integrity: sha512-GEANdi/SgER+L7Japs25YiGil/AUDnFFHaCGPBbundxoWtCkA2lmy7/tFmgED4y1htAy6Oi4wkJEQdGssnw9MA==} + engines: {node: '>=20.0'} + + '@docusaurus/bundler@3.9.2': + resolution: {integrity: sha512-ZOVi6GYgTcsZcUzjblpzk3wH1Fya2VNpd5jtHoCCFcJlMQ1EYXZetfAnRHLcyiFeBABaI1ltTYbOBtH/gahGVA==} + engines: {node: '>=20.0'} + peerDependencies: + '@docusaurus/faster': '*' + peerDependenciesMeta: + '@docusaurus/faster': + optional: true + + '@docusaurus/core@3.9.2': + resolution: {integrity: sha512-HbjwKeC+pHUFBfLMNzuSjqFE/58+rLVKmOU3lxQrpsxLBOGosYco/Q0GduBb0/jEMRiyEqjNT/01rRdOMWq5pw==} + engines: {node: '>=20.0'} + hasBin: true + peerDependencies: + '@mdx-js/react': ^3.0.0 + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/cssnano-preset@3.9.2': + resolution: {integrity: sha512-8gBKup94aGttRduABsj7bpPFTX7kbwu+xh3K9NMCF5K4bWBqTFYW+REKHF6iBVDHRJ4grZdIPbvkiHd/XNKRMQ==} + engines: {node: '>=20.0'} + + '@docusaurus/logger@3.9.2': + resolution: {integrity: sha512-/SVCc57ByARzGSU60c50rMyQlBuMIJCjcsJlkphxY6B0GV4UH3tcA1994N8fFfbJ9kX3jIBe/xg3XP5qBtGDbA==} + engines: {node: '>=20.0'} + + '@docusaurus/mdx-loader@3.9.2': + resolution: {integrity: 
sha512-wiYoGwF9gdd6rev62xDU8AAM8JuLI/hlwOtCzMmYcspEkzecKrP8J8X+KpYnTlACBUUtXNJpSoCwFWJhLRevzQ==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/module-type-aliases@3.9.2': + resolution: {integrity: sha512-8qVe2QA9hVLzvnxP46ysuofJUIc/yYQ82tvA/rBTrnpXtCjNSFLxEZfd5U8cYZuJIVlkPxamsIgwd5tGZXfvew==} + peerDependencies: + react: '*' + react-dom: '*' + + '@docusaurus/plugin-content-blog@3.9.2': + resolution: {integrity: sha512-3I2HXy3L1QcjLJLGAoTvoBnpOwa6DPUa3Q0dMK19UTY9mhPkKQg/DYhAGTiBUKcTR0f08iw7kLPqOhIgdV3eVQ==} + engines: {node: '>=20.0'} + peerDependencies: + '@docusaurus/plugin-content-docs': '*' + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-content-docs@3.9.2': + resolution: {integrity: sha512-C5wZsGuKTY8jEYsqdxhhFOe1ZDjH0uIYJ9T/jebHwkyxqnr4wW0jTkB72OMqNjsoQRcb0JN3PcSeTwFlVgzCZg==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-content-pages@3.9.2': + resolution: {integrity: sha512-s4849w/p4noXUrGpPUF0BPqIAfdAe76BLaRGAGKZ1gTDNiGxGcpsLcwJ9OTi1/V8A+AzvsmI9pkjie2zjIQZKA==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-css-cascade-layers@3.9.2': + resolution: {integrity: sha512-w1s3+Ss+eOQbscGM4cfIFBlVg/QKxyYgj26k5AnakuHkKxH6004ZtuLe5awMBotIYF2bbGDoDhpgQ4r/kcj4rQ==} + engines: {node: '>=20.0'} + + '@docusaurus/plugin-debug@3.9.2': + resolution: {integrity: sha512-j7a5hWuAFxyQAkilZwhsQ/b3T7FfHZ+0dub6j/GxKNFJp2h9qk/P1Bp7vrGASnvA9KNQBBL1ZXTe7jlh4VdPdA==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-google-analytics@3.9.2': + resolution: {integrity: sha512-mAwwQJ1Us9jL/lVjXtErXto4p4/iaLlweC54yDUK1a97WfkC6Z2k5/769JsFgwOwOP+n5mUQGACXOEQ0XDuVUw==} + engines: {node: '>=20.0'} + peerDependencies: + react: 
^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-google-gtag@3.9.2': + resolution: {integrity: sha512-YJ4lDCphabBtw19ooSlc1MnxtYGpjFV9rEdzjLsUnBCeis2djUyCozZaFhCg6NGEwOn7HDDyMh0yzcdRpnuIvA==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-google-tag-manager@3.9.2': + resolution: {integrity: sha512-LJtIrkZN/tuHD8NqDAW1Tnw0ekOwRTfobWPsdO15YxcicBo2ykKF0/D6n0vVBfd3srwr9Z6rzrIWYrMzBGrvNw==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-sitemap@3.9.2': + resolution: {integrity: sha512-WLh7ymgDXjG8oPoM/T4/zUP7KcSuFYRZAUTl8vR6VzYkfc18GBM4xLhcT+AKOwun6kBivYKUJf+vlqYJkm+RHw==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/plugin-svgr@3.9.2': + resolution: {integrity: sha512-n+1DE+5b3Lnf27TgVU5jM1d4x5tUh2oW5LTsBxJX4PsAPV0JGcmI6p3yLYtEY0LRVEIJh+8RsdQmRE66wSV8mw==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/preset-classic@3.9.2': + resolution: {integrity: sha512-IgyYO2Gvaigi21LuDIe+nvmN/dfGXAiMcV/murFqcpjnZc7jxFAxW+9LEjdPt61uZLxG4ByW/oUmX/DDK9t/8w==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/react-loadable@6.0.0': + resolution: {integrity: sha512-YMMxTUQV/QFSnbgrP3tjDzLHRg7vsbMn8e9HAa8o/1iXoiomo48b7sk/kkmWEuWNDPJVlKSJRB6Y2fHqdJk+SQ==} + peerDependencies: + react: '*' + + '@docusaurus/theme-classic@3.9.2': + resolution: {integrity: sha512-IGUsArG5hhekXd7RDb11v94ycpJpFdJPkLnt10fFQWOVxAtq5/D7hT6lzc2fhyQKaaCE62qVajOMKL7OiAFAIA==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/theme-common@3.9.2': + resolution: {integrity: 
sha512-6c4DAbR6n6nPbnZhY2V3tzpnKnGL+6aOsLvFL26VRqhlczli9eWG0VDUNoCQEPnGwDMhPS42UhSAnz5pThm5Ag==} + engines: {node: '>=20.0'} + peerDependencies: + '@docusaurus/plugin-content-docs': '*' + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/theme-mermaid@3.9.2': + resolution: {integrity: sha512-5vhShRDq/ntLzdInsQkTdoKWSzw8d1jB17sNPYhA/KvYYFXfuVEGHLM6nrf8MFbV8TruAHDG21Fn3W4lO8GaDw==} + engines: {node: '>=20.0'} + peerDependencies: + '@mermaid-js/layout-elk': ^0.1.9 + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + peerDependenciesMeta: + '@mermaid-js/layout-elk': + optional: true + + '@docusaurus/theme-search-algolia@3.9.2': + resolution: {integrity: sha512-GBDSFNwjnh5/LdkxCKQHkgO2pIMX1447BxYUBG2wBiajS21uj64a+gH/qlbQjDLxmGrbrllBrtJkUHxIsiwRnw==} + engines: {node: '>=20.0'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/theme-translations@3.9.2': + resolution: {integrity: sha512-vIryvpP18ON9T9rjgMRFLr2xJVDpw1rtagEGf8Ccce4CkTrvM/fRB8N2nyWYOW5u3DdjkwKw5fBa+3tbn9P4PA==} + engines: {node: '>=20.0'} + + '@docusaurus/types@3.9.2': + resolution: {integrity: sha512-Ux1JUNswg+EfUEmajJjyhIohKceitY/yzjRUpu04WXgvVz+fbhVC0p+R0JhvEu4ytw8zIAys2hrdpQPBHRIa8Q==} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + react-dom: ^18.0.0 || ^19.0.0 + + '@docusaurus/utils-common@3.9.2': + resolution: {integrity: sha512-I53UC1QctruA6SWLvbjbhCpAw7+X7PePoe5pYcwTOEXD/PxeP8LnECAhTHHwWCblyUX5bMi4QLRkxvyZ+IT8Aw==} + engines: {node: '>=20.0'} + + '@docusaurus/utils-validation@3.9.2': + resolution: {integrity: sha512-l7yk3X5VnNmATbwijJkexdhulNsQaNDwoagiwujXoxFbWLcxHQqNQ+c/IAlzrfMMOfa/8xSBZ7KEKDesE/2J7A==} + engines: {node: '>=20.0'} + + '@docusaurus/utils@3.9.2': + resolution: {integrity: sha512-lBSBiRruFurFKXr5Hbsl2thmGweAPmddhF3jb99U4EMDA5L+e5Y1rAkOS07Nvrup7HUMBDrCV45meaxZnt28nQ==} + engines: {node: '>=20.0'} + + '@easyops-cn/autocomplete.js@0.38.1': + resolution: {integrity: 
sha512-drg76jS6syilOUmVNkyo1c7ZEBPcPuK+aJA7AksM5ZIIbV57DMHCywiCr+uHyv8BE5jUTU98j/H7gVrkHrWW3Q==} + + '@easyops-cn/docusaurus-search-local@0.52.3': + resolution: {integrity: sha512-bkKHD+FoAY+sBvd9vcHudx8X5JQXkyGBcpstpJwOUTTpKwT0rOtUtnfmizpMu113LqdHxOxvlekYkGeTNGYYvw==} + engines: {node: '>=12'} + peerDependencies: + '@docusaurus/theme-common': ^2 || ^3 + react: ^16.14.0 || ^17 || ^18 || ^19 + react-dom: ^16.14.0 || 17 || ^18 || ^19 + + '@emnapi/core@1.8.1': + resolution: {integrity: sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg==} + + '@emnapi/runtime@1.8.1': + resolution: {integrity: sha512-mehfKSMWjjNol8659Z8KxEMrdSJDDot5SXMq00dM8BN4o+CLNXQ0xH2V7EchNHV4RmbZLmmPdEaXZc5H2FXmDg==} + + '@emnapi/wasi-threads@1.1.0': + resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} + + '@esbuild/aix-ppc64@0.21.5': + resolution: {integrity: sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/aix-ppc64@0.27.2': + resolution: {integrity: sha512-GZMB+a0mOMZs4MpDbj8RJp4cw+w1WV5NYD6xzgvzUJ5Ek2jerwfO2eADyI6ExDSUED+1X8aMbegahsJi+8mgpw==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.21.5': + resolution: {integrity: sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm64@0.27.2': + resolution: {integrity: sha512-pvz8ZZ7ot/RBphf8fv60ljmaoydPU12VuXHImtAs0XhLLw+EXBi2BLe3OYSBslR4rryHvweW5gmkKFwTiFy6KA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.21.5': + resolution: {integrity: sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==} + engines: {node: '>=12'} + cpu: [arm] + os: [android] + + '@esbuild/android-arm@0.27.2': + 
resolution: {integrity: sha512-DVNI8jlPa7Ujbr1yjU2PfUSRtAUZPG9I1RwW4F4xFB1Imiu2on0ADiI/c3td+KmDtVKNbi+nffGDQMfcIMkwIA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.21.5': + resolution: {integrity: sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==} + engines: {node: '>=12'} + cpu: [x64] + os: [android] + + '@esbuild/android-x64@0.27.2': + resolution: {integrity: sha512-z8Ank4Byh4TJJOh4wpz8g2vDy75zFL0TlZlkUkEwYXuPSgX8yzep596n6mT7905kA9uHZsf/o2OJZubl2l3M7A==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.21.5': + resolution: {integrity: sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==} + engines: {node: '>=12'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-arm64@0.27.2': + resolution: {integrity: sha512-davCD2Zc80nzDVRwXTcQP/28fiJbcOwvdolL0sOiOsbwBa72kegmVU0Wrh1MYrbuCL98Omp5dVhQFWRKR2ZAlg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.21.5': + resolution: {integrity: sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==} + engines: {node: '>=12'} + cpu: [x64] + os: [darwin] + + '@esbuild/darwin-x64@0.27.2': + resolution: {integrity: sha512-ZxtijOmlQCBWGwbVmwOF/UCzuGIbUkqB1faQRf5akQmxRJ1ujusWsb3CVfk/9iZKr2L5SMU5wPBi1UWbvL+VQA==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.21.5': + resolution: {integrity: sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==} + engines: {node: '>=12'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-arm64@0.27.2': + resolution: {integrity: sha512-lS/9CN+rgqQ9czogxlMcBMGd+l8Q3Nj1MFQwBZJyoEKI50XGxwuzznYdwcav6lpOGv5BqaZXqvBSiB/kJ5op+g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.21.5': + resolution: {integrity: 
sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.27.2': + resolution: {integrity: sha512-tAfqtNYb4YgPnJlEFu4c212HYjQWSO/w/h/lQaBK7RbwGIkBOuNKQI9tqWzx7Wtp7bTPaGC6MJvWI608P3wXYA==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.21.5': + resolution: {integrity: sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==} + engines: {node: '>=12'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm64@0.27.2': + resolution: {integrity: sha512-hYxN8pr66NsCCiRFkHUAsxylNOcAQaxSSkHMMjcpx0si13t1LHFphxJZUiGwojB1a/Hd5OiPIqDdXONia6bhTw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.21.5': + resolution: {integrity: sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==} + engines: {node: '>=12'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-arm@0.27.2': + resolution: {integrity: sha512-vWfq4GaIMP9AIe4yj1ZUW18RDhx6EPQKjwe7n8BbIecFtCQG4CfHGaHuh7fdfq+y3LIA2vGS/o9ZBGVxIDi9hw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.21.5': + resolution: {integrity: sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==} + engines: {node: '>=12'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-ia32@0.27.2': + resolution: {integrity: sha512-MJt5BRRSScPDwG2hLelYhAAKh9imjHK5+NE/tvnRLbIqUWa+0E9N4WNMjmp/kXXPHZGqPLxggwVhz7QP8CTR8w==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.21.5': + resolution: {integrity: sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==} + engines: {node: '>=12'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-loong64@0.27.2': + resolution: {integrity: 
sha512-lugyF1atnAT463aO6KPshVCJK5NgRnU4yb3FUumyVz+cGvZbontBgzeGFO1nF+dPueHD367a2ZXe1NtUkAjOtg==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.21.5': + resolution: {integrity: sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==} + engines: {node: '>=12'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-mips64el@0.27.2': + resolution: {integrity: sha512-nlP2I6ArEBewvJ2gjrrkESEZkB5mIoaTswuqNFRv/WYd+ATtUpe9Y09RnJvgvdag7he0OWgEZWhviS1OTOKixw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.21.5': + resolution: {integrity: sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-ppc64@0.27.2': + resolution: {integrity: sha512-C92gnpey7tUQONqg1n6dKVbx3vphKtTHJaNG2Ok9lGwbZil6DrfyecMsp9CrmXGQJmZ7iiVXvvZH6Ml5hL6XdQ==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.21.5': + resolution: {integrity: sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==} + engines: {node: '>=12'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-riscv64@0.27.2': + resolution: {integrity: sha512-B5BOmojNtUyN8AXlK0QJyvjEZkWwy/FKvakkTDCziX95AowLZKR6aCDhG7LeF7uMCXEJqwa8Bejz5LTPYm8AvA==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.21.5': + resolution: {integrity: sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==} + engines: {node: '>=12'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-s390x@0.27.2': + resolution: {integrity: sha512-p4bm9+wsPwup5Z8f4EpfN63qNagQ47Ua2znaqGH6bqLlmJ4bx97Y9JdqxgGZ6Y8xVTixUnEkoKSHcpRlDnNr5w==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.21.5': + resolution: {integrity: 
sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==} + engines: {node: '>=12'} + cpu: [x64] + os: [linux] + + '@esbuild/linux-x64@0.27.2': + resolution: {integrity: sha512-uwp2Tip5aPmH+NRUwTcfLb+W32WXjpFejTIOWZFw/v7/KnpCDKG66u4DLcurQpiYTiYwQ9B7KOeMJvLCu/OvbA==} + engines: {node: '>=18'} + cpu: [x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.27.2': + resolution: {integrity: sha512-Kj6DiBlwXrPsCRDeRvGAUb/LNrBASrfqAIok+xB0LxK8CHqxZ037viF13ugfsIpePH93mX7xfJp97cyDuTZ3cw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.21.5': + resolution: {integrity: sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==} + engines: {node: '>=12'} + cpu: [x64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.27.2': + resolution: {integrity: sha512-HwGDZ0VLVBY3Y+Nw0JexZy9o/nUAWq9MlV7cahpaXKW6TOzfVno3y3/M8Ga8u8Yr7GldLOov27xiCnqRZf0tCA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.27.2': + resolution: {integrity: sha512-DNIHH2BPQ5551A7oSHD0CKbwIA/Ox7+78/AWkbS5QoRzaqlev2uFayfSxq68EkonB+IKjiuxBFoV8ESJy8bOHA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.21.5': + resolution: {integrity: sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==} + engines: {node: '>=12'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.27.2': + resolution: {integrity: sha512-/it7w9Nb7+0KFIzjalNJVR5bOzA9Vay+yIPLVHfIQYG/j+j9VTH84aNB8ExGKPU4AzfaEvN9/V4HV+F+vo8OEg==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.2': + resolution: {integrity: sha512-LRBbCmiU51IXfeXk59csuX/aSaToeG7w48nMwA6049Y4J4+VbWALAuXcs+qcD04rHDuSCSRKdmY63sruDS5qag==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.21.5': + resolution: {integrity: 
sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==} + engines: {node: '>=12'} + cpu: [x64] + os: [sunos] + + '@esbuild/sunos-x64@0.27.2': + resolution: {integrity: sha512-kMtx1yqJHTmqaqHPAzKCAkDaKsffmXkPHThSfRwZGyuqyIeBvf08KSsYXl+abf5HDAPMJIPnbBfXvP2ZC2TfHg==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.21.5': + resolution: {integrity: sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==} + engines: {node: '>=12'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-arm64@0.27.2': + resolution: {integrity: sha512-Yaf78O/B3Kkh+nKABUF++bvJv5Ijoy9AN1ww904rOXZFLWVc5OLOfL56W+C8F9xn5JQZa3UX6m+IktJnIb1Jjg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.21.5': + resolution: {integrity: sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==} + engines: {node: '>=12'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-ia32@0.27.2': + resolution: {integrity: sha512-Iuws0kxo4yusk7sw70Xa2E2imZU5HoixzxfGCdxwBdhiDgt9vX9VUCBhqcwY7/uh//78A1hMkkROMJq9l27oLQ==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.21.5': + resolution: {integrity: sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==} + engines: {node: '>=12'} + cpu: [x64] + os: [win32] + + '@esbuild/win32-x64@0.27.2': + resolution: {integrity: sha512-sRdU18mcKf7F+YgheI/zGf5alZatMUTKj/jNS6l744f9u3WFu4v7twcUI9vu4mknF4Y9aDlblIie0IM+5xxaqQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + + '@eslint-community/eslint-utils@4.9.1': + resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + peerDependencies: + eslint: ^6.0.0 || ^7.0.0 || >=8.0.0 + + '@eslint-community/regexpp@4.12.2': + resolution: {integrity: 
sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==} + engines: {node: ^12.0.0 || ^14.0.0 || >=16.0.0} + + '@eslint/config-array@0.21.1': + resolution: {integrity: sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/config-helpers@0.4.2': + resolution: {integrity: sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/core@0.17.0': + resolution: {integrity: sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/eslintrc@3.3.3': + resolution: {integrity: sha512-Kr+LPIUVKz2qkx1HAMH8q1q6azbqBAsXJUxBl/ODDuVPX45Z9DfwB8tPjTi6nNZ8BuM3nbJxC5zCAg5elnBUTQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/js@9.39.2': + resolution: {integrity: sha512-q1mjIoW1VX4IvSocvM/vbTiveKC4k9eLrajNEuSsmjymSDEbpGddtpfOoN7YGAqBK3NG+uqo8ia4PDTt8buCYA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/object-schema@2.1.7': + resolution: {integrity: sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@eslint/plugin-kit@0.4.1': + resolution: {integrity: sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@fastify/busboy@2.1.1': + resolution: {integrity: sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==} + engines: {node: '>=14'} + + '@hapi/hoek@9.3.0': + resolution: {integrity: sha512-/c6rf4UJlmHlC9b5BaNvzAcFv7HZ2QHaV0D4/HNlBdvFnvQq8RI4kYdhyPCl7Xj+oWvTWQ8ujhqS53LIgAe6KQ==} + + '@hapi/topo@5.1.0': + resolution: {integrity: 
sha512-foQZKJig7Ob0BMAYBfcJk8d77QtOe7Wo4ox7ff1lQYoNNAb6jwcY1ncdoy2e9wQZzvNy7ODZCYJkK8kzmcAnAg==} + + '@humanfs/core@0.19.1': + resolution: {integrity: sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==} + engines: {node: '>=18.18.0'} + + '@humanfs/node@0.16.7': + resolution: {integrity: sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ==} + engines: {node: '>=18.18.0'} + + '@humanwhocodes/module-importer@1.0.1': + resolution: {integrity: sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==} + engines: {node: '>=12.22'} + + '@humanwhocodes/retry@0.4.3': + resolution: {integrity: sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==} + engines: {node: '>=18.18'} + + '@iconify/types@2.0.0': + resolution: {integrity: sha512-+wluvCrRhXrhyOmRDJ3q8mux9JkKy5SJ/v8ol2tu4FVjyYvtEzkc/3pK15ET6RKg4b4w4BmTk1+gsCUhf21Ykg==} + + '@iconify/utils@3.1.0': + resolution: {integrity: sha512-Zlzem1ZXhI1iHeeERabLNzBHdOa4VhQbqAcOQaMKuTuyZCpwKbC2R4Dd0Zo3g9EAc+Y4fiarO8HIHRAth7+skw==} + + '@inquirer/external-editor@1.0.3': + resolution: {integrity: sha512-RWbSrDiYmO4LbejWY7ttpxczuwQyZLBUyygsA9Nsv95hpzUWwnNTVQmAq3xuh7vNwCp07UTmE5i11XAEExx4RA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + + '@istanbuljs/schema@0.1.3': + resolution: {integrity: sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==} + engines: {node: '>=8'} + + '@jest/schemas@29.6.3': + resolution: {integrity: sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + 
'@jest/types@29.6.3': + resolution: {integrity: sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + '@jridgewell/gen-mapping@0.3.13': + resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} + + '@jridgewell/remapping@2.3.5': + resolution: {integrity: sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==} + + '@jridgewell/resolve-uri@3.1.2': + resolution: {integrity: sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==} + engines: {node: '>=6.0.0'} + + '@jridgewell/source-map@0.3.11': + resolution: {integrity: sha512-ZMp1V8ZFcPG5dIWnQLr3NSI1MiCU7UETdS/A0G8V/XWHvJv3ZsFqutJn1Y5RPmAPX6F3BiE397OqveU/9NCuIA==} + + '@jridgewell/sourcemap-codec@1.5.5': + resolution: {integrity: sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==} + + '@jridgewell/trace-mapping@0.3.31': + resolution: {integrity: sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + + '@jsonjoy.com/base64@1.1.2': + resolution: {integrity: sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/base64@17.65.0': + resolution: {integrity: sha512-Xrh7Fm/M0QAYpekSgmskdZYnFdSGnsxJ/tHaolA4bNwWdG9i65S8m83Meh7FOxyJyQAdo4d4J97NOomBLEfkDQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/buffers@1.2.1': + resolution: {integrity: sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/buffers@17.65.0': + resolution: {integrity: 
sha512-eBrIXd0/Ld3p9lpDDlMaMn6IEfWqtHMD+z61u0JrIiPzsV1r7m6xDZFRxJyvIFTEO+SWdYF9EiQbXZGd8BzPfA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/codegen@1.0.0': + resolution: {integrity: sha512-E8Oy+08cmCf0EK/NMxpaJZmOxPqM+6iSe2S4nlSBrPZOORoDJILxtbSUEDKQyTamm/BVAhIGllOBNU79/dwf0g==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/codegen@17.65.0': + resolution: {integrity: sha512-7MXcRYe7n3BG+fo3jicvjB0+6ypl2Y/bQp79Sp7KeSiiCgLqw4Oled6chVv07/xLVTdo3qa1CD0VCCnPaw+RGA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-core@4.56.10': + resolution: {integrity: sha512-PyAEA/3cnHhsGcdY+AmIU+ZPqTuZkDhCXQ2wkXypdLitSpd6d5Ivxhnq4wa2ETRWFVJGabYynBWxIijOswSmOw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-fsa@4.56.10': + resolution: {integrity: sha512-/FVK63ysNzTPOnCCcPoPHt77TOmachdMS422txM4KhxddLdbW1fIbFMYH0AM0ow/YchCyS5gqEjKLNyv71j/5Q==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node-builtins@4.56.10': + resolution: {integrity: sha512-uUnKz8R0YJyKq5jXpZtkGV9U0pJDt8hmYcLRrPjROheIfjMXsz82kXMgAA/qNg0wrZ1Kv+hrg7azqEZx6XZCVw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node-to-fsa@4.56.10': + resolution: {integrity: sha512-oH+O6Y4lhn9NyG6aEoFwIBNKZeYy66toP5LJcDOMBgL99BKQMUf/zWJspdRhMdn/3hbzQsZ8EHHsuekbFLGUWw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node-utils@4.56.10': + resolution: {integrity: sha512-8EuPBgVI2aDPwFdaNQeNpHsyqPi3rr+85tMNG/lHvQLiVjzoZsvxA//Xd8aB567LUhy4QS03ptT+unkD/DIsNg==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-node@4.56.10': + resolution: {integrity: sha512-7R4Gv3tkUdW3dXfXiOkqxkElxKNVdd8BDOWC0/dbERd0pXpPY+s2s1Mino+aTvkGrFPiY+mmVxA7zhskm4Ue4Q==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-print@4.56.10': + resolution: {integrity: 
sha512-JW4fp5mAYepzFsSGrQ48ep8FXxpg4niFWHdF78wDrFGof7F3tKDJln72QFDEn/27M1yHd4v7sKHHVPh78aWcEw==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/fs-snapshot@4.56.10': + resolution: {integrity: sha512-DkR6l5fj7+qj0+fVKm/OOXMGfDFCGXLfyHkORH3DF8hxkpDgIHbhf/DwncBMs2igu/ST7OEkexn1gIqoU6Y+9g==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pack@1.21.0': + resolution: {integrity: sha512-+AKG+R2cfZMShzrF2uQw34v3zbeDYUqnQ+jg7ORic3BGtfw9p/+N6RJbq/kkV8JmYZaINknaEQ2m0/f693ZPpg==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pack@17.65.0': + resolution: {integrity: sha512-e0SG/6qUCnVhHa0rjDJHgnXnbsacooHVqQHxspjvlYQSkHm+66wkHw6Gql+3u/WxI/b1VsOdUi0M+fOtkgKGdQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pointer@1.0.2': + resolution: {integrity: sha512-Fsn6wM2zlDzY1U+v4Nc8bo3bVqgfNTGcn6dMgs6FjrEnt4ZCe60o6ByKRjOGlI2gow0aE/Q41QOigdTqkyK5fg==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/json-pointer@17.65.0': + resolution: {integrity: sha512-uhTe+XhlIZpWOxgPcnO+iSCDgKKBpwkDVTyYiXX9VayGV8HSFVJM67M6pUE71zdnXF1W0Da21AvnhlmdwYPpow==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/util@1.9.0': + resolution: {integrity: sha512-pLuQo+VPRnN8hfPqUTLTHk126wuYdXVxE6aDmjSeV4NCAgyxWbiOIeNJVtID3h1Vzpoi9m4jXezf73I6LgabgQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@jsonjoy.com/util@17.65.0': + resolution: {integrity: sha512-cWiEHZccQORf96q2y6zU3wDeIVPeidmGqd9cNKJRYoVHTV0S1eHPy5JTbHpMnGfDvtvujQwQozOqgO9ABu6h0w==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + '@leichtgewicht/ip-codec@2.0.5': + resolution: {integrity: sha512-Vo+PSpZG2/fmgmiNzYK9qWRh8h/CHrwD0mo1h1DzL4yzHNSfWYujGTYsWGreD000gcgmZ7K4Ys6Tx9TxtsKdDw==} + + '@manypkg/find-root@1.1.0': + resolution: {integrity: 
sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA==} + + '@manypkg/get-packages@1.1.3': + resolution: {integrity: sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A==} + + '@mdx-js/mdx@3.1.1': + resolution: {integrity: sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==} + + '@mdx-js/react@3.1.1': + resolution: {integrity: sha512-f++rKLQgUVYDAtECQ6fn/is15GkEH9+nZPM3MS0RcxVqoTfawHvDlSCH7JbMhAM6uJ32v3eXLvLmLvjGu7PTQw==} + peerDependencies: + '@types/react': '>=16' + react: '>=16' + + '@mermaid-js/parser@0.6.3': + resolution: {integrity: sha512-lnjOhe7zyHjc+If7yT4zoedx2vo4sHaTmtkl1+or8BRTnCtDmcTpAjpzDSfCZrshM5bCoz0GyidzadJAH1xobA==} + + '@napi-rs/wasm-runtime@0.2.12': + resolution: {integrity: sha512-ZVWUcfwY4E/yPitQJl481FjFo3K22D6qF0DuFH6Y/nbnE11GY5uguDxZMGXPQ8WQ0128MXQD7TnfHyK4oWoIJQ==} + + '@noble/hashes@1.4.0': + resolution: {integrity: sha512-V1JJ1WTRUqHHrOSh597hURcMqVKVGL/ea3kv0gSnEdsEZ0/+VyPghM1lMNGc00z7CIQorSvbKpuJkxvuHbvdbg==} + engines: {node: '>= 16'} + + '@node-rs/jieba-android-arm-eabi@1.10.4': + resolution: {integrity: sha512-MhyvW5N3Fwcp385d0rxbCWH42kqDBatQTyP8XbnYbju2+0BO/eTeCCLYj7Agws4pwxn2LtdldXRSKavT7WdzNA==} + engines: {node: '>= 10'} + cpu: [arm] + os: [android] + + '@node-rs/jieba-android-arm64@1.10.4': + resolution: {integrity: sha512-XyDwq5+rQ+Tk55A+FGi6PtJbzf974oqnpyCcCPzwU3QVXJCa2Rr4Lci+fx8oOpU4plT3GuD+chXMYLsXipMgJA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [android] + + '@node-rs/jieba-darwin-arm64@1.10.4': + resolution: {integrity: sha512-G++RYEJ2jo0rxF9626KUy90wp06TRUjAsvY/BrIzEOX/ingQYV/HjwQzNPRR1P1o32a6/U8RGo7zEBhfdybL6w==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [darwin] + + '@node-rs/jieba-darwin-x64@1.10.4': + resolution: {integrity: sha512-MmDNeOb2TXIZCPyWCi2upQnZpPjAxw5ZGEj6R8kNsPXVFALHIKMa6ZZ15LCOkSTsKXVC17j2t4h+hSuyYb6qfQ==} + engines: {node: '>= 10'} + cpu: [x64] + os: [darwin] + + 
'@node-rs/jieba-freebsd-x64@1.10.4': + resolution: {integrity: sha512-/x7aVQ8nqUWhpXU92RZqd333cq639i/olNpd9Z5hdlyyV5/B65LLy+Je2B2bfs62PVVm5QXRpeBcZqaHelp/bg==} + engines: {node: '>= 10'} + cpu: [x64] + os: [freebsd] + + '@node-rs/jieba-linux-arm-gnueabihf@1.10.4': + resolution: {integrity: sha512-crd2M35oJBRLkoESs0O6QO3BBbhpv+tqXuKsqhIG94B1d02RVxtRIvSDwO33QurxqSdvN9IeSnVpHbDGkuXm3g==} + engines: {node: '>= 10'} + cpu: [arm] + os: [linux] + + '@node-rs/jieba-linux-arm64-gnu@1.10.4': + resolution: {integrity: sha512-omIzNX1psUzPcsdnUhGU6oHeOaTCuCjUgOA/v/DGkvWC1jLcnfXe4vdYbtXMh4XOCuIgS1UCcvZEc8vQLXFbXQ==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + + '@node-rs/jieba-linux-arm64-musl@1.10.4': + resolution: {integrity: sha512-Y/tiJ1+HeS5nnmLbZOE+66LbsPOHZ/PUckAYVeLlQfpygLEpLYdlh0aPpS5uiaWMjAXYZYdFkpZHhxDmSLpwpw==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [linux] + + '@node-rs/jieba-linux-x64-gnu@1.10.4': + resolution: {integrity: sha512-WZO8ykRJpWGE9MHuZpy1lu3nJluPoeB+fIJJn5CWZ9YTVhNDWoCF4i/7nxz1ntulINYGQ8VVuCU9LD86Mek97g==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + + '@node-rs/jieba-linux-x64-musl@1.10.4': + resolution: {integrity: sha512-uBBD4S1rGKcgCyAk6VCKatEVQb6EDD5I40v/DxODi5CuZVCANi9m5oee/MQbAoaX7RydA2f0OSCE9/tcwXEwUg==} + engines: {node: '>= 10'} + cpu: [x64] + os: [linux] + + '@node-rs/jieba-wasm32-wasi@1.10.4': + resolution: {integrity: sha512-Y2umiKHjuIJy0uulNDz9SDYHdfq5Hmy7jY5nORO99B4pySKkcrMjpeVrmWXJLIsEKLJwcCXHxz8tjwU5/uhz0A==} + engines: {node: '>=14.0.0'} + cpu: [wasm32] + + '@node-rs/jieba-win32-arm64-msvc@1.10.4': + resolution: {integrity: sha512-nwMtViFm4hjqhz1it/juQnxpXgqlGltCuWJ02bw70YUDMDlbyTy3grCJPpQQpueeETcALUnTxda8pZuVrLRcBA==} + engines: {node: '>= 10'} + cpu: [arm64] + os: [win32] + + '@node-rs/jieba-win32-ia32-msvc@1.10.4': + resolution: {integrity: sha512-DCAvLx7Z+W4z5oKS+7vUowAJr0uw9JBw8x1Y23Xs/xMA4Em+OOSiaF5/tCJqZUCJ8uC4QeImmgDFiBqGNwxlyA==} + engines: {node: '>= 10'} + cpu: [ia32] + os: [win32] + + 
'@node-rs/jieba-win32-x64-msvc@1.10.4': + resolution: {integrity: sha512-+sqemSfS1jjb+Tt7InNbNzrRh1Ua3vProVvC4BZRPg010/leCbGFFiQHpzcPRfpxAXZrzG5Y0YBTsPzN/I4yHQ==} + engines: {node: '>= 10'} + cpu: [x64] + os: [win32] + + '@node-rs/jieba@1.10.4': + resolution: {integrity: sha512-GvDgi8MnBiyWd6tksojej8anIx18244NmIOc1ovEw8WKNUejcccLfyu8vj66LWSuoZuKILVtNsOy4jvg3aoxIw==} + engines: {node: '>= 10'} + + '@nodelib/fs.scandir@2.1.5': + resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} + engines: {node: '>= 8'} + + '@nodelib/fs.stat@2.0.5': + resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} + engines: {node: '>= 8'} + + '@nodelib/fs.walk@1.2.8': + resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} + engines: {node: '>= 8'} + + '@peculiar/asn1-cms@2.6.0': + resolution: {integrity: sha512-2uZqP+ggSncESeUF/9Su8rWqGclEfEiz1SyU02WX5fUONFfkjzS2Z/F1Li0ofSmf4JqYXIOdCAZqIXAIBAT1OA==} + + '@peculiar/asn1-csr@2.6.0': + resolution: {integrity: sha512-BeWIu5VpTIhfRysfEp73SGbwjjoLL/JWXhJ/9mo4vXnz3tRGm+NGm3KNcRzQ9VMVqwYS2RHlolz21svzRXIHPQ==} + + '@peculiar/asn1-ecc@2.6.0': + resolution: {integrity: sha512-FF3LMGq6SfAOwUG2sKpPXblibn6XnEIKa+SryvUl5Pik+WR9rmRA3OCiwz8R3lVXnYnyRkSZsSLdml8H3UiOcw==} + + '@peculiar/asn1-pfx@2.6.0': + resolution: {integrity: sha512-rtUvtf+tyKGgokHHmZzeUojRZJYPxoD/jaN1+VAB4kKR7tXrnDCA/RAWXAIhMJJC+7W27IIRGe9djvxKgsldCQ==} + + '@peculiar/asn1-pkcs8@2.6.0': + resolution: {integrity: sha512-KyQ4D8G/NrS7Fw3XCJrngxmjwO/3htnA0lL9gDICvEQ+GJ+EPFqldcJQTwPIdvx98Tua+WjkdKHSC0/Km7T+lA==} + + '@peculiar/asn1-pkcs9@2.6.0': + resolution: {integrity: sha512-b78OQ6OciW0aqZxdzliXGYHASeCvvw5caqidbpQRYW2mBtXIX2WhofNXTEe7NyxTb0P6J62kAAWLwn0HuMF1Fw==} + + '@peculiar/asn1-rsa@2.6.0': + resolution: {integrity: 
sha512-Nu4C19tsrTsCp9fDrH+sdcOKoVfdfoQQ7S3VqjJU6vedR7tY3RLkQ5oguOIB3zFW33USDUuYZnPEQYySlgha4w==} + + '@peculiar/asn1-schema@2.6.0': + resolution: {integrity: sha512-xNLYLBFTBKkCzEZIw842BxytQQATQv+lDTCEMZ8C196iJcJJMBUZxrhSTxLaohMyKK8QlzRNTRkUmanucnDSqg==} + + '@peculiar/asn1-x509-attr@2.6.0': + resolution: {integrity: sha512-MuIAXFX3/dc8gmoZBkwJWxUWOSvG4MMDntXhrOZpJVMkYX+MYc/rUAU2uJOved9iJEoiUx7//3D8oG83a78UJA==} + + '@peculiar/asn1-x509@2.6.0': + resolution: {integrity: sha512-uzYbPEpoQiBoTq0/+jZtpM6Gq6zADBx+JNFP3yqRgziWBxQ/Dt/HcuvRfm9zJTPdRcBqPNdaRHTVwpyiq6iNMA==} + + '@peculiar/x509@1.14.3': + resolution: {integrity: sha512-C2Xj8FZ0uHWeCXXqX5B4/gVFQmtSkiuOolzAgutjTfseNOHT3pUjljDZsTSxXFGgio54bCzVFqmEOUrIVk8RDA==} + engines: {node: '>=20.0.0'} + + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@pnpm/config.env-replace@1.1.0': + resolution: {integrity: sha512-htyl8TWnKL7K/ESFa1oW2UB5lVDxuF5DpM7tBi6Hu2LNL3mWkIzNLG6N4zoCUP1lCKNxWy/3iu8mS8MvToGd6w==} + engines: {node: '>=12.22.0'} + + '@pnpm/network.ca-file@1.0.2': + resolution: {integrity: sha512-YcPQ8a0jwYU9bTdJDpXjMi7Brhkr1mXsXrUJvjqM2mQDgkRiz8jFaQGOdaLxgjtUfQgZhKy/O3cG/YwmgKaxLA==} + engines: {node: '>=12.22.0'} + + '@pnpm/npm-conf@3.0.2': + resolution: {integrity: sha512-h104Kh26rR8tm+a3Qkc5S4VLYint3FE48as7+/5oCEcKR2idC/pF1G6AhIXKI+eHPJa/3J9i5z0Al47IeGHPkA==} + engines: {node: '>=12'} + + '@polka/url@1.0.0-next.29': + resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + + '@rollup/rollup-android-arm-eabi@4.57.1': + resolution: {integrity: sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.57.1': + resolution: {integrity: 
sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.57.1': + resolution: {integrity: sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.57.1': + resolution: {integrity: sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.57.1': + resolution: {integrity: sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.57.1': + resolution: {integrity: sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.57.1': + resolution: {integrity: sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.57.1': + resolution: {integrity: sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.57.1': + resolution: {integrity: sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.57.1': + resolution: {integrity: sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loong64-gnu@4.57.1': + resolution: {integrity: sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-loong64-musl@4.57.1': + resolution: {integrity: 
sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.57.1': + resolution: {integrity: sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-ppc64-musl@4.57.1': + resolution: {integrity: sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.57.1': + resolution: {integrity: sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.57.1': + resolution: {integrity: sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.57.1': + resolution: {integrity: sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.57.1': + resolution: {integrity: sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.57.1': + resolution: {integrity: sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-openbsd-x64@4.57.1': + resolution: {integrity: sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.57.1': + resolution: {integrity: sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==} + cpu: [arm64] + os: [openharmony] + + '@rollup/rollup-win32-arm64-msvc@4.57.1': + resolution: {integrity: 
sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.57.1': + resolution: {integrity: sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-gnu@4.57.1': + resolution: {integrity: sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==} + cpu: [x64] + os: [win32] + + '@rollup/rollup-win32-x64-msvc@4.57.1': + resolution: {integrity: sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==} + cpu: [x64] + os: [win32] + + '@sideway/address@4.1.5': + resolution: {integrity: sha512-IqO/DUQHUkPeixNQ8n0JA6102hT9CmaljNTPmQ1u8MEhBo/R4Q8eKLN/vGZxuebwOroDB4cbpjheD4+/sKFK4Q==} + + '@sideway/formula@3.0.1': + resolution: {integrity: sha512-/poHZJJVjx3L+zVD6g9KgHfYnb443oi7wLu/XKojDviHy6HOEOA6z1Trk5aR1dGcmPenJEgb2sK2I80LeS3MIg==} + + '@sideway/pinpoint@2.0.0': + resolution: {integrity: sha512-RNiOoTPkptFtSVzQevY/yWtZwf/RxyVnPy/OcA9HBM3MlGDnBEYL5B41H0MTn0Uec8Hi+2qUtTfG2WWZBmMejQ==} + + '@sinclair/typebox@0.27.8': + resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==} + + '@sindresorhus/is@4.6.0': + resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==} + engines: {node: '>=10'} + + '@sindresorhus/is@5.6.0': + resolution: {integrity: sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==} + engines: {node: '>=14.16'} + + '@slorber/react-helmet-async@1.3.0': + resolution: {integrity: sha512-e9/OK8VhwUSc67diWI8Rb3I0YgI9/SBQtnhe9aEuK6MhZm7ntZZimXgwXnd8W96YTmSOb9M4d8LwhRZyhWr/1A==} + peerDependencies: + react: ^16.6.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + react-dom: ^16.6.0 || ^17.0.0 || ^18.0.0 || ^19.0.0 + + 
'@slorber/remark-comment@1.0.0': + resolution: {integrity: sha512-RCE24n7jsOj1M0UPvIQCHTe7fI0sFL4S2nwKVWwHyVr/wI/H8GosgsJGyhnsZoGFnD/P2hLf1mSbrrgSLN93NA==} + + '@svgr/babel-plugin-add-jsx-attribute@8.0.0': + resolution: {integrity: sha512-b9MIk7yhdS1pMCZM8VeNfUlSKVRhsHZNMl5O9SfaX0l0t5wjdgu4IDzGB8bpnGBBOjGST3rRFVsaaEtI4W6f7g==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-remove-jsx-attribute@8.0.0': + resolution: {integrity: sha512-BcCkm/STipKvbCl6b7QFrMh/vx00vIP63k2eM66MfHJzPr6O2U0jYEViXkHJWqXqQYjdeA9cuCl5KWmlwjDvbA==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-remove-jsx-empty-expression@8.0.0': + resolution: {integrity: sha512-5BcGCBfBxB5+XSDSWnhTThfI9jcO5f0Ai2V24gZpG+wXF14BzwxxdDb4g6trdOux0rhibGs385BeFMSmxtS3uA==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-replace-jsx-attribute-value@8.0.0': + resolution: {integrity: sha512-KVQ+PtIjb1BuYT3ht8M5KbzWBhdAjjUPdlMtpuw/VjT8coTrItWX6Qafl9+ji831JaJcu6PJNKCV0bp01lBNzQ==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-svg-dynamic-title@8.0.0': + resolution: {integrity: sha512-omNiKqwjNmOQJ2v6ge4SErBbkooV2aAWwaPFs2vUY7p7GhVkzRkJ00kILXQvRhA6miHnNpXv7MRnnSjdRjK8og==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-svg-em-dimensions@8.0.0': + resolution: {integrity: sha512-mURHYnu6Iw3UBTbhGwE/vsngtCIbHE43xCRK7kCw4t01xyGqb2Pd+WXekRRoFOBIY29ZoOhUCTEweDMdrjfi9g==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-transform-react-native-svg@8.1.0': + resolution: {integrity: sha512-Tx8T58CHo+7nwJ+EhUwx3LfdNSG9R2OKfaIXXs5soiy5HtgoAEkDay9LIimLOcG8dJQH1wPZp/cnAv6S9CrR1Q==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-plugin-transform-svg-component@8.0.0': + resolution: {integrity: 
sha512-DFx8xa3cZXTdb/k3kfPeaixecQLgKh5NVBMwD0AQxOzcZawK4oo1Jh9LbrcACUivsCA7TLG8eeWgrDXjTMhRmw==} + engines: {node: '>=12'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/babel-preset@8.1.0': + resolution: {integrity: sha512-7EYDbHE7MxHpv4sxvnVPngw5fuR6pw79SkcrILHJ/iMpuKySNCl5W1qcwPEpU+LgyRXOaAFgH0KhwD18wwg6ug==} + engines: {node: '>=14'} + peerDependencies: + '@babel/core': ^7.0.0-0 + + '@svgr/core@8.1.0': + resolution: {integrity: sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==} + engines: {node: '>=14'} + + '@svgr/hast-util-to-babel-ast@8.0.0': + resolution: {integrity: sha512-EbDKwO9GpfWP4jN9sGdYwPBU0kdomaPIL2Eu4YwmgP+sJeXT+L7bMwJUBnhzfH8Q2qMBqZ4fJwpCyYsAN3mt2Q==} + engines: {node: '>=14'} + + '@svgr/plugin-jsx@8.1.0': + resolution: {integrity: sha512-0xiIyBsLlr8quN+WyuxooNW9RJ0Dpr8uOnH/xrCVO8GLUcwHISwj1AG0k+LFzteTkAA0GbX0kj9q6Dk70PTiPA==} + engines: {node: '>=14'} + peerDependencies: + '@svgr/core': '*' + + '@svgr/plugin-svgo@8.1.0': + resolution: {integrity: sha512-Ywtl837OGO9pTLIN/onoWLmDQ4zFUycI1g76vuKGEz6evR/ZTJlJuz3G/fIkb6OVBJ2g0o6CGJzaEjfmEo3AHA==} + engines: {node: '>=14'} + peerDependencies: + '@svgr/core': '*' + + '@svgr/webpack@8.1.0': + resolution: {integrity: sha512-LnhVjMWyMQV9ZmeEy26maJk+8HTIbd59cH4F2MJ439k9DqejRisfFNGAPvRYlKETuh9LrImlS8aKsBgKjMA8WA==} + engines: {node: '>=14'} + + '@szmarczak/http-timer@5.0.1': + resolution: {integrity: sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw==} + engines: {node: '>=14.16'} + + '@trysound/sax@0.2.0': + resolution: {integrity: sha512-L7z9BgrNEcYyUYtF+HaEfiS5ebkh9jXqbszz7pC0hRBPaatV0XjSD3+eHrpqFemQfgwiFF0QPIarnIihIDn7OA==} + engines: {node: '>=10.13.0'} + + '@tybys/wasm-util@0.10.1': + resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + + '@types/body-parser@1.19.6': + resolution: {integrity: 
sha512-HLFeCYgz89uk22N5Qg3dvGvsv46B8GLvKKo1zKG4NybA8U2DiEO3w9lqGg29t/tfLRJpJ6iQxnVw4OnB7MoM9g==} + + '@types/bonjour@3.5.13': + resolution: {integrity: sha512-z9fJ5Im06zvUL548KvYNecEVlA7cVDkGUi6kZusb04mpyEFKCIZJvloCcmpmLaIahDpOQGHaHmG6imtPMmPXGQ==} + + '@types/connect-history-api-fallback@1.5.4': + resolution: {integrity: sha512-n6Cr2xS1h4uAulPRdlw6Jl6s1oG8KrVilPN2yUITEs+K48EzMJJ3W1xy8K5eWuFvjp3R74AOIGSmp2UfBJ8HFw==} + + '@types/connect@3.4.38': + resolution: {integrity: sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==} + + '@types/d3-array@3.2.2': + resolution: {integrity: sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==} + + '@types/d3-axis@3.0.6': + resolution: {integrity: sha512-pYeijfZuBd87T0hGn0FO1vQ/cgLk6E1ALJjfkC0oJ8cbwkZl3TpgS8bVBLZN+2jjGgg38epgxb2zmoGtSfvgMw==} + + '@types/d3-brush@3.0.6': + resolution: {integrity: sha512-nH60IZNNxEcrh6L1ZSMNA28rj27ut/2ZmI3r96Zd+1jrZD++zD3LsMIjWlvg4AYrHn/Pqz4CF3veCxGjtbqt7A==} + + '@types/d3-chord@3.0.6': + resolution: {integrity: sha512-LFYWWd8nwfwEmTZG9PfQxd17HbNPksHBiJHaKuY1XeqscXacsS2tyoo6OdRsjf+NQYeB6XrNL3a25E3gH69lcg==} + + '@types/d3-color@3.1.3': + resolution: {integrity: sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==} + + '@types/d3-contour@3.0.6': + resolution: {integrity: sha512-BjzLgXGnCWjUSYGfH1cpdo41/hgdWETu4YxpezoztawmqsvCeep+8QGfiY6YbDvfgHz/DkjeIkkZVJavB4a3rg==} + + '@types/d3-delaunay@6.0.4': + resolution: {integrity: sha512-ZMaSKu4THYCU6sV64Lhg6qjf1orxBthaC161plr5KuPHo3CNm8DTHiLw/5Eq2b6TsNP0W0iJrUOFscY6Q450Hw==} + + '@types/d3-dispatch@3.0.7': + resolution: {integrity: sha512-5o9OIAdKkhN1QItV2oqaE5KMIiXAvDWBDPrD85e58Qlz1c1kI/J0NcqbEG88CoTwJrYe7ntUCVfeUl2UJKbWgA==} + + '@types/d3-drag@3.0.7': + resolution: {integrity: sha512-HE3jVKlzU9AaMazNufooRJ5ZpWmLIoc90A37WU2JMmeq28w1FQqCZswHZ3xR+SuxYftzHq6WU6KJHvqxKzTxxQ==} + + '@types/d3-dsv@3.0.7': + resolution: {integrity: 
sha512-n6QBF9/+XASqcKK6waudgL0pf/S5XHPPI8APyMLLUHd8NqouBGLsU8MgtO7NINGtPBtk9Kko/W4ea0oAspwh9g==} + + '@types/d3-ease@3.0.2': + resolution: {integrity: sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==} + + '@types/d3-fetch@3.0.7': + resolution: {integrity: sha512-fTAfNmxSb9SOWNB9IoG5c8Hg6R+AzUHDRlsXsDZsNp6sxAEOP0tkP3gKkNSO/qmHPoBFTxNrjDprVHDQDvo5aA==} + + '@types/d3-force@3.0.10': + resolution: {integrity: sha512-ZYeSaCF3p73RdOKcjj+swRlZfnYpK1EbaDiYICEEp5Q6sUiqFaFQ9qgoshp5CzIyyb/yD09kD9o2zEltCexlgw==} + + '@types/d3-format@3.0.4': + resolution: {integrity: sha512-fALi2aI6shfg7vM5KiR1wNJnZ7r6UuggVqtDA+xiEdPZQwy/trcQaHnwShLuLdta2rTymCNpxYTiMZX/e09F4g==} + + '@types/d3-geo@3.1.0': + resolution: {integrity: sha512-856sckF0oP/diXtS4jNsiQw/UuK5fQG8l/a9VVLeSouf1/PPbBE1i1W852zVwKwYCBkFJJB7nCFTbk6UMEXBOQ==} + + '@types/d3-hierarchy@3.1.7': + resolution: {integrity: sha512-tJFtNoYBtRtkNysX1Xq4sxtjK8YgoWUNpIiUee0/jHGRwqvzYxkq0hGVbbOGSz+JgFxxRu4K8nb3YpG3CMARtg==} + + '@types/d3-interpolate@3.0.4': + resolution: {integrity: sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==} + + '@types/d3-path@3.1.1': + resolution: {integrity: sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==} + + '@types/d3-polygon@3.0.2': + resolution: {integrity: sha512-ZuWOtMaHCkN9xoeEMr1ubW2nGWsp4nIql+OPQRstu4ypeZ+zk3YKqQT0CXVe/PYqrKpZAi+J9mTs05TKwjXSRA==} + + '@types/d3-quadtree@3.0.6': + resolution: {integrity: sha512-oUzyO1/Zm6rsxKRHA1vH0NEDG58HrT5icx/azi9MF1TWdtttWl0UIUsjEQBBh+SIkrpd21ZjEv7ptxWys1ncsg==} + + '@types/d3-random@3.0.3': + resolution: {integrity: sha512-Imagg1vJ3y76Y2ea0871wpabqp613+8/r0mCLEBfdtqC7xMSfj9idOnmBYyMoULfHePJyxMAw3nWhJxzc+LFwQ==} + + '@types/d3-scale-chromatic@3.1.0': + resolution: {integrity: sha512-iWMJgwkK7yTRmWqRB5plb1kadXyQ5Sj8V/zYlFGMUBbIPKQScw+Dku9cAAMgJG+z5GYDoMjWGLVOvjghDEFnKQ==} + + '@types/d3-scale@4.0.9': + resolution: {integrity: 
sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==} + + '@types/d3-selection@3.0.11': + resolution: {integrity: sha512-bhAXu23DJWsrI45xafYpkQ4NtcKMwWnAC/vKrd2l+nxMFuvOT3XMYTIj2opv8vq8AO5Yh7Qac/nSeP/3zjTK0w==} + + '@types/d3-shape@3.1.8': + resolution: {integrity: sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==} + + '@types/d3-time-format@4.0.3': + resolution: {integrity: sha512-5xg9rC+wWL8kdDj153qZcsJ0FWiFt0J5RB6LYUNZjwSnesfblqrI/bJ1wBdJ8OQfncgbJG5+2F+qfqnqyzYxyg==} + + '@types/d3-time@3.0.4': + resolution: {integrity: sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==} + + '@types/d3-timer@3.0.2': + resolution: {integrity: sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==} + + '@types/d3-transition@3.0.9': + resolution: {integrity: sha512-uZS5shfxzO3rGlu0cC3bjmMFKsXv+SmZZcgp0KD22ts4uGXp5EVYGzu/0YdwZeKmddhcAccYtREJKkPfXkZuCg==} + + '@types/d3-zoom@3.0.8': + resolution: {integrity: sha512-iqMC4/YlFCSlO8+2Ii1GGGliCAY4XdeG748w5vQUbevlbDu0zSjH/+jojorQVBK/se0j6DUFNPBGSqD3YWYnDw==} + + '@types/d3@7.4.3': + resolution: {integrity: sha512-lZXZ9ckh5R8uiFVt8ogUNf+pIrK4EsWrx2Np75WvF/eTpJ0FMHNhjXk8CKEx/+gpHbNQyJWehbFaTvqmHWB3ww==} + + '@types/debug@4.1.12': + resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} + + '@types/eslint-scope@3.7.7': + resolution: {integrity: sha512-MzMFlSLBqNF2gcHWO0G1vP/YQyfvrxZ0bF+u7mzUdZ1/xK4A4sru+nraZz5i3iEIk1l1uyicaDVTB4QbbEkAYg==} + + '@types/eslint@9.6.1': + resolution: {integrity: sha512-FXx2pKgId/WyYo2jXw63kk7/+TY7u7AziEJxJAnSFzHlqTAS3Ync6SvgYAN/k4/PQpnnVuzoMuVnByKK2qp0ag==} + + '@types/estree-jsx@1.0.5': + resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} + + '@types/estree@1.0.8': + resolution: {integrity: 
sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + + '@types/express-serve-static-core@4.19.8': + resolution: {integrity: sha512-02S5fmqeoKzVZCHPZid4b8JH2eM5HzQLZWN2FohQEy/0eXTq8VXZfSN6Pcr3F6N9R/vNrj7cpgbhjie6m/1tCA==} + + '@types/express@4.17.25': + resolution: {integrity: sha512-dVd04UKsfpINUnK0yBoYHDF3xu7xVH4BuDotC/xGuycx4CgbP48X/KF/586bcObxT0HENHXEU8Nqtu6NR+eKhw==} + + '@types/geojson@7946.0.16': + resolution: {integrity: sha512-6C8nqWur3j98U6+lXDfTUWIfgvZU+EumvpHKcYjujKH7woYyLj2sUmff0tRhrqM7BohUw7Pz3ZB1jj2gW9Fvmg==} + + '@types/gtag.js@0.0.12': + resolution: {integrity: sha512-YQV9bUsemkzG81Ea295/nF/5GijnD2Af7QhEofh7xu+kvCN6RdodgNwwGWXB5GMI3NoyvQo0odNctoH/qLMIpg==} + + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + + '@types/history@4.7.11': + resolution: {integrity: sha512-qjDJRrmvBMiTx+jyLxvLfJU7UznFuokDv4f3WRuriHKERccVpFU+8XMQUAbDzoiJCsmexxRExQeMwwCdamSKDA==} + + '@types/html-minifier-terser@6.1.0': + resolution: {integrity: sha512-oh/6byDPnL1zeNXFrDXFLyZjkr1MsBG667IM792caf1L2UPOOMf65NFzjUH/ltyfwjAGfs1rsX1eftK0jC/KIg==} + + '@types/http-cache-semantics@4.2.0': + resolution: {integrity: sha512-L3LgimLHXtGkWikKnsPg0/VFx9OGZaC+eN1u4r+OB1XRqH3meBIAVC2zr1WdMH+RHmnRkqliQAOHNJ/E0j/e0Q==} + + '@types/http-errors@2.0.5': + resolution: {integrity: sha512-r8Tayk8HJnX0FztbZN7oVqGccWgw98T/0neJphO91KkmOzug1KkofZURD4UaD5uH8AqcFLfdPErnBod0u71/qg==} + + '@types/http-proxy@1.17.17': + resolution: {integrity: sha512-ED6LB+Z1AVylNTu7hdzuBqOgMnvG/ld6wGCG8wFnAzKX5uyW2K3WD52v0gnLCTK/VLpXtKckgWuyScYK6cSPaw==} + + '@types/istanbul-lib-coverage@2.0.6': + resolution: {integrity: sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==} + + '@types/istanbul-lib-report@3.0.3': + resolution: {integrity: sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==} + + 
'@types/istanbul-reports@3.0.4': + resolution: {integrity: sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==} + + '@types/json-schema@7.0.15': + resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==} + + '@types/mdast@4.0.4': + resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==} + + '@types/mdx@2.0.13': + resolution: {integrity: sha512-+OWZQfAYyio6YkJb3HLxDrvnx6SWWDbC0zVPfBRzUk0/nqoDyf6dNxQi3eArPe8rJ473nobTMQ/8Zk+LxJ+Yuw==} + + '@types/mime@1.3.5': + resolution: {integrity: sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==} + + '@types/ms@2.1.0': + resolution: {integrity: sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==} + + '@types/node@12.20.55': + resolution: {integrity: sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ==} + + '@types/node@17.0.45': + resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==} + + '@types/node@20.19.30': + resolution: {integrity: sha512-WJtwWJu7UdlvzEAUm484QNg5eAoq5QR08KDNx7g45Usrs2NtOPiX8ugDqmKdXkyL03rBqU5dYNYVQetEpBHq2g==} + + '@types/prismjs@1.26.5': + resolution: {integrity: sha512-AUZTa7hQ2KY5L7AmtSiqxlhWxb4ina0yd8hNbl4TWuqnv/pFP0nDMb3YrfSBf4hJVGLh2YEIBfKaBW/9UEl6IQ==} + + '@types/prop-types@15.7.15': + resolution: {integrity: sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==} + + '@types/qs@6.14.0': + resolution: {integrity: sha512-eOunJqu0K1923aExK6y8p6fsihYEn/BYuQ4g0CxAAgFc4b/ZLN4CrsRZ55srTdqoiLzU2B2evC+apEIxprEzkQ==} + + '@types/range-parser@1.2.7': + resolution: {integrity: sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==} + + '@types/react-dom@18.3.7': + 
resolution: {integrity: sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==} + peerDependencies: + '@types/react': ^18.0.0 + + '@types/react-router-config@5.0.11': + resolution: {integrity: sha512-WmSAg7WgqW7m4x8Mt4N6ZyKz0BubSj/2tVUMsAHp+Yd2AMwcSbeFq9WympT19p5heCFmF97R9eD5uUR/t4HEqw==} + + '@types/react-router-dom@5.3.3': + resolution: {integrity: sha512-kpqnYK4wcdm5UaWI3fLcELopqLrHgLqNsdpHauzlQktfkHL3npOSwtj1Uz9oKBAzs7lFtVkV8j83voAz2D8fhw==} + + '@types/react-router@5.1.20': + resolution: {integrity: sha512-jGjmu/ZqS7FjSH6owMcD5qpq19+1RS9DeVRqfl1FeBMxTDQAGwlMWOcs52NDoXaNKyG3d1cYQFMs9rCrb88o9Q==} + + '@types/react@18.3.27': + resolution: {integrity: sha512-cisd7gxkzjBKU2GgdYrTdtQx1SORymWyaAFhaxQPK9bYO9ot3Y5OikQRvY0VYQtvwjeQnizCINJAenh/V7MK2w==} + + '@types/retry@0.12.2': + resolution: {integrity: sha512-XISRgDJ2Tc5q4TRqvgJtzsRkFYNJzZrhTdtMoGVBttwzzQJkPnS3WWTFc7kuDRoPtPakl+T+OfdEUjYJj7Jbow==} + + '@types/sax@1.2.7': + resolution: {integrity: sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==} + + '@types/send@0.17.6': + resolution: {integrity: sha512-Uqt8rPBE8SY0RK8JB1EzVOIZ32uqy8HwdxCnoCOsYrvnswqmFZ/k+9Ikidlk/ImhsdvBsloHbAlewb2IEBV/Og==} + + '@types/send@1.2.1': + resolution: {integrity: sha512-arsCikDvlU99zl1g69TcAB3mzZPpxgw0UQnaHeC1Nwb015xp8bknZv5rIfri9xTOcMuaVgvabfIRA7PSZVuZIQ==} + + '@types/serve-index@1.9.4': + resolution: {integrity: sha512-qLpGZ/c2fhSs5gnYsQxtDEq3Oy8SXPClIXkW5ghvAvsNuVSA8k+gCONcUCS/UjLEYvYps+e8uBtfgXgvhwfNug==} + + '@types/serve-static@1.15.10': + resolution: {integrity: sha512-tRs1dB+g8Itk72rlSI2ZrW6vZg0YrLI81iQSTkMmOqnqCaNr/8Ek4VwWcN5vZgCYWbg/JJSGBlUaYGAOP73qBw==} + + '@types/sockjs@0.3.36': + resolution: {integrity: sha512-MK9V6NzAS1+Ud7JV9lJLFqW85VbC9dq3LmwZCuBe4wBDgKC0Kj/jd8Xl+nSviU+Qc3+m7umHHyHg//2KSa0a0Q==} + + '@types/trusted-types@2.0.7': + resolution: {integrity: 
sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==} + + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + + '@types/unist@3.0.3': + resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} + + '@types/ws@8.18.1': + resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==} + + '@types/yargs-parser@21.0.3': + resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==} + + '@types/yargs@17.0.35': + resolution: {integrity: sha512-qUHkeCyQFxMXg79wQfTtfndEC+N9ZZg76HJftDJp+qH2tV7Gj4OJi7l+PiWwJ+pWtW8GwSmqsDj/oymhrTWXjg==} + + '@typescript-eslint/eslint-plugin@8.54.0': + resolution: {integrity: sha512-hAAP5io/7csFStuOmR782YmTthKBJ9ND3WVL60hcOjvtGFb+HJxH4O5huAcmcZ9v9G8P+JETiZ/G1B8MALnWZQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + '@typescript-eslint/parser': ^8.54.0 + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/parser@8.54.0': + resolution: {integrity: sha512-BtE0k6cjwjLZoZixN0t5AKP0kSzlGu7FctRXYuPAm//aaiZhmfq1JwdYpYr1brzEspYyFeF+8XF5j2VK6oalrA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/project-service@8.54.0': + resolution: {integrity: sha512-YPf+rvJ1s7MyiWM4uTRhE4DvBXrEV+d8oC3P9Y2eT7S+HBS0clybdMIPnhiATi9vZOYDc7OQ1L/i6ga6NFYK/g==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/scope-manager@8.54.0': + resolution: {integrity: sha512-27rYVQku26j/PbHYcVfRPonmOlVI6gihHtXFbTdB5sb6qA0wdAQAbyXFVarQ5t4HRojIz64IV90YtsjQSSGlQg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + 
'@typescript-eslint/tsconfig-utils@8.54.0': + resolution: {integrity: sha512-dRgOyT2hPk/JwxNMZDsIXDgyl9axdJI3ogZ2XWhBPsnZUv+hPesa5iuhdYt2gzwA9t8RE5ytOJ6xB0moV0Ujvw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/type-utils@8.54.0': + resolution: {integrity: sha512-hiLguxJWHjjwL6xMBwD903ciAwd7DmK30Y9Axs/etOkftC3ZNN9K44IuRD/EB08amu+Zw6W37x9RecLkOo3pMA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/types@8.54.0': + resolution: {integrity: sha512-PDUI9R1BVjqu7AUDsRBbKMtwmjWcn4J3le+5LpcFgWULN3LvHC5rkc9gCVxbrsrGmO1jfPybN5s6h4Jy+OnkAA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@typescript-eslint/typescript-estree@8.54.0': + resolution: {integrity: sha512-BUwcskRaPvTk6fzVWgDPdUndLjB87KYDrN5EYGetnktoeAvPtO4ONHlAZDnj5VFnUANg0Sjm7j4usBlnoVMHwA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/utils@8.54.0': + resolution: {integrity: sha512-9Cnda8GS57AQakvRyG0PTejJNlA2xhvyNtEVIMlDWOOeEyBkYWhGPnfrIAnqxLMTSTo6q8g12XVjjev5l1NvMA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + peerDependencies: + eslint: ^8.57.0 || ^9.0.0 + typescript: '>=4.8.4 <6.0.0' + + '@typescript-eslint/visitor-keys@8.54.0': + resolution: {integrity: sha512-VFlhGSl4opC0bprJiItPQ1RfUhGDIBokcPwaFH4yiBCaNPeld/9VeXbiPO1cLyorQi1G1vL+ecBk1x8o1axORA==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + + '@vitest/coverage-v8@2.1.9': + resolution: {integrity: sha512-Z2cOr0ksM00MpEfyVE8KXIYPEcBFxdbLSs56L8PO0QQMxt/6bDj45uQfxoc96v05KW3clk7vvgP0qfDit9DmfQ==} + peerDependencies: + '@vitest/browser': 2.1.9 + vitest: 2.1.9 + peerDependenciesMeta: + '@vitest/browser': + 
optional: true + + '@vitest/expect@2.1.9': + resolution: {integrity: sha512-UJCIkTBenHeKT1TTlKMJWy1laZewsRIzYighyYiJKZreqtdxSos/S1t+ktRMQWu2CKqaarrkeszJx1cgC5tGZw==} + + '@vitest/mocker@2.1.9': + resolution: {integrity: sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==} + peerDependencies: + msw: ^2.4.9 + vite: ^5.0.0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@2.1.9': + resolution: {integrity: sha512-KhRIdGV2U9HOUzxfiHmY8IFHTdqtOhIzCpd8WRdJiE7D/HUcZVD0EgQCVjm+Q9gkUXWgBvMmTtZgIG48wq7sOQ==} + + '@vitest/runner@2.1.9': + resolution: {integrity: sha512-ZXSSqTFIrzduD63btIfEyOmNcBmQvgOVsPNPe0jYtESiXkhd8u2erDLnMxmGrDCwHCCHE7hxwRDCT3pt0esT4g==} + + '@vitest/snapshot@2.1.9': + resolution: {integrity: sha512-oBO82rEjsxLNJincVhLhaxxZdEtV0EFHMK5Kmx5sJ6H9L183dHECjiefOAdnqpIgT5eZwT04PoggUnW88vOBNQ==} + + '@vitest/spy@2.1.9': + resolution: {integrity: sha512-E1B35FwzXXTs9FHNK6bDszs7mtydNi5MIfUWpceJ8Xbfb1gBMscAnwLbEu+B44ed6W3XjL9/ehLPHR1fkf1KLQ==} + + '@vitest/utils@2.1.9': + resolution: {integrity: sha512-v0psaMSkNJ3A2NMrUEHFRzJtDPFn+/VWZ5WxImB21T9fjucJRmS7xCS3ppEnARb9y11OAzaD+P2Ps+b+BGX5iQ==} + + '@webassemblyjs/ast@1.14.1': + resolution: {integrity: sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==} + + '@webassemblyjs/floating-point-hex-parser@1.13.2': + resolution: {integrity: sha512-6oXyTOzbKxGH4steLbLNOu71Oj+C8Lg34n6CqRvqfS2O71BxY6ByfMDRhBytzknj9yGUPVJ1qIKhRlAwO1AovA==} + + '@webassemblyjs/helper-api-error@1.13.2': + resolution: {integrity: sha512-U56GMYxy4ZQCbDZd6JuvvNV/WFildOjsaWD3Tzzvmw/mas3cXzRJPMjP83JqEsgSbyrmaGjBfDtV7KDXV9UzFQ==} + + '@webassemblyjs/helper-buffer@1.14.1': + resolution: {integrity: sha512-jyH7wtcHiKssDtFPRB+iQdxlDf96m0E39yb0k5uJVhFGleZFoNw1c4aeIcVUPPbXUVJ94wwnMOAqUHyzoEPVMA==} + + '@webassemblyjs/helper-numbers@1.13.2': + resolution: {integrity: 
sha512-FE8aCmS5Q6eQYcV3gI35O4J789wlQA+7JrqTTpJqn5emA4U2hvwJmvFRC0HODS+3Ye6WioDklgd6scJ3+PLnEA==} + + '@webassemblyjs/helper-wasm-bytecode@1.13.2': + resolution: {integrity: sha512-3QbLKy93F0EAIXLh0ogEVR6rOubA9AoZ+WRYhNbFyuB70j3dRdwH9g+qXhLAO0kiYGlg3TxDV+I4rQTr/YNXkA==} + + '@webassemblyjs/helper-wasm-section@1.14.1': + resolution: {integrity: sha512-ds5mXEqTJ6oxRoqjhWDU83OgzAYjwsCV8Lo/N+oRsNDmx/ZDpqalmrtgOMkHwxsG0iI//3BwWAErYRHtgn0dZw==} + + '@webassemblyjs/ieee754@1.13.2': + resolution: {integrity: sha512-4LtOzh58S/5lX4ITKxnAK2USuNEvpdVV9AlgGQb8rJDHaLeHciwG4zlGr0j/SNWlr7x3vO1lDEsuePvtcDNCkw==} + + '@webassemblyjs/leb128@1.13.2': + resolution: {integrity: sha512-Lde1oNoIdzVzdkNEAWZ1dZ5orIbff80YPdHx20mrHwHrVNNTjNr8E3xz9BdpcGqRQbAEa+fkrCb+fRFTl/6sQw==} + + '@webassemblyjs/utf8@1.13.2': + resolution: {integrity: sha512-3NQWGjKTASY1xV5m7Hr0iPeXD9+RDobLll3T9d2AO+g3my8xy5peVyjSag4I50mR1bBSN/Ct12lo+R9tJk0NZQ==} + + '@webassemblyjs/wasm-edit@1.14.1': + resolution: {integrity: sha512-RNJUIQH/J8iA/1NzlE4N7KtyZNHi3w7at7hDjvRNm5rcUXa00z1vRz3glZoULfJ5mpvYhLybmVcwcjGrC1pRrQ==} + + '@webassemblyjs/wasm-gen@1.14.1': + resolution: {integrity: sha512-AmomSIjP8ZbfGQhumkNvgC33AY7qtMCXnN6bL2u2Js4gVCg8fp735aEiMSBbDR7UQIj90n4wKAFUSEd0QN2Ukg==} + + '@webassemblyjs/wasm-opt@1.14.1': + resolution: {integrity: sha512-PTcKLUNvBqnY2U6E5bdOQcSM+oVP/PmrDY9NzowJjislEjwP/C4an2303MCVS2Mg9d3AJpIGdUFIQQWbPds0Sw==} + + '@webassemblyjs/wasm-parser@1.14.1': + resolution: {integrity: sha512-JLBl+KZ0R5qB7mCnud/yyX08jWFw5MsoalJ1pQ4EdFlgj9VdXKGuENGsiCIjegI1W7p91rUlcB/LB5yRJKNTcQ==} + + '@webassemblyjs/wast-printer@1.14.1': + resolution: {integrity: sha512-kPSSXE6De1XOR820C90RIo2ogvZG+c3KiHzqUoO/F34Y2shGzesfqv7o57xrxovZJH/MetF5UjroJ/R/3isoiw==} + + '@xtuc/ieee754@1.2.0': + resolution: {integrity: sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==} + + '@xtuc/long@4.2.2': + resolution: {integrity: 
sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==} + + accepts@1.3.8: + resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==} + engines: {node: '>= 0.6'} + + acorn-import-phases@1.0.4: + resolution: {integrity: sha512-wKmbr/DDiIXzEOiWrTTUcDm24kQ2vGfZQvM2fwg2vXqR5uW6aapr7ObPtj1th32b9u90/Pf4AItvdTh42fBmVQ==} + engines: {node: '>=10.13.0'} + peerDependencies: + acorn: ^8.14.0 + + acorn-jsx@5.3.2: + resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + acorn-walk@8.3.4: + resolution: {integrity: sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==} + engines: {node: '>=0.4.0'} + + acorn@8.15.0: + resolution: {integrity: sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==} + engines: {node: '>=0.4.0'} + hasBin: true + + address@1.2.2: + resolution: {integrity: sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==} + engines: {node: '>= 10.0.0'} + + aggregate-error@3.1.0: + resolution: {integrity: sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==} + engines: {node: '>=8'} + + ajv-formats@2.1.1: + resolution: {integrity: sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==} + peerDependencies: + ajv: ^8.0.0 + peerDependenciesMeta: + ajv: + optional: true + + ajv-keywords@3.5.2: + resolution: {integrity: sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==} + peerDependencies: + ajv: ^6.9.1 + + ajv-keywords@5.1.0: + resolution: {integrity: sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==} + peerDependencies: + ajv: ^8.8.2 + + 
ajv@6.12.6: + resolution: {integrity: sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==} + + ajv@8.17.1: + resolution: {integrity: sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==} + + algoliasearch-helper@3.27.0: + resolution: {integrity: sha512-eNYchRerbsvk2doHOMfdS1/B6Tm70oGtu8mzQlrNzbCeQ8p1MjCW8t/BL6iZ5PD+cL5NNMgTMyMnmiXZ1sgmNw==} + peerDependencies: + algoliasearch: '>= 3.1 < 6' + + algoliasearch@5.47.0: + resolution: {integrity: sha512-AGtz2U7zOV4DlsuYV84tLp2tBbA7RPtLA44jbVH4TTpDcc1dIWmULjHSsunlhscbzDydnjuFlNhflR3nV4VJaQ==} + engines: {node: '>= 14.0.0'} + + ansi-align@3.0.1: + resolution: {integrity: sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==} + + ansi-colors@4.1.3: + resolution: {integrity: sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw==} + engines: {node: '>=6'} + + ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + + ansi-html-community@0.0.8: + resolution: {integrity: sha512-1APHAyr3+PCamwNw3bXCPp4HFLONZt/yIH0sZp0/469KWNTEy+qN5jQ3GVX6DMZ1UXAi34yVwtTeaG/HpBuuzw==} + engines: {'0': node >= 0.8.0} + hasBin: true + + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.2: + resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + + ansi-styles@6.2.3: + resolution: {integrity: 
sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} + engines: {node: '>=12'} + + any-promise@1.3.0: + resolution: {integrity: sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==} + + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} + + arg@5.0.2: + resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} + + argparse@1.0.10: + resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + array-flatten@1.1.1: + resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} + + array-union@2.1.0: + resolution: {integrity: sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==} + engines: {node: '>=8'} + + asn1js@3.0.7: + resolution: {integrity: sha512-uLvq6KJu04qoQM6gvBfKFjlh6Gl0vOKQuR5cJMDHQkmwfMOQeN3F3SHCv9SNYSL+CRoHvOGFfllDlVz03GQjvQ==} + engines: {node: '>=12.0.0'} + + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + + astring@1.9.0: + resolution: {integrity: sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==} + hasBin: true + + autoprefixer@10.4.24: + resolution: {integrity: sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==} + engines: {node: ^10 || ^12 || >=14} + hasBin: true + peerDependencies: + postcss: ^8.1.0 + + babel-loader@9.2.1: + resolution: {integrity: 
sha512-fqe8naHt46e0yIdkjUZYqddSXfej3AHajX+CSO5X7oy0EmPc6o5Xh+RClNoHjnieWz9AW4kZxW9yyFMhVB1QLA==} + engines: {node: '>= 14.15.0'} + peerDependencies: + '@babel/core': ^7.12.0 + webpack: '>=5' + + babel-plugin-dynamic-import-node@2.3.3: + resolution: {integrity: sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==} + + babel-plugin-polyfill-corejs2@0.4.15: + resolution: {integrity: sha512-hR3GwrRwHUfYwGfrisXPIDP3JcYfBrW7wKE7+Au6wDYl7fm/ka1NEII6kORzxNU556JjfidZeBsO10kYvtV1aw==} + peerDependencies: + '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 + + babel-plugin-polyfill-corejs3@0.13.0: + resolution: {integrity: sha512-U+GNwMdSFgzVmfhNm8GJUX88AadB3uo9KpJqS3FaqNIPKgySuvMb+bHPsOmmuWyIcuqZj/pzt1RUIUZns4y2+A==} + peerDependencies: + '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 + + babel-plugin-polyfill-corejs3@0.14.0: + resolution: {integrity: sha512-AvDcMxJ34W4Wgy4KBIIePQTAOP1Ie2WFwkQp3dB7FQ/f0lI5+nM96zUnYEOE1P9sEg0es5VCP0HxiWu5fUHZAQ==} + peerDependencies: + '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 + + babel-plugin-polyfill-regenerator@0.6.6: + resolution: {integrity: sha512-hYm+XLYRMvupxiQzrvXUj7YyvFFVfv5gI0R71AJzudg1g2AI2vyCPPIFEBjk162/wFzti3inBHo7isWFuEVS/A==} + peerDependencies: + '@babel/core': ^7.4.0 || ^8.0.0-0 <8.0.0 + + bail@2.0.2: + resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + baseline-browser-mapping@2.9.19: + resolution: {integrity: sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==} + hasBin: true + + batch@0.6.1: + resolution: {integrity: sha512-x+VAiMRL6UPkx+kudNvxTl6hB2XNNCG2r+7wixVfIYwu/2HKRXimwQyaumLjMveWvT2Hkd/cAJw+QBMfJ/EKVw==} + + better-path-resolve@1.0.0: + resolution: {integrity: 
sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g==} + engines: {node: '>=4'} + + big.js@5.2.2: + resolution: {integrity: sha512-vyL2OymJxmarO8gxMr0mhChsO9QGwhynfuu4+MHTAW6czfq9humCB7rKpUjDd9YUiDPU4mzpyupFSvOClAwbmQ==} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + body-parser@1.20.4: + resolution: {integrity: sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==} + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + + bonjour-service@1.3.0: + resolution: {integrity: sha512-3YuAUiSkWykd+2Azjgyxei8OWf8thdn8AITIog2M4UICzoqfjlqr64WIjEXZllf/W6vK1goqleSR6brGomxQqA==} + + boolbase@1.0.0: + resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + + boxen@6.2.1: + resolution: {integrity: sha512-H4PEsJXfFI/Pt8sjDWbHlQPx4zL/bvSQjcilJmaulGt5mLDorHOHpmdXAJcBcmru7PhYSp/cDMWRko4ZUMFkSw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + boxen@7.1.1: + resolution: {integrity: sha512-2hCgjEmP8YLWQ130n2FerGv7rYpfBmnmp9Uy2Le1vge6X3gZIfSmEzP5QTDElFxcvVcXlEn8Aq6MU/PZygIOog==} + engines: {node: '>=14.16'} + + brace-expansion@1.1.12: + resolution: {integrity: sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==} + + brace-expansion@2.0.2: + resolution: {integrity: sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + browserslist@4.28.1: + resolution: {integrity: sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==} + engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} + hasBin: true + 
+ buffer-from@1.1.2: + resolution: {integrity: sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==} + + bundle-name@4.1.0: + resolution: {integrity: sha512-tjwM5exMg6BGRI+kNmTntNsvdZS1X8BFYS6tnJ2hdH0kVxM6/eVZ2xy+FqStSWvYmtfFMDLIxurorHwDKfDz5Q==} + engines: {node: '>=18'} + + bundle-require@5.1.0: + resolution: {integrity: sha512-3WrrOuZiyaaZPWiEt4G3+IffISVC9HYlWueJEBWED4ZH4aIAC2PnkdnuRrR94M+w6yGWn4AglWtJtBI8YqvgoA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + peerDependencies: + esbuild: '>=0.18' + + bytes@3.0.0: + resolution: {integrity: sha512-pMhOfFDPiv9t5jjIXkHosWmkSyQbvsgEVNkz0ERHbuLh2T/7j4Mqqpz523Fe8MVY89KC6Sh/QfS2sM+SjgFDcw==} + engines: {node: '>= 0.8'} + + bytes@3.1.2: + resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} + engines: {node: '>= 0.8'} + + bytestreamjs@2.0.1: + resolution: {integrity: sha512-U1Z/ob71V/bXfVABvNr/Kumf5VyeQRBEm6Txb0PQ6S7V5GpBM3w4Cbqz/xPDicR5tN0uvDifng8C+5qECeGwyQ==} + engines: {node: '>=6.0.0'} + + cac@6.7.14: + resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} + engines: {node: '>=8'} + + cacheable-lookup@7.0.0: + resolution: {integrity: sha512-+qJyx4xiKra8mZrcwhjMRMUhD5NR1R8esPkzIYxX96JiecFoxAXFuz/GpR3+ev4PE1WamHip78wV0vcmPQtp8w==} + engines: {node: '>=14.16'} + + cacheable-request@10.2.14: + resolution: {integrity: sha512-zkDT5WAF4hSSoUgyfg5tFIxz8XQK+25W/TLVojJTMKBaxevLBBtLxgqguAuVQB8PVW79FVjHcU+GJ9tVbDZ9mQ==} + engines: {node: '>=14.16'} + + call-bind-apply-helpers@1.0.2: + resolution: {integrity: sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==} + engines: {node: '>= 0.4'} + + call-bind@1.0.8: + resolution: {integrity: sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==} + engines: {node: '>= 0.4'} + + call-bound@1.0.4: + resolution: 
{integrity: sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==} + engines: {node: '>= 0.4'} + + callsites@3.1.0: + resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} + engines: {node: '>=6'} + + camel-case@4.1.2: + resolution: {integrity: sha512-gxGWBrTT1JuMx6R+o5PTXMmUnhnVzLQ9SNutD4YqKtI6ap897t3tKECYla6gCWEkplXnlNybEkZg9GEGxKFCgw==} + + camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + + camelcase@7.0.1: + resolution: {integrity: sha512-xlx1yCK2Oc1APsPXDL2LdlNP6+uu8OCDdhOBSVT279M/S+y75O30C2VuD8T2ogdePBBl7PfPF4504tnLgX3zfw==} + engines: {node: '>=14.16'} + + caniuse-api@3.0.0: + resolution: {integrity: sha512-bsTwuIg/BZZK/vreVTYYbSWoe2F+71P7K5QGEX+pT250DZbfU1MQ5prOKpPR+LL6uWKK3KMwMCAS74QB3Um1uw==} + + caniuse-lite@1.0.30001766: + resolution: {integrity: sha512-4C0lfJ0/YPjJQHagaE9x2Elb69CIqEPZeG0anQt9SIvIoOH4a4uaRl73IavyO+0qZh6MDLH//DrXThEYKHkmYA==} + + ccount@2.0.1: + resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + + chai@5.3.3: + resolution: {integrity: sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==} + engines: {node: '>=18'} + + chalk@4.1.2: + resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} + engines: {node: '>=10'} + + chalk@5.6.2: + resolution: {integrity: sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==} + engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} + + char-regex@1.0.2: + resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==} + engines: {node: '>=10'} + + character-entities-html4@2.1.0: + resolution: {integrity: 
sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + + character-reference-invalid@2.0.1: + resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} + + chardet@2.1.1: + resolution: {integrity: sha512-PsezH1rqdV9VvyNhxxOW32/d75r01NY7TQCmOqomRo15ZSOKbpTFVsfjghxo6JloQUCGnH4k1LGu0R4yCLlWQQ==} + + check-error@2.1.3: + resolution: {integrity: sha512-PAJdDJusoxnwm1VwW07VWwUN1sl7smmC3OKggvndJFadxxDRyFJBX/ggnu/KE4kQAB7a3Dp8f/YXC1FlUprWmA==} + engines: {node: '>= 16'} + + cheerio-select@2.1.0: + resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} + + cheerio@1.0.0-rc.12: + resolution: {integrity: sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==} + engines: {node: '>= 6'} + + cheerio@1.2.0: + resolution: {integrity: sha512-WDrybc/gKFpTYQutKIK6UvfcuxijIZfMfXaYm8NMsPQxSYvf+13fXUJ4rztGGbJcBQ/GF55gvrZ0Bc0bj/mqvg==} + engines: {node: '>=20.18.1'} + + chevrotain-allstar@0.3.1: + resolution: {integrity: sha512-b7g+y9A0v4mxCW1qUhf3BSVPg+/NvGErk/dOkrDaHA0nQIQGAtrOjlX//9OQtRlSCy+x9rfB5N8yC71lH1nvMw==} + peerDependencies: + chevrotain: ^11.0.0 + + chevrotain@11.0.3: + resolution: {integrity: sha512-ci2iJH6LeIkvP9eJW6gpueU8cnZhv85ELY8w8WiFtNjMHA5ad6pQLaJo9mEly/9qUyCpvqX8/POVUTf18/HFdw==} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + chokidar@4.0.3: + resolution: {integrity: 
sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} + engines: {node: '>= 14.16.0'} + + chrome-trace-event@1.0.4: + resolution: {integrity: sha512-rNjApaLzuwaOTjCiT8lSDdGN1APCiqkChLMJxJPWLunPAt5fy8xgU9/jNOchV84wfIxrA0lRQB7oCT8jrn/wrQ==} + engines: {node: '>=6.0'} + + ci-info@3.9.0: + resolution: {integrity: sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==} + engines: {node: '>=8'} + + clean-css@5.3.3: + resolution: {integrity: sha512-D5J+kHaVb/wKSFcyyV75uCn8fiY4sV38XJoe4CUyGQ+mOU/fMVYUdH1hJC+CJQ5uY3EnW27SbJYS4X8BiLrAFg==} + engines: {node: '>= 10.0'} + + clean-stack@2.2.0: + resolution: {integrity: sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==} + engines: {node: '>=6'} + + cli-boxes@3.0.0: + resolution: {integrity: sha512-/lzGpEWL/8PfI0BmBOPRwp0c/wFNX1RdUML3jK/RcSBA9T8mZDdQpqYBKtCFTOfQbwPqWEOpjqW+Fnayc0969g==} + engines: {node: '>=10'} + + cli-table3@0.6.5: + resolution: {integrity: sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==} + engines: {node: 10.* || >= 12.*} + + clone-deep@4.0.1: + resolution: {integrity: sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==} + engines: {node: '>=6'} + + clsx@2.1.1: + resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} + engines: {node: '>=6'} + + collapse-white-space@2.1.0: + resolution: {integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + colord@2.9.3: 
+ resolution: {integrity: sha512-jeC1axXpnb0/2nn/Y1LPuLdgXBLH7aDcHu4KEKfqw3CUhX7ZpfBSlPKyqXE6btIgEzfWtrX3/tyBCaCvXvMkOw==} + + colorette@2.0.20: + resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} + + combine-promises@1.2.0: + resolution: {integrity: sha512-VcQB1ziGD0NXrhKxiwyNbCDmRzs/OShMs2GqW2DlU2A/Sd0nQxE1oWDAE5O0ygSx5mgQOn9eIFh7yKPgFRVkPQ==} + engines: {node: '>=10'} + + comlink@4.4.2: + resolution: {integrity: sha512-OxGdvBmJuNKSCMO4NTl1L47VRp6xn2wG4F/2hYzB6tiCb709otOxtEYCSvK80PtjODfXXZu8ds+Nw5kVCjqd2g==} + + comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + + commander@10.0.1: + resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==} + engines: {node: '>=14'} + + commander@2.20.3: + resolution: {integrity: sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==} + + commander@4.1.1: + resolution: {integrity: sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==} + engines: {node: '>= 6'} + + commander@5.1.0: + resolution: {integrity: sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg==} + engines: {node: '>= 6'} + + commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + + commander@8.3.0: + resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} + engines: {node: '>= 12'} + + common-path-prefix@3.0.0: + resolution: {integrity: sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==} + + compressible@2.0.18: + resolution: {integrity: 
sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} + engines: {node: '>= 0.6'} + + compression@1.8.1: + resolution: {integrity: sha512-9mAqGPHLakhCLeNyxPkK4xVo746zQ/czLH1Ky+vkitMnWfWZps8r0qXuwhwizagCRttsL4lfG4pIOvaWLpAP0w==} + engines: {node: '>= 0.8.0'} + + concat-map@0.0.1: + resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} + + confbox@0.1.8: + resolution: {integrity: sha512-RMtmw0iFkeR4YV+fUOSucriAQNb9g8zFR52MWCtl+cCZOFRNL6zeB395vPzFhEjjn4fMxXudmELnl/KF/WrK6w==} + + config-chain@1.1.13: + resolution: {integrity: sha512-qj+f8APARXHrM0hraqXYb2/bOVSV4PvJQlNZ/DVj0QrmNM2q2euizkeuVckQ57J+W0mRH6Hvi+k50M4Jul2VRQ==} + + configstore@6.0.0: + resolution: {integrity: sha512-cD31W1v3GqUlQvbBCGcXmd2Nj9SvLDOP1oQ0YFuLETufzSPaKp11rYBsSOm7rCsW3OnIRAFM3OxRhceaXNYHkA==} + engines: {node: '>=12'} + + connect-history-api-fallback@2.0.0: + resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==} + engines: {node: '>=0.8'} + + consola@3.4.2: + resolution: {integrity: sha512-5IKcdX0nnYavi6G7TtOhwkYzyjfJlatbjMjuLSfE2kYT5pMDOilZ4OvMhi637CcDICTmz3wARPoyhqyX1Y+XvA==} + engines: {node: ^14.18.0 || >=16.10.0} + + content-disposition@0.5.2: + resolution: {integrity: sha512-kRGRZw3bLlFISDBgwTSA1TMBFN6J6GWDeubmDE3AF+3+yXL8hTWv8r5rkLbqYXY4RjPk/EzHnClI3zQf1cFmHA==} + engines: {node: '>= 0.6'} + + content-disposition@0.5.4: + resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==} + engines: {node: '>= 0.6'} + + content-type@1.0.5: + resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==} + engines: {node: '>= 0.6'} + + convert-source-map@2.0.0: + resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} + + 
cookie-signature@1.0.7: + resolution: {integrity: sha512-NXdYc3dLr47pBkpUCHtKSwIOQXLVn8dZEuywboCOJY/osA0wFSLlSawr3KN8qXJEyX66FcONTH8EIlVuK0yyFA==} + + cookie@0.7.2: + resolution: {integrity: sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==} + engines: {node: '>= 0.6'} + + copy-webpack-plugin@11.0.0: + resolution: {integrity: sha512-fX2MWpamkW0hZxMEg0+mYnA40LTosOSa5TqZ9GYIBzyJa9C3QUaMPSE2xAi/buNr8u89SfD9wHSQVBzrRa/SOQ==} + engines: {node: '>= 14.15.0'} + peerDependencies: + webpack: ^5.1.0 + + core-js-compat@3.48.0: + resolution: {integrity: sha512-OM4cAF3D6VtH/WkLtWvyNC56EZVXsZdU3iqaMG2B4WvYrlqU831pc4UtG5yp0sE9z8Y02wVN7PjW5Zf9Gt0f1Q==} + + core-js-pure@3.48.0: + resolution: {integrity: sha512-1slJgk89tWC51HQ1AEqG+s2VuwpTRr8ocu4n20QUcH1v9lAN0RXen0Q0AABa/DK1I7RrNWLucplOHMx8hfTGTw==} + + core-js@3.48.0: + resolution: {integrity: sha512-zpEHTy1fjTMZCKLHUZoVeylt9XrzaIN2rbPXEt0k+q7JE5CkCZdo6bNq55bn24a69CH7ErAVLKijxJja4fw+UQ==} + + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + + cose-base@1.0.3: + resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} + + cose-base@2.2.0: + resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + + cosmiconfig@8.3.6: + resolution: {integrity: sha512-kcZ6+W5QzcJ3P1Mt+83OUv/oHFqZHIx8DuxG6eZ5RGMERoLqp4BuGjhHLYGK+Kf5XVkQvqBSmAy/nGWN3qDgEA==} + engines: {node: '>=14'} + peerDependencies: + typescript: '>=4.9.5' + peerDependenciesMeta: + typescript: + optional: true + + cross-spawn@7.0.6: + resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} + engines: {node: '>= 8'} + + crypto-random-string@4.0.0: + resolution: {integrity: 
sha512-x8dy3RnvYdlUcPOjkEHqozhiwzKNSq7GcPuXFbnyMOCHxX8V3OgIg/pYuabl2sbUPfIJaeAQB7PMOK8DFIdoRA==} + engines: {node: '>=12'} + + css-blank-pseudo@7.0.1: + resolution: {integrity: sha512-jf+twWGDf6LDoXDUode+nc7ZlrqfaNphrBIBrcmeP3D8yw1uPaix1gCC8LUQUGQ6CycuK2opkbFFWFuq/a94ag==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + css-declaration-sorter@7.3.1: + resolution: {integrity: sha512-gz6x+KkgNCjxq3Var03pRYLhyNfwhkKF1g/yoLgDNtFvVu0/fOLV9C8fFEZRjACp/XQLumjAYo7JVjzH3wLbxA==} + engines: {node: ^14 || ^16 || >=18} + peerDependencies: + postcss: ^8.0.9 + + css-has-pseudo@7.0.3: + resolution: {integrity: sha512-oG+vKuGyqe/xvEMoxAQrhi7uY16deJR3i7wwhBerVrGQKSqUC5GiOVxTpM9F9B9hw0J+eKeOWLH7E9gZ1Dr5rA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + css-loader@6.11.0: + resolution: {integrity: sha512-CTJ+AEQJjq5NzLga5pE39qdiSV56F8ywCIsqNIRF0r7BDgWsN25aazToqAFg7ZrtA/U016xudB3ffgweORxX7g==} + engines: {node: '>= 12.13.0'} + peerDependencies: + '@rspack/core': 0.x || 1.x + webpack: ^5.0.0 + peerDependenciesMeta: + '@rspack/core': + optional: true + webpack: + optional: true + + css-minimizer-webpack-plugin@5.0.1: + resolution: {integrity: sha512-3caImjKFQkS+ws1TGcFn0V1HyDJFq1Euy589JlD6/3rV2kj+w7r5G9WDMgSHvpvXHNZ2calVypZWuEDQd9wfLg==} + engines: {node: '>= 14.15.0'} + peerDependencies: + '@parcel/css': '*' + '@swc/css': '*' + clean-css: '*' + csso: '*' + esbuild: '*' + lightningcss: '*' + webpack: ^5.0.0 + peerDependenciesMeta: + '@parcel/css': + optional: true + '@swc/css': + optional: true + clean-css: + optional: true + csso: + optional: true + esbuild: + optional: true + lightningcss: + optional: true + + css-prefers-color-scheme@10.0.0: + resolution: {integrity: sha512-VCtXZAWivRglTZditUfB4StnsWr6YVZ2PRtuxQLKTNRdtAf8tpzaVPE9zXIF3VaSc7O70iK/j1+NXxyQCqdPjQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + css-select@4.3.0: + resolution: {integrity: 
sha512-wPpOYtnsVontu2mODhA19JrqWxNsfdatRKd64kmpRbQgh1KtItko5sTnEpPdpSaJszTOhEMlF/RPz28qj4HqhQ==} + + css-select@5.2.2: + resolution: {integrity: sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==} + + css-tree@2.2.1: + resolution: {integrity: sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0, npm: '>=7.0.0'} + + css-tree@2.3.1: + resolution: {integrity: sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0} + + css-what@6.2.2: + resolution: {integrity: sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==} + engines: {node: '>= 6'} + + cssdb@8.7.1: + resolution: {integrity: sha512-+F6LKx48RrdGOtE4DT5jz7Uo+VeyKXpK797FAevIkzjV8bMHz6xTO5F7gNDcRCHmPgD5jj2g6QCsY9zmVrh38A==} + + cssesc@3.0.0: + resolution: {integrity: sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==} + engines: {node: '>=4'} + hasBin: true + + cssnano-preset-advanced@6.1.2: + resolution: {integrity: sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + cssnano-preset-default@6.1.2: + resolution: {integrity: sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + cssnano-utils@4.0.2: + resolution: {integrity: sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + cssnano@6.1.2: + resolution: {integrity: sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA==} + 
engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + csso@5.0.5: + resolution: {integrity: sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==} + engines: {node: ^10 || ^12.20.0 || ^14.13.0 || >=15.0.0, npm: '>=7.0.0'} + + csstype@3.2.3: + resolution: {integrity: sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==} + + cytoscape-cose-bilkent@4.1.0: + resolution: {integrity: sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape-fcose@2.2.0: + resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape@3.33.1: + resolution: {integrity: sha512-iJc4TwyANnOGR1OmWhsS9ayRS3s+XQ185FmuHObThD+5AeJCakAAbWv8KimMTt08xCCLNgneQwFp+JRJOr9qGQ==} + engines: {node: '>=0.10'} + + d3-array@2.12.1: + resolution: {integrity: sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==} + + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-axis@3.0.0: + resolution: {integrity: sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==} + engines: {node: '>=12'} + + d3-brush@3.0.0: + resolution: {integrity: sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==} + engines: {node: '>=12'} + + d3-chord@3.0.1: + resolution: {integrity: sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + 
d3-contour@4.0.2: + resolution: {integrity: sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==} + engines: {node: '>=12'} + + d3-delaunay@6.0.4: + resolution: {integrity: sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==} + engines: {node: '>=12'} + + d3-dispatch@3.0.1: + resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} + engines: {node: '>=12'} + + d3-drag@3.0.0: + resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} + engines: {node: '>=12'} + + d3-dsv@3.0.1: + resolution: {integrity: sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==} + engines: {node: '>=12'} + hasBin: true + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-fetch@3.0.1: + resolution: {integrity: sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==} + engines: {node: '>=12'} + + d3-force@3.0.0: + resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==} + engines: {node: '>=12'} + + d3-format@3.1.2: + resolution: {integrity: sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==} + engines: {node: '>=12'} + + d3-geo@3.1.1: + resolution: {integrity: sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==} + engines: {node: '>=12'} + + d3-hierarchy@3.1.2: + resolution: {integrity: sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: 
sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + d3-path@1.0.9: + resolution: {integrity: sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-polygon@3.0.1: + resolution: {integrity: sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==} + engines: {node: '>=12'} + + d3-quadtree@3.0.1: + resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==} + engines: {node: '>=12'} + + d3-random@3.0.1: + resolution: {integrity: sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==} + engines: {node: '>=12'} + + d3-sankey@0.12.3: + resolution: {integrity: sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==} + + d3-scale-chromatic@3.1.0: + resolution: {integrity: sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-selection@3.0.0: + resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} + engines: {node: '>=12'} + + d3-shape@1.3.7: + resolution: {integrity: sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: 
sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} + + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + d3-transition@3.0.1: + resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==} + engines: {node: '>=12'} + peerDependencies: + d3-selection: 2 - 3 + + d3-zoom@3.0.0: + resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} + engines: {node: '>=12'} + + d3@7.9.0: + resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==} + engines: {node: '>=12'} + + dagre-d3-es@7.0.13: + resolution: {integrity: sha512-efEhnxpSuwpYOKRm/L5KbqoZmNNukHa/Flty4Wp62JRvgH2ojwVgPgdYyr4twpieZnyRDdIH7PY2mopX26+j2Q==} + + dayjs@1.11.19: + resolution: {integrity: sha512-t5EcLVS6QPBNqM2z8fakk/NKel+Xzshgt8FFKAn+qwlD1pzZWxh0nVCrvFK7ZDb6XucZeF9z8C7CBWTRIVApAw==} + + debounce@1.2.1: + resolution: {integrity: sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==} + + debug@2.6.9: + resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + debug@4.4.3: + resolution: {integrity: sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decode-named-character-reference@1.3.0: + 
resolution: {integrity: sha512-GtpQYB283KrPp6nRw50q3U9/VfOutZOe103qlN7BPP6Ad27xYnOIWv4lPzo8HCAL+mMZofJ9KEy30fq6MfaK6Q==} + + decompress-response@6.0.0: + resolution: {integrity: sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==} + engines: {node: '>=10'} + + deep-eql@5.0.2: + resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} + engines: {node: '>=6'} + + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + + deep-is@0.1.4: + resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} + + deepmerge@4.3.1: + resolution: {integrity: sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==} + engines: {node: '>=0.10.0'} + + default-browser-id@5.0.1: + resolution: {integrity: sha512-x1VCxdX4t+8wVfd1so/9w+vQ4vx7lKd2Qp5tDRutErwmR85OgmfX7RlLRMWafRMY7hbEiXIbudNrjOAPa/hL8Q==} + engines: {node: '>=18'} + + default-browser@5.4.0: + resolution: {integrity: sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg==} + engines: {node: '>=18'} + + defer-to-connect@2.0.1: + resolution: {integrity: sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==} + engines: {node: '>=10'} + + define-data-property@1.1.4: + resolution: {integrity: sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==} + engines: {node: '>= 0.4'} + + define-lazy-prop@2.0.0: + resolution: {integrity: sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==} + engines: {node: '>=8'} + + define-lazy-prop@3.0.0: + resolution: {integrity: 
sha512-N+MeXYoqr3pOgn8xfyRPREN7gHakLYjhsHhWGT3fWAiL4IkAt0iDw14QiiEm2bE30c5XX5q0FtAA3CK5f9/BUg==} + engines: {node: '>=12'} + + define-properties@1.2.1: + resolution: {integrity: sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==} + engines: {node: '>= 0.4'} + + delaunator@5.0.1: + resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} + + depd@1.1.2: + resolution: {integrity: sha512-7emPTl6Dpo6JRXOXjLRxck+FlLRX5847cLKEn00PLAgc3g2hTZZgr+e4c2v6QpSmLeFP3n5yUo7ft6avBK/5jQ==} + engines: {node: '>= 0.6'} + + depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + destroy@1.2.0: + resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} + engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} + + detect-indent@6.1.0: + resolution: {integrity: sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==} + engines: {node: '>=8'} + + detect-node@2.1.0: + resolution: {integrity: sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==} + + detect-port@1.6.1: + resolution: {integrity: sha512-CmnVc+Hek2egPx1PeTFVta2W78xy2K/9Rkf6cC4T59S50tVnzKj+tnx5mmx5lwvCkujZ4uRrpRSuV+IVs3f90Q==} + engines: {node: '>= 4.0.0'} + hasBin: true + + devlop@1.1.0: + resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==} + + dir-glob@3.0.1: + resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} + engines: {node: '>=8'} + + dns-packet@5.6.1: + 
resolution: {integrity: sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==} + engines: {node: '>=6'} + + dom-converter@0.2.0: + resolution: {integrity: sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==} + + dom-serializer@1.4.1: + resolution: {integrity: sha512-VHwB3KfrcOOkelEG2ZOfxqLZdfkil8PtJi4P8N2MMXucZq2yLp75ClViUlOVwyoHEDjYU433Aq+5zWP61+RGag==} + + dom-serializer@2.0.0: + resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + + domelementtype@2.3.0: + resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} + + domhandler@4.3.1: + resolution: {integrity: sha512-GrwoxYN+uWlzO8uhUXRl0P+kHE4GtVPfYzVLcUxPL7KNdHKj66vvlhiweIHqYYXWlw+T8iLMp42Lm67ghw4WMQ==} + engines: {node: '>= 4'} + + domhandler@5.0.3: + resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} + engines: {node: '>= 4'} + + dompurify@3.3.1: + resolution: {integrity: sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==} + + domutils@2.8.0: + resolution: {integrity: sha512-w96Cjofp72M5IIhpjgobBimYEfoPjx1Vx0BSX9P30WBdZW2WIKU0T1Bd0kz2eNZ9ikjKgHbEyKx8BB6H1L3h3A==} + + domutils@3.2.2: + resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==} + + dot-case@3.0.4: + resolution: {integrity: sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==} + + dot-prop@6.0.1: + resolution: {integrity: sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==} + engines: {node: '>=10'} + + dotenv@16.6.1: + resolution: {integrity: sha512-uBq4egWHTcTt33a72vpSG0z3HnPuIl6NqYcTrKEg2azoEyl2hpW0zqlxysq2pK9HlDIHyHyakeYaYnSAwd8bow==} + engines: {node: '>=12'} + + 
dunder-proto@1.0.1: + resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} + engines: {node: '>= 0.4'} + + duplexer@0.1.2: + resolution: {integrity: sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + ee-first@1.1.1: + resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} + + electron-to-chromium@1.5.283: + resolution: {integrity: sha512-3vifjt1HgrGW/h76UEeny+adYApveS9dH2h3p57JYzBSXJIKUJAvtmIytDKjcSCt9xHfrNCFJ7gts6vkhuq++w==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + emojilib@2.4.0: + resolution: {integrity: sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==} + + emojis-list@3.0.0: + resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==} + engines: {node: '>= 4'} + + emoticon@4.1.0: + resolution: {integrity: sha512-VWZfnxqwNcc51hIy/sbOdEem6D+cVtpPzEEtVAFdaas30+1dgkyaOQ4sQ6Bp0tOMqWO1v+HQfYaoodOkdhK6SQ==} + + encodeurl@2.0.0: + resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} + engines: {node: '>= 0.8'} + + encoding-sniffer@0.2.1: + resolution: {integrity: sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw==} + + enhanced-resolve@5.18.4: + resolution: {integrity: sha512-LgQMM4WXU3QI+SYgEc2liRgznaD5ojbmY3sb8LxyguVkIg5FxdpTkvk72te2R38/TGKxH634oLxXRGY6d7AP+Q==} + engines: 
{node: '>=10.13.0'} + + enquirer@2.4.1: + resolution: {integrity: sha512-rRqJg/6gd538VHvR3PSrdRBb/1Vy2YfzHqzvbhGIQpDRKIa4FgV/54b5Q1xYSxOOwKvjXweS26E0Q+nAMwp2pQ==} + engines: {node: '>=8.6'} + + entities@2.2.0: + resolution: {integrity: sha512-p92if5Nz619I0w+akJrLZH0MX0Pb5DX39XOwQTtXSdQQOaYH03S1uIQp4mhOZtAXrxq4ViO67YTiLBo2638o9A==} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + entities@6.0.1: + resolution: {integrity: sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==} + engines: {node: '>=0.12'} + + entities@7.0.1: + resolution: {integrity: sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==} + engines: {node: '>=0.12'} + + error-ex@1.3.4: + resolution: {integrity: sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==} + + es-define-property@1.0.1: + resolution: {integrity: sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==} + engines: {node: '>= 0.4'} + + es-errors@1.3.0: + resolution: {integrity: sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==} + engines: {node: '>= 0.4'} + + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + + es-module-lexer@2.0.0: + resolution: {integrity: sha512-5POEcUuZybH7IdmGsD8wlf0AI55wMecM9rVBTI/qEAy2c1kTOm3DjFYjrBdI2K3BaJjJYfYFeRtM0t9ssnRuxw==} + + es-object-atoms@1.1.1: + resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} + engines: {node: '>= 0.4'} + + esast-util-from-estree@2.0.0: + resolution: {integrity: sha512-4CyanoAudUSBAn5K13H4JhsMH6L9ZP7XbLVe/dKybkxMO7eDyLsT8UHl9TRNrU2Gr9nz+FovfSIjuXWJ81uVwQ==} + + 
esast-util-from-js@2.0.1: + resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==} + + esbuild@0.21.5: + resolution: {integrity: sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==} + engines: {node: '>=12'} + hasBin: true + + esbuild@0.27.2: + resolution: {integrity: sha512-HyNQImnsOC7X9PMNaCIeAm4ISCQXs5a5YasTXVliKv4uuBo1dKrG0A+uQS8M5eXjVMnLg3WgXaKvprHlFJQffw==} + engines: {node: '>=18'} + hasBin: true + + escalade@3.2.0: + resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} + engines: {node: '>=6'} + + escape-goat@4.0.0: + resolution: {integrity: sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==} + engines: {node: '>=12'} + + escape-html@1.0.3: + resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==} + + escape-string-regexp@1.0.5: + resolution: {integrity: sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==} + engines: {node: '>=0.8.0'} + + escape-string-regexp@4.0.0: + resolution: {integrity: sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==} + engines: {node: '>=10'} + + escape-string-regexp@5.0.0: + resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} + engines: {node: '>=12'} + + eslint-scope@5.1.1: + resolution: {integrity: sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==} + engines: {node: '>=8.0.0'} + + eslint-scope@8.4.0: + resolution: {integrity: sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint-visitor-keys@3.4.3: + resolution: {integrity: 
sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + eslint-visitor-keys@4.2.1: + resolution: {integrity: sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + eslint@9.39.2: + resolution: {integrity: sha512-LEyamqS7W5HB3ujJyvi0HQK/dtVINZvd5mAAp9eT5S/ujByGjiZLCzPcHVzuXbpJDJF/cxwHlfceVUDZ2lnSTw==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + hasBin: true + peerDependencies: + jiti: '*' + peerDependenciesMeta: + jiti: + optional: true + + espree@10.4.0: + resolution: {integrity: sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ==} + engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} + + esprima@4.0.1: + resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} + engines: {node: '>=4'} + hasBin: true + + esquery@1.7.0: + resolution: {integrity: sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==} + engines: {node: '>=0.10'} + + esrecurse@4.3.0: + resolution: {integrity: sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==} + engines: {node: '>=4.0'} + + estraverse@4.3.0: + resolution: {integrity: sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==} + engines: {node: '>=4.0'} + + estraverse@5.3.0: + resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} + engines: {node: '>=4.0'} + + estree-util-attach-comments@3.0.0: + resolution: {integrity: sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==} + + estree-util-build-jsx@3.0.1: + resolution: {integrity: 
sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==} + + estree-util-is-identifier-name@3.0.0: + resolution: {integrity: sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==} + + estree-util-scope@1.0.0: + resolution: {integrity: sha512-2CAASclonf+JFWBNJPndcOpA8EMJwa0Q8LUFJEKqXLW6+qBvbFZuF5gItbQOs/umBUkjviCSDCbBwU2cXbmrhQ==} + + estree-util-to-js@2.0.0: + resolution: {integrity: sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==} + + estree-util-value-to-estree@3.5.0: + resolution: {integrity: sha512-aMV56R27Gv3QmfmF1MY12GWkGzzeAezAX+UplqHVASfjc9wNzI/X6hC0S9oxq61WT4aQesLGslWP9tKk6ghRZQ==} + + estree-util-visit@2.0.0: + resolution: {integrity: sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==} + + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + + esutils@2.0.3: + resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} + engines: {node: '>=0.10.0'} + + eta@2.2.0: + resolution: {integrity: sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==} + engines: {node: '>=6.0.0'} + + etag@1.8.1: + resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} + engines: {node: '>= 0.6'} + + eval@0.1.8: + resolution: {integrity: sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==} + engines: {node: '>= 0.8'} + + eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + + events@3.3.0: + resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} + 
engines: {node: '>=0.8.x'} + + execa@5.1.1: + resolution: {integrity: sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==} + engines: {node: '>=10'} + + expect-type@1.3.0: + resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} + engines: {node: '>=12.0.0'} + + express@4.22.1: + resolution: {integrity: sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==} + engines: {node: '>= 0.10.0'} + + extend-shallow@2.0.1: + resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} + engines: {node: '>=0.10.0'} + + extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + + extendable-error@0.1.7: + resolution: {integrity: sha512-UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg==} + + fast-deep-equal@3.1.3: + resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} + + fast-glob@3.3.3: + resolution: {integrity: sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==} + engines: {node: '>=8.6.0'} + + fast-json-stable-stringify@2.1.0: + resolution: {integrity: sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==} + + fast-levenshtein@2.0.6: + resolution: {integrity: sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==} + + fast-uri@3.1.0: + resolution: {integrity: sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==} + + fastq@1.20.1: + resolution: {integrity: sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==} + + fault@2.0.1: + resolution: {integrity: 
sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==} + + faye-websocket@0.11.4: + resolution: {integrity: sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==} + engines: {node: '>=0.8.0'} + + fdir@6.5.0: + resolution: {integrity: sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==} + engines: {node: '>=12.0.0'} + peerDependencies: + picomatch: ^3 || ^4 + peerDependenciesMeta: + picomatch: + optional: true + + feed@4.2.2: + resolution: {integrity: sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==} + engines: {node: '>=0.4.0'} + + figures@3.2.0: + resolution: {integrity: sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==} + engines: {node: '>=8'} + + file-entry-cache@8.0.0: + resolution: {integrity: sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==} + engines: {node: '>=16.0.0'} + + file-loader@6.2.0: + resolution: {integrity: sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==} + engines: {node: '>= 10.13.0'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + finalhandler@1.3.2: + resolution: {integrity: sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==} + engines: {node: '>= 0.8'} + + find-cache-dir@4.0.0: + resolution: {integrity: sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg==} + engines: {node: '>=14.16'} + + find-up@4.1.0: + resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==} + engines: {node: '>=8'} + + find-up@5.0.0: + 
resolution: {integrity: sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==} + engines: {node: '>=10'} + + find-up@6.3.0: + resolution: {integrity: sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + fix-dts-default-cjs-exports@1.0.1: + resolution: {integrity: sha512-pVIECanWFC61Hzl2+oOCtoJ3F17kglZC/6N94eRWycFgBH35hHx0Li604ZIzhseh97mf2p0cv7vVrOZGoqhlEg==} + + flat-cache@4.0.1: + resolution: {integrity: sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} + engines: {node: '>=16'} + + flat@5.0.2: + resolution: {integrity: sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==} + hasBin: true + + flatted@3.3.3: + resolution: {integrity: sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==} + + follow-redirects@1.15.11: + resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} + engines: {node: '>=4.0'} + peerDependencies: + debug: '*' + peerDependenciesMeta: + debug: + optional: true + + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + + form-data-encoder@2.1.4: + resolution: {integrity: sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==} + engines: {node: '>= 14.17'} + + format@0.2.2: + resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} + engines: {node: '>=0.4.x'} + + forwarded@0.2.0: + resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==} + engines: {node: '>= 0.6'} + + fraction.js@5.3.4: + resolution: {integrity: 
sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==} + + fresh@0.5.2: + resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==} + engines: {node: '>= 0.6'} + + fs-extra@10.1.0: + resolution: {integrity: sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ==} + engines: {node: '>=12'} + + fs-extra@11.3.3: + resolution: {integrity: sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==} + engines: {node: '>=14.14'} + + fs-extra@7.0.1: + resolution: {integrity: sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw==} + engines: {node: '>=6 <7 || >=8'} + + fs-extra@8.1.0: + resolution: {integrity: sha512-yhlQgA6mnOJUKOsRUFsgJdQCvkKhcz8tlZG5HBQfReYZy46OwLcY+Zia0mtdHsOo9y/hP+CxMN0TU9QxoOtG4g==} + engines: {node: '>=6 <7 || >=8'} + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + function-bind@1.1.2: + resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} + + gensync@1.0.0-beta.2: + resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} + engines: {node: '>=6.9.0'} + + get-intrinsic@1.3.0: + resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} + engines: {node: '>= 0.4'} + + get-own-enumerable-property-symbols@3.0.2: + resolution: {integrity: sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==} + + get-proto@1.0.1: + resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} + engines: 
{node: '>= 0.4'} + + get-stream@6.0.1: + resolution: {integrity: sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==} + engines: {node: '>=10'} + + github-slugger@1.5.0: + resolution: {integrity: sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + glob-parent@6.0.2: + resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} + engines: {node: '>=10.13.0'} + + glob-to-regex.js@1.2.0: + resolution: {integrity: sha512-QMwlOQKU/IzqMUOAZWubUOT8Qft+Y0KQWnX9nK3ch0CJg0tTp4TvGZsTfudYKv2NzoQSyPcnA6TYeIQ3jGichQ==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + glob-to-regexp@0.4.1: + resolution: {integrity: sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==} + + glob@10.5.0: + resolution: {integrity: sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==} + hasBin: true + + global-dirs@3.0.1: + resolution: {integrity: sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==} + engines: {node: '>=10'} + + globals@14.0.0: + resolution: {integrity: sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==} + engines: {node: '>=18'} + + globby@11.1.0: + resolution: {integrity: sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==} + engines: {node: '>=10'} + + globby@13.2.2: + resolution: {integrity: sha512-Y1zNGV+pzQdh7H39l9zgB4PJqjRNqydvdYCDG4HFXM4XuvSaQQlEc91IU1yALL8gUTDomgBAfz3XJdmUS+oo0w==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + gopd@1.2.0: + resolution: {integrity: 
sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==} + engines: {node: '>= 0.4'} + + got@12.6.1: + resolution: {integrity: sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==} + engines: {node: '>=14.16'} + + graceful-fs@4.2.10: + resolution: {integrity: sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==} + + graceful-fs@4.2.11: + resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + + gray-matter@4.0.3: + resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} + engines: {node: '>=6.0'} + + gzip-size@6.0.0: + resolution: {integrity: sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==} + engines: {node: '>=10'} + + hachure-fill@0.5.2: + resolution: {integrity: sha512-3GKBOn+m2LX9iq+JC1064cSFprJY4jL1jCXTcpnfER5HYE2l/4EfWSGzkPa/ZDBmYI0ZOEj5VHV/eKnPGkHuOg==} + + handle-thing@2.0.1: + resolution: {integrity: sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==} + + has-flag@4.0.0: + resolution: {integrity: sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==} + engines: {node: '>=8'} + + has-property-descriptors@1.0.2: + resolution: {integrity: sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==} + + has-symbols@1.1.0: + resolution: {integrity: sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==} + engines: {node: '>= 0.4'} + + has-yarn@3.0.0: + resolution: {integrity: sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + hasown@2.0.2: + resolution: {integrity: 
sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==} + engines: {node: '>= 0.4'} + + hast-util-from-parse5@8.0.3: + resolution: {integrity: sha512-3kxEVkEKt0zvcZ3hCRYI8rqrgwtlIOFMWkbclACvjlDw8Li9S2hk/d51OI0nr/gIpdMHNepwgOKqZ/sy0Clpyg==} + + hast-util-parse-selector@4.0.0: + resolution: {integrity: sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==} + + hast-util-raw@9.1.0: + resolution: {integrity: sha512-Y8/SBAHkZGoNkpzqqfCldijcuUKh7/su31kEBp67cFY09Wy0mTRgtsLYsiIxMJxlu0f6AA5SUTbDR8K0rxnbUw==} + + hast-util-to-estree@3.1.3: + resolution: {integrity: sha512-48+B/rJWAp0jamNbAAf9M7Uf//UVqAoMmgXhBdxTDJLGKY+LRnZ99qcG+Qjl5HfMpYNzS5v4EAwVEF34LeAj7w==} + + hast-util-to-jsx-runtime@2.3.6: + resolution: {integrity: sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==} + + hast-util-to-parse5@8.0.1: + resolution: {integrity: sha512-MlWT6Pjt4CG9lFCjiz4BH7l9wmrMkfkJYCxFwKQic8+RTZgWPuWxwAfjJElsXkex7DJjfSJsQIt931ilUgmwdA==} + + hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + + hastscript@9.0.1: + resolution: {integrity: sha512-g7df9rMFX/SPi34tyGCyUBREQoKkapwdY/T04Qn9TDWfHhAYt4/I0gMVirzK5wEzeUqIjEB+LXC/ypb7Aqno5w==} + + he@1.2.0: + resolution: {integrity: sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==} + hasBin: true + + history@4.10.1: + resolution: {integrity: sha512-36nwAD620w12kuzPAsyINPWJqlNbij+hpK1k9XRloDtym8mxzGYl2c17LnV6IAGB2Dmg4tEa7G7DlawS0+qjew==} + + hoist-non-react-statics@3.3.2: + resolution: {integrity: sha512-/gGivxi8JPKWNm/W0jSmzcMPpfpPLc3dY/6GxhX2hQ9iGj3aDfklV4ET7NjKpSinLpJ5vafa9iiGIEZg10SfBw==} + + hpack.js@2.1.6: + resolution: {integrity: sha512-zJxVehUdMGIKsRaNt7apO2Gqp0BdqW5yaiGHXXmbpvxgBYVZnAql+BJb4RO5ad2MgpbZKn5G6nMnegrH1FcNYQ==} + + html-escaper@2.0.2: + resolution: 
{integrity: sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==} + + html-minifier-terser@6.1.0: + resolution: {integrity: sha512-YXxSlJBZTP7RS3tWnQw74ooKa6L9b9i9QYXY21eUEvhZ3u9XLfv6OnFsQq6RxkhHygsaUMvYsZRV5rU/OVNZxw==} + engines: {node: '>=12'} + hasBin: true + + html-minifier-terser@7.2.0: + resolution: {integrity: sha512-tXgn3QfqPIpGl9o+K5tpcj3/MN4SfLtsx2GWwBC3SSd0tXQGyF3gsSqad8loJgKZGM3ZxbYDd5yhiBIdWpmvLA==} + engines: {node: ^14.13.1 || >=16.0.0} + hasBin: true + + html-tags@3.3.1: + resolution: {integrity: sha512-ztqyC3kLto0e9WbNp0aeP+M3kTt+nbaIveGmUxAtZa+8iFgKLUOD4YKM5j+f3QD89bra7UeumolZHKuOXnTmeQ==} + engines: {node: '>=8'} + + html-void-elements@3.0.0: + resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==} + + html-webpack-plugin@5.6.6: + resolution: {integrity: sha512-bLjW01UTrvoWTJQL5LsMRo1SypHW80FTm12OJRSnr3v6YHNhfe+1r0MYUZJMACxnCHURVnBWRwAsWs2yPU9Ezw==} + engines: {node: '>=10.13.0'} + peerDependencies: + '@rspack/core': 0.x || 1.x + webpack: ^5.20.0 + peerDependenciesMeta: + '@rspack/core': + optional: true + webpack: + optional: true + + htmlparser2@10.1.0: + resolution: {integrity: sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ==} + + htmlparser2@6.1.0: + resolution: {integrity: sha512-gyyPk6rgonLFEDGoeRgQNaEUvdJ4ktTmmUh/h2t7s+M8oPpIPxgNACWa+6ESR57kXstwqPiCut0V8NRpcwgU7A==} + + htmlparser2@8.0.2: + resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} + + http-cache-semantics@4.2.0: + resolution: {integrity: sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==} + + http-deceiver@1.2.7: + resolution: {integrity: sha512-LmpOGxTfbpgtGVxJrj5k7asXHCgNZp5nLfp+hWc8QQRqtb7fUy6kRY3BO1h9ddF6yIPYUARgxGOwB42DnxIaNw==} + + http-errors@1.8.1: + resolution: {integrity: 
sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g==} + engines: {node: '>= 0.6'} + + http-errors@2.0.1: + resolution: {integrity: sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==} + engines: {node: '>= 0.8'} + + http-parser-js@0.5.10: + resolution: {integrity: sha512-Pysuw9XpUq5dVc/2SMHpuTY01RFl8fttgcyunjL7eEMhGM3cI4eOmiCycJDVCo/7O7ClfQD3SaI6ftDzqOXYMA==} + + http-proxy-middleware@2.0.9: + resolution: {integrity: sha512-c1IyJYLYppU574+YI7R4QyX2ystMtVXZwIdzazUIPIJsHuWNd+mho2j+bKoHftndicGj9yh+xjd+l0yj7VeT1Q==} + engines: {node: '>=12.0.0'} + peerDependencies: + '@types/express': ^4.17.13 + peerDependenciesMeta: + '@types/express': + optional: true + + http-proxy@1.18.1: + resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==} + engines: {node: '>=8.0.0'} + + http2-wrapper@2.2.1: + resolution: {integrity: sha512-V5nVw1PAOgfI3Lmeaj2Exmeg7fenjhRUgz1lPSezy1CuhPYbgQtbQj4jZfEAEMlaL+vupsvhjqCyjzob0yxsmQ==} + engines: {node: '>=10.19.0'} + + human-id@4.1.3: + resolution: {integrity: sha512-tsYlhAYpjCKa//8rXZ9DqKEawhPoSytweBC2eNvcaDK+57RZLHGqNs3PZTQO6yekLFSuvA6AlnAfrw1uBvtb+Q==} + hasBin: true + + human-signals@2.1.0: + resolution: {integrity: sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==} + engines: {node: '>=10.17.0'} + + hyperdyperid@1.2.0: + resolution: {integrity: sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==} + engines: {node: '>=10.18'} + + iconv-lite@0.4.24: + resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==} + engines: {node: '>=0.10.0'} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + iconv-lite@0.7.2: + resolution: 
{integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} + engines: {node: '>=0.10.0'} + + icss-utils@5.1.0: + resolution: {integrity: sha512-soFhflCVWLfRNOPU3iv5Z9VUdT44xFRbzjLsEzSr5AQmgqPMTHdU3PMT1Cf1ssx8fLNJDA1juftYl+PUcv3MqA==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + ignore@5.3.2: + resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} + engines: {node: '>= 4'} + + ignore@7.0.5: + resolution: {integrity: sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg==} + engines: {node: '>= 4'} + + image-size@2.0.2: + resolution: {integrity: sha512-IRqXKlaXwgSMAMtpNzZa1ZAe8m+Sa1770Dhk8VkSsP9LS+iHD62Zd8FQKs8fbPiagBE7BzoFX23cxFnwshpV6w==} + engines: {node: '>=16.x'} + hasBin: true + + immediate@3.3.0: + resolution: {integrity: sha512-HR7EVodfFUdQCTIeySw+WDRFJlPcLOJbXfwwZ7Oom6tjsvZ3bOkCDJHehQC3nxJrv7+f9XecwazynjU8e4Vw3Q==} + + import-fresh@3.3.1: + resolution: {integrity: sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==} + engines: {node: '>=6'} + + import-lazy@4.0.0: + resolution: {integrity: sha512-rKtvo6a868b5Hu3heneU+L4yEQ4jYKLtjpnPeUdK7h0yzXGmyBTypknlkCvHFBqfX9YlorEiMM6Dnq/5atfHkw==} + engines: {node: '>=8'} + + imurmurhash@0.1.4: + resolution: {integrity: sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==} + engines: {node: '>=0.8.19'} + + indent-string@4.0.0: + resolution: {integrity: sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==} + engines: {node: '>=8'} + + infima@0.2.0-alpha.45: + resolution: {integrity: sha512-uyH0zfr1erU1OohLk0fT4Rrb94AOhguWNOcD9uGrSpRvNB+6gZXUoJX5J0NtvzBO10YZ9PgvA4NFgt+fYg8ojw==} + engines: {node: '>=12'} + + inherits@2.0.4: + resolution: {integrity: 
sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + + ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + + ini@2.0.0: + resolution: {integrity: sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==} + engines: {node: '>=10'} + + inline-style-parser@0.2.7: + resolution: {integrity: sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==} + + internmap@1.0.1: + resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==} + + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + + invariant@2.2.4: + resolution: {integrity: sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA==} + + ipaddr.js@1.9.1: + resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} + engines: {node: '>= 0.10'} + + ipaddr.js@2.3.0: + resolution: {integrity: sha512-Zv/pA+ciVFbCSBBjGfaKUya/CcGmUHzTydLMaTwrUUEM2DIEO3iZvueGxmacvmN50fGpGVKeTXpb2LcYQxeVdg==} + engines: {node: '>= 10'} + + is-alphabetical@2.0.1: + resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} + + is-alphanumerical@2.0.1: + resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} + + is-arrayish@0.2.1: + resolution: {integrity: sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==} + + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-ci@3.0.1: + 
resolution: {integrity: sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ==} + hasBin: true + + is-core-module@2.16.1: + resolution: {integrity: sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==} + engines: {node: '>= 0.4'} + + is-decimal@2.0.1: + resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} + + is-docker@2.2.1: + resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==} + engines: {node: '>=8'} + hasBin: true + + is-docker@3.0.0: + resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + hasBin: true + + is-extendable@0.1.1: + resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} + engines: {node: '>=0.10.0'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-hexadecimal@2.0.1: + resolution: {integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} + + is-inside-container@1.0.0: + resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==} + engines: {node: '>=14.16'} + hasBin: true + + is-installed-globally@0.4.0: + resolution: {integrity: 
sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==} + engines: {node: '>=10'} + + is-network-error@1.3.0: + resolution: {integrity: sha512-6oIwpsgRfnDiyEDLMay/GqCl3HoAtH5+RUKW29gYkL0QA+ipzpDLA16yQs7/RHCSu+BwgbJaOUqa4A99qNVQVw==} + engines: {node: '>=16'} + + is-npm@6.1.0: + resolution: {integrity: sha512-O2z4/kNgyjhQwVR1Wpkbfc19JIhggF97NZNCpWTnjH7kVcZMUrnut9XSN7txI7VdyIYk5ZatOq3zvSuWpU8hoA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + is-obj@1.0.1: + resolution: {integrity: sha512-l4RyHgRqGN4Y3+9JHVrNqO+tN0rV5My76uW5/nuO4K1b6vw5G8d/cmFjP9tRfEsdhZNt0IFdZuK/c2Vr4Nb+Qg==} + engines: {node: '>=0.10.0'} + + is-obj@2.0.0: + resolution: {integrity: sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==} + engines: {node: '>=8'} + + is-path-inside@3.0.3: + resolution: {integrity: sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==} + engines: {node: '>=8'} + + is-plain-obj@3.0.0: + resolution: {integrity: sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==} + engines: {node: '>=10'} + + is-plain-obj@4.1.0: + resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} + engines: {node: '>=12'} + + is-plain-object@2.0.4: + resolution: {integrity: sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==} + engines: {node: '>=0.10.0'} + + is-regexp@1.0.0: + resolution: {integrity: sha512-7zjFAPO4/gwyQAAgRRmqeEeyIICSdmCqa3tsVHMdBzaXXRiqopZL4Cyghg/XulGWrtABTpbnYYzzIRffLkP4oA==} + engines: {node: '>=0.10.0'} + + is-stream@2.0.1: + resolution: {integrity: 
sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + + is-subdir@1.2.0: + resolution: {integrity: sha512-2AT6j+gXe/1ueqbW6fLZJiIw3F8iXGJtt0yDrZaBhAZEG1raiTxKWU+IPqMCzQAXOUCKdA4UDMgacKH25XG2Cw==} + engines: {node: '>=4'} + + is-typedarray@1.0.0: + resolution: {integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==} + + is-windows@1.0.2: + resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} + engines: {node: '>=0.10.0'} + + is-wsl@2.2.0: + resolution: {integrity: sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww==} + engines: {node: '>=8'} + + is-wsl@3.1.0: + resolution: {integrity: sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==} + engines: {node: '>=16'} + + is-yarn-global@0.4.1: + resolution: {integrity: sha512-/kppl+R+LO5VmhYSEWARUFjodS25D68gvj8W7z0I7OWhUla5xWu8KL6CtB2V0R6yqhnRgbcaREMr4EEM6htLPQ==} + engines: {node: '>=12'} + + isarray@0.0.1: + resolution: {integrity: sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==} + + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + isobject@3.0.1: + resolution: {integrity: sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==} + engines: {node: '>=0.10.0'} + + istanbul-lib-coverage@3.2.2: + resolution: {integrity: sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==} + engines: {node: '>=8'} + + istanbul-lib-report@3.0.1: + resolution: {integrity: 
sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==} + engines: {node: '>=10'} + + istanbul-lib-source-maps@5.0.6: + resolution: {integrity: sha512-yg2d+Em4KizZC5niWhQaIomgf5WlL4vOOjZ5xGCmF8SnPE/mDWWXgvRExdcpCgh9lLRRa1/fSYp2ymmbJ1pI+A==} + engines: {node: '>=10'} + + istanbul-reports@3.2.0: + resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} + engines: {node: '>=8'} + + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + + jest-util@29.7.0: + resolution: {integrity: sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jest-worker@27.5.1: + resolution: {integrity: sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==} + engines: {node: '>= 10.13.0'} + + jest-worker@29.7.0: + resolution: {integrity: sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + + jiti@1.21.7: + resolution: {integrity: sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==} + hasBin: true + + joi@17.13.3: + resolution: {integrity: sha512-otDA4ldcIx+ZXsKHWmp0YizCweVRZG96J10b0FevjfuncLO1oX59THoAmHkNubYJ+9gWsYsp5k8v4ib6oDv1fA==} + + joycon@3.1.1: + resolution: {integrity: sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==} + engines: {node: '>=10'} + + js-tokens@4.0.0: + resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==} + + js-yaml@3.14.2: + resolution: {integrity: sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==} + hasBin: true + + js-yaml@4.1.1: + resolution: 
{integrity: sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==} + hasBin: true + + jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + + json-buffer@3.0.1: + resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} + + json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + + json-schema-traverse@0.4.1: + resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} + + json-schema-traverse@1.0.0: + resolution: {integrity: sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==} + + json-stable-stringify-without-jsonify@1.0.1: + resolution: {integrity: sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==} + + json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + + jsonfile@4.0.0: + resolution: {integrity: sha512-m6F1R3z8jjlf2imQHS2Qez5sjKWQzbuuhuJ/FKYFRZvPE3PuHcSMVZzfsLhGVOkfd20obL5SWEBew5ShlquNxg==} + + jsonfile@6.2.0: + resolution: {integrity: sha512-FGuPw30AdOIUTRMC2OMRtQV+jkVj2cfPqSeWXv1NEAJ1qZ5zb1X6z1mFhbfOB/iy3ssJCD+3KuZ8r8C3uVFlAg==} + + katex@0.16.28: + resolution: {integrity: sha512-YHzO7721WbmAL6Ov1uzN/l5mY5WWWhJBSW+jq4tkfZfsxmo1hu6frS0EOswvjBUnWE6NtjEs48SFn5CQESRLZg==} + hasBin: true + + keyv@4.5.4: + resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} + + khroma@2.1.0: + resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==} 
+ + kind-of@6.0.3: + resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} + engines: {node: '>=0.10.0'} + + klaw-sync@6.0.0: + resolution: {integrity: sha512-nIeuVSzdCCs6TDPTqI8w1Yre34sSq7AkZ4B3sfOBbI2CgVSB4Du4aLQijFU2+lhAFCwt9+42Hel6lQNIv6AntQ==} + + kleur@3.0.3: + resolution: {integrity: sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==} + engines: {node: '>=6'} + + langium@3.3.1: + resolution: {integrity: sha512-QJv/h939gDpvT+9SiLVlY7tZC3xB2qK57v0J04Sh9wpMb6MP1q8gB21L3WIo8T5P1MSMg3Ep14L7KkDCFG3y4w==} + engines: {node: '>=16.0.0'} + + latest-version@7.0.0: + resolution: {integrity: sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==} + engines: {node: '>=14.16'} + + launch-editor@2.12.0: + resolution: {integrity: sha512-giOHXoOtifjdHqUamwKq6c49GzBdLjvxrd2D+Q4V6uOHopJv7p9VJxikDsQ/CBXZbEITgUqSVHXLTG3VhPP1Dg==} + + layout-base@1.0.2: + resolution: {integrity: sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} + + layout-base@2.0.1: + resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + + leven@3.1.0: + resolution: {integrity: sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==} + engines: {node: '>=6'} + + levn@0.4.1: + resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} + engines: {node: '>= 0.8.0'} + + lilconfig@3.1.3: + resolution: {integrity: sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==} + engines: {node: '>=14'} + + lines-and-columns@1.2.4: + resolution: {integrity: sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==} + + linkify-it@5.0.0: + resolution: {integrity: 
sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==} + + load-tsconfig@0.2.5: + resolution: {integrity: sha512-IXO6OCs9yg8tMKzfPZ1YmheJbZCiEsnBdcB03l0OcfK9prKnJb96siuHCr5Fl37/yo9DnKU+TLpxzTUspw9shg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + loader-runner@4.3.1: + resolution: {integrity: sha512-IWqP2SCPhyVFTBtRcgMHdzlf9ul25NwaFx4wCEH/KjAXuuHY4yNjvPXsBokp8jCB936PyWRaPKUNh8NvylLp2Q==} + engines: {node: '>=6.11.5'} + + loader-utils@2.0.4: + resolution: {integrity: sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==} + engines: {node: '>=8.9.0'} + + locate-path@5.0.0: + resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==} + engines: {node: '>=8'} + + locate-path@6.0.0: + resolution: {integrity: sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==} + engines: {node: '>=10'} + + locate-path@7.2.0: + resolution: {integrity: sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + lodash-es@4.17.21: + resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} + + lodash-es@4.17.23: + resolution: {integrity: sha512-kVI48u3PZr38HdYz98UmfPnXl2DXrpdctLrFLCd3kOx1xUkOmpFPx7gCWWM5MPkL/fD8zb+Ph0QzjGFs4+hHWg==} + + lodash.debounce@4.0.8: + resolution: {integrity: sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==} + + lodash.memoize@4.1.2: + resolution: {integrity: sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==} + + lodash.merge@4.6.2: + resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + + lodash.startcase@4.4.0: + resolution: {integrity: 
sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==} + + lodash.uniq@4.5.0: + resolution: {integrity: sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==} + + lodash@4.17.23: + resolution: {integrity: sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==} + + longest-streak@3.1.0: + resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} + + loose-envify@1.4.0: + resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} + hasBin: true + + loupe@3.2.1: + resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==} + + lower-case@2.0.2: + resolution: {integrity: sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==} + + lowercase-keys@3.0.0: + resolution: {integrity: sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + + lru-cache@5.1.1: + resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} + + lunr-languages@1.14.0: + resolution: {integrity: sha512-hWUAb2KqM3L7J5bcrngszzISY4BxrXn/Xhbb9TTCJYEGqlR1nG67/M14sp09+PTIRklobrn57IAxcdcO/ZFyNA==} + + lunr@2.3.9: + resolution: {integrity: sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==} + + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} + + magicast@0.3.5: + resolution: {integrity: 
sha512-L0WhttDl+2BOsybvEOLK7fW3UA0OQ0IQ2d6Zl2x/a6vVRs3bAY0ECOSHHeL5jD+SbOpOCUEi0y1DgHEn9Qn1AQ==} + + make-dir@4.0.0: + resolution: {integrity: sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==} + engines: {node: '>=10'} + + mark.js@8.11.1: + resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==} + + markdown-extensions@2.0.0: + resolution: {integrity: sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==} + engines: {node: '>=16'} + + markdown-it@14.0.0: + resolution: {integrity: sha512-seFjF0FIcPt4P9U39Bq1JYblX0KZCjDLFFQPHpL5AzHpqPEKtosxmdq/LTVZnjfH7tjt9BxStm+wXcDBNuYmzw==} + hasBin: true + + markdown-table@2.0.0: + resolution: {integrity: sha512-Ezda85ToJUBhM6WGaG6veasyym+Tbs3cMAw/ZhOPqXiYsr0jgocBV3j3nx+4lk47plLlIqjwuTm/ywVI+zjJ/A==} + + markdown-table@3.0.4: + resolution: {integrity: sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==} + + markdownlint-micromark@0.1.8: + resolution: {integrity: sha512-1ouYkMRo9/6gou9gObuMDnvZM8jC/ly3QCFQyoSPCS2XV1ZClU0xpKbL1Ar3bWWRT1RnBZkWUEiNKrI2CwiBQA==} + engines: {node: '>=16'} + + markdownlint@0.33.0: + resolution: {integrity: sha512-4lbtT14A3m0LPX1WS/3d1m7Blg+ZwiLq36WvjQqFGsX3Gik99NV+VXp/PW3n+Q62xyPdbvGOCfjPqjW+/SKMig==} + engines: {node: '>=18'} + + marked@16.4.2: + resolution: {integrity: sha512-TI3V8YYWvkVf3KJe1dRkpnjs68JUPyEa5vjKrp1XEEJUAOaQc+Qj+L1qWbPd0SJuAdQkFU0h73sXXqwDYxsiDA==} + engines: {node: '>= 20'} + hasBin: true + + math-intrinsics@1.1.0: + resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==} + engines: {node: '>= 0.4'} + + mdast-util-directive@3.1.0: + resolution: {integrity: sha512-I3fNFt+DHmpWCYAT7quoM6lHf9wuqtI+oCOfvILnoicNIqjh5E3dEJWiXuYME2gNe8vl1iMQwyUHa7bgFmak6Q==} + + mdast-util-find-and-replace@3.0.2: + resolution: {integrity: 
sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==} + + mdast-util-from-markdown@2.0.2: + resolution: {integrity: sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==} + + mdast-util-frontmatter@2.0.1: + resolution: {integrity: sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==} + + mdast-util-gfm-autolink-literal@2.0.1: + resolution: {integrity: sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==} + + mdast-util-gfm-footnote@2.1.0: + resolution: {integrity: sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==} + + mdast-util-gfm-strikethrough@2.0.0: + resolution: {integrity: sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==} + + mdast-util-gfm-table@2.0.0: + resolution: {integrity: sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==} + + mdast-util-gfm-task-list-item@2.0.0: + resolution: {integrity: sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==} + + mdast-util-gfm@3.1.0: + resolution: {integrity: sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==} + + mdast-util-mdx-expression@2.0.1: + resolution: {integrity: sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==} + + mdast-util-mdx-jsx@3.2.0: + resolution: {integrity: sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==} + + mdast-util-mdx@3.0.0: + resolution: {integrity: sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==} + + mdast-util-mdxjs-esm@2.0.1: + resolution: {integrity: sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==} + + 
mdast-util-phrasing@4.1.0: + resolution: {integrity: sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==} + + mdast-util-to-hast@13.2.1: + resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==} + + mdast-util-to-markdown@2.1.2: + resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==} + + mdast-util-to-string@4.0.0: + resolution: {integrity: sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==} + + mdn-data@2.0.28: + resolution: {integrity: sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==} + + mdn-data@2.0.30: + resolution: {integrity: sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==} + + mdurl@2.0.0: + resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} + + media-typer@0.3.0: + resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==} + engines: {node: '>= 0.6'} + + memfs@4.56.10: + resolution: {integrity: sha512-eLvzyrwqLHnLYalJP7YZ3wBe79MXktMdfQbvMrVD80K+NhrIukCVBvgP30zTJYEEDh9hZ/ep9z0KOdD7FSHo7w==} + peerDependencies: + tslib: '2' + + merge-descriptors@1.0.3: + resolution: {integrity: sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==} + + merge-stream@2.0.0: + resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + mermaid@11.12.2: + resolution: {integrity: 
sha512-n34QPDPEKmaeCG4WDMGy0OT6PSyxKCfy2pJgShP+Qow2KLrvWjclwbc3yXfSIf4BanqWEhQEpngWwNp/XhZt6w==} + + methods@1.1.2: + resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} + engines: {node: '>= 0.6'} + + micromark-core-commonmark@2.0.3: + resolution: {integrity: sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==} + + micromark-extension-directive@3.0.2: + resolution: {integrity: sha512-wjcXHgk+PPdmvR58Le9d7zQYWy+vKEU9Se44p2CrCDPiLr2FMyiT4Fyb5UFKFC66wGB3kPlgD7q3TnoqPS7SZA==} + + micromark-extension-frontmatter@2.0.0: + resolution: {integrity: sha512-C4AkuM3dA58cgZha7zVnuVxBhDsbttIMiytjgsM2XbHAB2faRVaHRle40558FBN+DJcrLNCoqG5mlrpdU4cRtg==} + + micromark-extension-gfm-autolink-literal@2.1.0: + resolution: {integrity: sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==} + + micromark-extension-gfm-footnote@2.1.0: + resolution: {integrity: sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==} + + micromark-extension-gfm-strikethrough@2.1.0: + resolution: {integrity: sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==} + + micromark-extension-gfm-table@2.1.1: + resolution: {integrity: sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==} + + micromark-extension-gfm-tagfilter@2.0.0: + resolution: {integrity: sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==} + + micromark-extension-gfm-task-list-item@2.1.0: + resolution: {integrity: sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==} + + micromark-extension-gfm@3.0.0: + resolution: {integrity: sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==} + + micromark-extension-mdx-expression@3.0.1: + resolution: 
{integrity: sha512-dD/ADLJ1AeMvSAKBwO22zG22N4ybhe7kFIZ3LsDI0GlsNr2A3KYxb0LdC1u5rj4Nw+CHKY0RVdnHX8vj8ejm4Q==} + + micromark-extension-mdx-jsx@3.0.2: + resolution: {integrity: sha512-e5+q1DjMh62LZAJOnDraSSbDMvGJ8x3cbjygy2qFEi7HCeUT4BDKCvMozPozcD6WmOt6sVvYDNBKhFSz3kjOVQ==} + + micromark-extension-mdx-md@2.0.0: + resolution: {integrity: sha512-EpAiszsB3blw4Rpba7xTOUptcFeBFi+6PY8VnJ2hhimH+vCQDirWgsMpz7w1XcZE7LVrSAUGb9VJpG9ghlYvYQ==} + + micromark-extension-mdxjs-esm@3.0.0: + resolution: {integrity: sha512-DJFl4ZqkErRpq/dAPyeWp15tGrcrrJho1hKK5uBS70BCtfrIFg81sqcTVu3Ta+KD1Tk5vAtBNElWxtAa+m8K9A==} + + micromark-extension-mdxjs@3.0.0: + resolution: {integrity: sha512-A873fJfhnJ2siZyUrJ31l34Uqwy4xIFmvPY1oj+Ean5PHcPBYzEsvqvWGaWcfEIr11O5Dlw3p2y0tZWpKHDejQ==} + + micromark-factory-destination@2.0.1: + resolution: {integrity: sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==} + + micromark-factory-label@2.0.1: + resolution: {integrity: sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==} + + micromark-factory-mdx-expression@2.0.3: + resolution: {integrity: sha512-kQnEtA3vzucU2BkrIa8/VaSAsP+EJ3CKOvhMuJgOEGg9KDC6OAY6nSnNDVRiVNRqj7Y4SlSzcStaH/5jge8JdQ==} + + micromark-factory-space@1.1.0: + resolution: {integrity: sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==} + + micromark-factory-space@2.0.1: + resolution: {integrity: sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==} + + micromark-factory-title@2.0.1: + resolution: {integrity: sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==} + + micromark-factory-whitespace@2.0.1: + resolution: {integrity: sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==} + + micromark-util-character@1.2.0: + resolution: {integrity: 
sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==} + + micromark-util-character@2.1.1: + resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==} + + micromark-util-chunked@2.0.1: + resolution: {integrity: sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==} + + micromark-util-classify-character@2.0.1: + resolution: {integrity: sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==} + + micromark-util-combine-extensions@2.0.1: + resolution: {integrity: sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==} + + micromark-util-decode-numeric-character-reference@2.0.2: + resolution: {integrity: sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==} + + micromark-util-decode-string@2.0.1: + resolution: {integrity: sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==} + + micromark-util-encode@2.0.1: + resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==} + + micromark-util-events-to-acorn@2.0.3: + resolution: {integrity: sha512-jmsiEIiZ1n7X1Rr5k8wVExBQCg5jy4UXVADItHmNk1zkwEVhBuIUKRu3fqv+hs4nxLISi2DQGlqIOGiFxgbfHg==} + + micromark-util-html-tag-name@2.0.1: + resolution: {integrity: sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==} + + micromark-util-normalize-identifier@2.0.1: + resolution: {integrity: sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==} + + micromark-util-resolve-all@2.0.1: + resolution: {integrity: sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==} + + micromark-util-sanitize-uri@2.0.1: + resolution: {integrity: 
sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==} + + micromark-util-subtokenize@2.1.0: + resolution: {integrity: sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==} + + micromark-util-symbol@1.1.0: + resolution: {integrity: sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==} + + micromark-util-symbol@2.0.1: + resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==} + + micromark-util-types@1.1.0: + resolution: {integrity: sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==} + + micromark-util-types@2.0.2: + resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==} + + micromark@4.0.2: + resolution: {integrity: sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==} + + micromatch@4.0.8: + resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==} + engines: {node: '>=8.6'} + + mime-db@1.33.0: + resolution: {integrity: sha512-BHJ/EKruNIqJf/QahvxwQZXKygOQ256myeN/Ew+THcAa5q+PjyTTMMeNQC4DZw5AwfvelsUrA6B67NKMqXDbzQ==} + engines: {node: '>= 0.6'} + + mime-db@1.52.0: + resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} + engines: {node: '>= 0.6'} + + mime-db@1.54.0: + resolution: {integrity: sha512-aU5EJuIN2WDemCcAp2vFBfp/m4EAhWJnUNSSw0ixs7/kXbd6Pg64EmwJkNdFhB8aWt1sH2CTXrLxo/iAGV3oPQ==} + engines: {node: '>= 0.6'} + + mime-types@2.1.18: + resolution: {integrity: sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==} + engines: {node: '>= 0.6'} + + mime-types@2.1.35: + resolution: {integrity: 
sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} + engines: {node: '>= 0.6'} + + mime-types@3.0.2: + resolution: {integrity: sha512-Lbgzdk0h4juoQ9fCKXW4by0UJqj+nOOrI9MJ1sSj4nI8aI2eo1qmvQEie4VD1glsS250n15LsWsYtCugiStS5A==} + engines: {node: '>=18'} + + mime@1.6.0: + resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==} + engines: {node: '>=4'} + hasBin: true + + mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + + mimic-response@3.1.0: + resolution: {integrity: sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==} + engines: {node: '>=10'} + + mimic-response@4.0.0: + resolution: {integrity: sha512-e5ISH9xMYU0DzrT+jl8q2ze9D6eWBto+I8CNpe+VI+K2J/F/k3PdkdTdz4wvGVH4NTpo+NRYTVIuMQEMMcsLqg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + mini-css-extract-plugin@2.10.0: + resolution: {integrity: sha512-540P2c5dYnJlyJxTaSloliZexv8rji6rY8FhQN+WF/82iHQfA23j/xtJx97L+mXOML27EqksSek/g4eK7jaL3g==} + engines: {node: '>= 12.13.0'} + peerDependencies: + webpack: ^5.0.0 + + minimalistic-assert@1.0.1: + resolution: {integrity: sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==} + + minimatch@3.1.2: + resolution: {integrity: sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==} + + minimatch@9.0.5: + resolution: {integrity: sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass@7.1.2: + resolution: {integrity: 
sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==} + engines: {node: '>=16 || 14 >=14.17'} + + mlly@1.8.0: + resolution: {integrity: sha512-l8D9ODSRWLe2KHJSifWGwBqpTZXIXTeo8mlKjY+E2HAakaTeNpqAyBZ8GSqLzHgw4XmHmC8whvpjJNMbFZN7/g==} + + mri@1.2.0: + resolution: {integrity: sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} + engines: {node: '>=4'} + + mrmime@2.0.1: + resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} + engines: {node: '>=10'} + + ms@2.0.0: + resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} + + ms@2.1.3: + resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + + multicast-dns@7.2.5: + resolution: {integrity: sha512-2eznPJP8z2BFLX50tf0LuODrpINqP1RVIm/CObbTcBRITQgmC/TjcREF1NeTBzIcR5XO/ukWo+YHOjBbFwIupg==} + hasBin: true + + mz@2.7.0: + resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} + + nanoid@3.3.11: + resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + natural-compare@1.4.0: + resolution: {integrity: sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==} + + negotiator@0.6.3: + resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==} + engines: {node: '>= 0.6'} + + negotiator@0.6.4: + resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==} + engines: {node: '>= 0.6'} + + neo-async@2.6.2: + resolution: {integrity: 
sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} + + no-case@3.0.4: + resolution: {integrity: sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==} + + node-emoji@2.2.0: + resolution: {integrity: sha512-Z3lTE9pLaJF47NyMhd4ww1yFTAP8YhYI8SleJiHzM46Fgpm5cnNzSl9XfzFNqbaz+VlJrIj3fXQ4DeN1Rjm6cw==} + engines: {node: '>=18'} + + node-releases@2.0.27: + resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + normalize-url@8.1.1: + resolution: {integrity: sha512-JYc0DPlpGWB40kH5g07gGTrYuMqV653k3uBKY6uITPWds3M0ov3GaWGp9lbE3Bzngx8+XkfzgvASb9vk9JDFXQ==} + engines: {node: '>=14.16'} + + npm-run-path@4.0.1: + resolution: {integrity: sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==} + engines: {node: '>=8'} + + nprogress@0.2.0: + resolution: {integrity: sha512-I19aIingLgR1fmhftnbWWO3dXc0hSxqHQHQb3H8m+K3TnEn/iSeTZZOyvKXWqQESMwuUVnatlCnZdLBZZt2VSA==} + + nth-check@2.1.1: + resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} + + null-loader@4.0.1: + resolution: {integrity: sha512-pxqVbi4U6N26lq+LmgIbB5XATP0VdZKOG25DhHi8btMmJJefGArFyDg1yc4U3hWCJbMqSrw0qyrz1UQX+qYXqg==} + engines: {node: '>= 10.13.0'} + peerDependencies: + webpack: ^4.0.0 || ^5.0.0 + + object-assign@4.1.1: + resolution: {integrity: sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==} + engines: {node: '>=0.10.0'} + + object-inspect@1.13.4: + resolution: {integrity: sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==} + engines: {node: '>= 0.4'} + + object-keys@1.1.1: + resolution: 
{integrity: sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==} + engines: {node: '>= 0.4'} + + object.assign@4.1.7: + resolution: {integrity: sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==} + engines: {node: '>= 0.4'} + + obuf@1.1.2: + resolution: {integrity: sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==} + + on-finished@2.4.1: + resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==} + engines: {node: '>= 0.8'} + + on-headers@1.1.0: + resolution: {integrity: sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==} + engines: {node: '>= 0.8'} + + onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + + open@10.2.0: + resolution: {integrity: sha512-YgBpdJHPyQ2UE5x+hlSXcnejzAvD0b22U2OuAP+8OnlJT+PjWPxtgmGqKKc+RgTM63U9gN0YzrYc71R2WT/hTA==} + engines: {node: '>=18'} + + open@8.4.2: + resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==} + engines: {node: '>=12'} + + openapi-fetch@0.10.6: + resolution: {integrity: sha512-6xXfvIEL/POtLGOaFPsp3O+pDe+J3DZYxbD9BrsQHXOTeNK8z/gsWHT6adUy1KcpQOhmkerMzlQrJM6DbN55dQ==} + + openapi-typescript-helpers@0.0.11: + resolution: {integrity: sha512-xofUHlVFq+BMquf3nh9I8N2guHckW6mrDO/F3kaFgrL7MGbjldDnQ9TIT+rkH/+H0LiuO+RuZLnNmsJwsjwUKg==} + + openapi-typescript@6.7.6: + resolution: {integrity: sha512-c/hfooPx+RBIOPM09GSxABOZhYPblDoyaGhqBkD/59vtpN21jEuWKDlM0KYTvqJVlSYjKs0tBcIdeXKChlSPtw==} + hasBin: true + + opener@1.5.2: + resolution: {integrity: sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==} + hasBin: true + + optionator@0.9.4: + resolution: {integrity: 
sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} + engines: {node: '>= 0.8.0'} + + outdent@0.5.0: + resolution: {integrity: sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q==} + + p-cancelable@3.0.0: + resolution: {integrity: sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw==} + engines: {node: '>=12.20'} + + p-filter@2.1.0: + resolution: {integrity: sha512-ZBxxZ5sL2HghephhpGAQdoskxplTwr7ICaehZwLIlfL6acuVgZPm8yBNuRAFBGEqtD/hmUeq9eqLg2ys9Xr/yw==} + engines: {node: '>=8'} + + p-finally@1.0.0: + resolution: {integrity: sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==} + engines: {node: '>=4'} + + p-limit@2.3.0: + resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==} + engines: {node: '>=6'} + + p-limit@3.1.0: + resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} + engines: {node: '>=10'} + + p-limit@4.0.0: + resolution: {integrity: sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + p-locate@4.1.0: + resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==} + engines: {node: '>=8'} + + p-locate@5.0.0: + resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} + engines: {node: '>=10'} + + p-locate@6.0.0: + resolution: {integrity: sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + p-map@2.1.0: + resolution: {integrity: sha512-y3b8Kpd8OAN444hxfBbFfj1FY/RjtTd8tzYwhUqNYXx0fXx2iX4maP4Qr6qhIKbQXI02wTLAda4fYUbDagTUFw==} + engines: 
{node: '>=6'} + + p-map@4.0.0: + resolution: {integrity: sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==} + engines: {node: '>=10'} + + p-queue@6.6.2: + resolution: {integrity: sha512-RwFpb72c/BhQLEXIZ5K2e+AhgNVmIejGlTgiB9MzZ0e93GRvqZ7uSi0dvRF7/XIXDeNkra2fNHBxTyPDGySpjQ==} + engines: {node: '>=8'} + + p-retry@6.2.1: + resolution: {integrity: sha512-hEt02O4hUct5wtwg4H4KcWgDdm+l1bOaEy/hWzd8xtXB9BqxTWBBhb+2ImAtH4Cv4rPjV76xN3Zumqk3k3AhhQ==} + engines: {node: '>=16.17'} + + p-timeout@3.2.0: + resolution: {integrity: sha512-rhIwUycgwwKcP9yTOOFK/AKsAopjjCakVqLHePO3CC6Mir1Z99xT+R63jZxAT5lFZLa2inS5h+ZS2GvR99/FBg==} + engines: {node: '>=8'} + + p-try@2.2.0: + resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==} + engines: {node: '>=6'} + + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + + package-json@8.1.1: + resolution: {integrity: sha512-cbH9IAIJHNj9uXi196JVsRlt7cHKak6u/e6AkL/bkRelZ7rlL3X1YKxsZwa36xipOEKAsdtmaG6aAJoM1fx2zA==} + engines: {node: '>=14.16'} + + package-manager-detector@0.2.11: + resolution: {integrity: sha512-BEnLolu+yuz22S56CU1SUKq3XC3PkwD5wv4ikR4MfGvnRVcmzXR9DwSlW2fEamyTPyXHomBJRzgapeuBvRNzJQ==} + + package-manager-detector@1.6.0: + resolution: {integrity: sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==} + + param-case@3.0.4: + resolution: {integrity: sha512-RXlj7zCYokReqWpOPH9oYivUzLYZ5vAPIfEmCTNViosC78F8F0H9y7T7gG2M39ymgutxF5gcFEsyZQSph9Bp3A==} + + parent-module@1.0.1: + resolution: {integrity: sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==} + engines: {node: '>=6'} + + parse-entities@4.0.2: + resolution: {integrity: sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==} + + parse-json@5.2.0: + 
resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + + parse-numeric-range@1.3.0: + resolution: {integrity: sha512-twN+njEipszzlMJd4ONUYgSfZPDxgHhT9Ahed5uTigpQn90FggW4SA/AIPq/6a149fTbE9qBEcSwE3FAEp6wQQ==} + + parse5-htmlparser2-tree-adapter@7.1.0: + resolution: {integrity: sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==} + + parse5-parser-stream@7.1.2: + resolution: {integrity: sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==} + + parse5@7.3.0: + resolution: {integrity: sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==} + + parseurl@1.3.3: + resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} + engines: {node: '>= 0.8'} + + pascal-case@3.1.2: + resolution: {integrity: sha512-uWlGT3YSnK9x3BQJaOdcZwrnV6hPpd8jFH1/ucpiLRPh/2zCVJKS19E4GvYHvaCcACn3foXZ0cLB9Wrx1KGe5g==} + + path-data-parser@0.1.0: + resolution: {integrity: sha512-NOnmBpt5Y2RWbuv0LMzsayp3lVylAHLPUTut412ZA3l+C4uw4ZVkQbjShYCQ8TCpUMdPapr4YjUqLYD6v68j+w==} + + path-exists@4.0.0: + resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==} + engines: {node: '>=8'} + + path-exists@5.0.0: + resolution: {integrity: sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + path-is-inside@1.0.2: + resolution: {integrity: sha512-DUWJr3+ULp4zXmol/SZkFf3JGsS9/SIv+Y3Rt93/UjPpDpklB5f1er4O3POIbUuUJ3FXgqte2Q7SrU6zAqwk8w==} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-parse@1.0.7: + resolution: {integrity: 
sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + path-to-regexp@0.1.12: + resolution: {integrity: sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==} + + path-to-regexp@1.9.0: + resolution: {integrity: sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g==} + + path-to-regexp@3.3.0: + resolution: {integrity: sha512-qyCH421YQPS2WFDxDjftfc1ZR5WKQzVzqsp4n9M2kQhVOo/ByahFoUNJfl58kOcEGfQ//7weFTDhm+ss8Ecxgw==} + + path-type@4.0.0: + resolution: {integrity: sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==} + engines: {node: '>=8'} + + pathe@1.1.2: + resolution: {integrity: sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==} + + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + + pathval@2.0.1: + resolution: {integrity: sha512-//nshmD55c46FuFw26xV/xFAaB5HF9Xdap7HJBBnrKdAd6/GxDBaNA1870O79+9ueg61cZLSVc+OaFlfmObYVQ==} + engines: {node: '>= 14.16'} + + picocolors@1.1.1: + resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + picomatch@4.0.3: + resolution: {integrity: sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==} + engines: {node: '>=12'} + + pify@4.0.1: + resolution: {integrity: sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==} + engines: {node: 
'>=6'} + + pirates@4.0.7: + resolution: {integrity: sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==} + engines: {node: '>= 6'} + + pkg-dir@7.0.0: + resolution: {integrity: sha512-Ie9z/WINcxxLp27BKOCHGde4ITq9UklYKDzVo1nhk5sqGEXU3FpkwP5GM2voTGJkGd9B3Otl+Q4uwSOeSUtOBA==} + engines: {node: '>=14.16'} + + pkg-types@1.3.1: + resolution: {integrity: sha512-/Jm5M4RvtBFVkKWRu2BLUTNP8/M2a+UwuAX+ae4770q1qVGtfjG+WTCupoZixokjmHiry8uI+dlY8KXYV5HVVQ==} + + pkijs@3.3.3: + resolution: {integrity: sha512-+KD8hJtqQMYoTuL1bbGOqxb4z+nZkTAwVdNtWwe8Tc2xNbEmdJYIYoc6Qt0uF55e6YW6KuTHw1DjQ18gMhzepw==} + engines: {node: '>=16.0.0'} + + points-on-curve@0.2.0: + resolution: {integrity: sha512-0mYKnYYe9ZcqMCWhUjItv/oHjvgEsfKvnUTg8sAtnHr3GVy7rGkXCb6d5cSyqrWqL4k81b9CPg3urd+T7aop3A==} + + points-on-path@0.2.1: + resolution: {integrity: sha512-25ClnWWuw7JbWZcgqY/gJ4FQWadKxGWk+3kR/7kD0tCaDtPPMj7oHu2ToLaVhfpnHrZzYby2w6tUA0eOIuUg8g==} + + postcss-attribute-case-insensitive@7.0.1: + resolution: {integrity: sha512-Uai+SupNSqzlschRyNx3kbCTWgY/2hcwtHEI/ej2LJWc9JJ77qKgGptd8DHwY1mXtZ7Aoh4z4yxfwMBue9eNgw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-calc@9.0.1: + resolution: {integrity: sha512-TipgjGyzP5QzEhsOZUaIkeO5mKeMFpebWzRogWG/ysonUlnHcq5aJe0jOjpfzUU8PeSaBQnrE8ehR0QA5vs8PQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.2.2 + + postcss-clamp@4.1.0: + resolution: {integrity: sha512-ry4b1Llo/9zz+PKC+030KUnPITTJAHeOwjfAyyB60eT0AorGLdzp52s31OsPRHRf8NchkgFoG2y6fCfn1IV1Ow==} + engines: {node: '>=7.6.0'} + peerDependencies: + postcss: ^8.4.6 + + postcss-color-functional-notation@7.0.12: + resolution: {integrity: sha512-TLCW9fN5kvO/u38/uesdpbx3e8AkTYhMvDZYa9JpmImWuTE99bDQ7GU7hdOADIZsiI9/zuxfAJxny/khknp1Zw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-color-hex-alpha@10.0.0: + resolution: {integrity: 
sha512-1kervM2cnlgPs2a8Vt/Qbe5cQ++N7rkYo/2rz2BkqJZIHQwaVuJgQH38REHrAi4uM0b1fqxMkWYmese94iMp3w==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-color-rebeccapurple@10.0.0: + resolution: {integrity: sha512-JFta737jSP+hdAIEhk1Vs0q0YF5P8fFcj+09pweS8ktuGuZ8pPlykHsk6mPxZ8awDl4TrcxUqJo9l1IhVr/OjQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-colormin@6.1.0: + resolution: {integrity: sha512-x9yX7DOxeMAR+BgGVnNSAxmAj98NX/YxEMNFP+SDCEeNLb2r3i6Hh1ksMsnW8Ub5SLCpbescQqn9YEbE9554Sw==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-convert-values@6.1.0: + resolution: {integrity: sha512-zx8IwP/ts9WvUM6NkVSkiU902QZL1bwPhaVaLynPtCsOTqp+ZKbNi+s6XJg3rfqpKGA/oc7Oxk5t8pOQJcwl/w==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-custom-media@11.0.6: + resolution: {integrity: sha512-C4lD4b7mUIw+RZhtY7qUbf4eADmb7Ey8BFA2px9jUbwg7pjTZDl4KY4bvlUV+/vXQvzQRfiGEVJyAbtOsCMInw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-custom-properties@14.0.6: + resolution: {integrity: sha512-fTYSp3xuk4BUeVhxCSJdIPhDLpJfNakZKoiTDx7yRGCdlZrSJR7mWKVOBS4sBF+5poPQFMj2YdXx1VHItBGihQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-custom-selectors@8.0.5: + resolution: {integrity: sha512-9PGmckHQswiB2usSO6XMSswO2yFWVoCAuih1yl9FVcwkscLjRKjwsjM3t+NIWpSU2Jx3eOiK2+t4vVTQaoCHHg==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-dir-pseudo-class@9.0.1: + resolution: {integrity: sha512-tRBEK0MHYvcMUrAuYMEOa0zg9APqirBcgzi6P21OhxtJyJADo/SWBwY1CAwEohQ/6HDaa9jCjLRG7K3PVQYHEA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-discard-comments@6.0.2: + resolution: {integrity: sha512-65w/uIqhSBBfQmYnG92FO1mWZjJ4GL5b8atm5Yw2UgrwD7HiNiSSNwJor1eCFGzUgYnN/iIknhNRVqjrrpuglw==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + 
postcss-discard-duplicates@6.0.3: + resolution: {integrity: sha512-+JA0DCvc5XvFAxwx6f/e68gQu/7Z9ud584VLmcgto28eB8FqSFZwtrLwB5Kcp70eIoWP/HXqz4wpo8rD8gpsTw==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-discard-empty@6.0.3: + resolution: {integrity: sha512-znyno9cHKQsK6PtxL5D19Fj9uwSzC2mB74cpT66fhgOadEUPyXFkbgwm5tvc3bt3NAy8ltE5MrghxovZRVnOjQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-discard-overridden@6.0.2: + resolution: {integrity: sha512-j87xzI4LUggC5zND7KdjsI25APtyMuynXZSujByMaav2roV6OZX+8AaCUcZSWqckZpjAjRyFDdpqybgjFO0HJQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-discard-unused@6.0.5: + resolution: {integrity: sha512-wHalBlRHkaNnNwfC8z+ppX57VhvS+HWgjW508esjdaEYr3Mx7Gnn2xA4R/CKf5+Z9S5qsqC+Uzh4ueENWwCVUA==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-double-position-gradients@6.0.4: + resolution: {integrity: sha512-m6IKmxo7FxSP5nF2l63QbCC3r+bWpFUWmZXZf096WxG0m7Vl1Q1+ruFOhpdDRmKrRS+S3Jtk+TVk/7z0+BVK6g==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-focus-visible@10.0.1: + resolution: {integrity: sha512-U58wyjS/I1GZgjRok33aE8juW9qQgQUNwTSdxQGuShHzwuYdcklnvK/+qOWX1Q9kr7ysbraQ6ht6r+udansalA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-focus-within@9.0.1: + resolution: {integrity: sha512-fzNUyS1yOYa7mOjpci/bR+u+ESvdar6hk8XNK/TRR0fiGTp2QT5N+ducP0n3rfH/m9I7H/EQU6lsa2BrgxkEjw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-font-variant@5.0.0: + resolution: {integrity: sha512-1fmkBaCALD72CK2a9i468mA/+tr9/1cBxRRMXOUaZqO43oWPR5imcyPjXwuv7PXbCid4ndlP5zWhidQVVa3hmA==} + peerDependencies: + postcss: ^8.1.0 + + postcss-gap-properties@6.0.0: + resolution: {integrity: sha512-Om0WPjEwiM9Ru+VhfEDPZJAKWUd0mV1HmNXqp2C29z80aQ2uP9UVhLc7e3aYMIor/S5cVhoPgYQ7RtfeZpYTRw==} + engines: {node: '>=18'} + 
peerDependencies: + postcss: ^8.4 + + postcss-image-set-function@7.0.0: + resolution: {integrity: sha512-QL7W7QNlZuzOwBTeXEmbVckNt1FSmhQtbMRvGGqqU4Nf4xk6KUEQhAoWuMzwbSv5jxiRiSZ5Tv7eiDB9U87znA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-lab-function@7.0.12: + resolution: {integrity: sha512-tUcyRk1ZTPec3OuKFsqtRzW2Go5lehW29XA21lZ65XmzQkz43VY2tyWEC202F7W3mILOjw0voOiuxRGTsN+J9w==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-load-config@6.0.1: + resolution: {integrity: sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==} + engines: {node: '>= 18'} + peerDependencies: + jiti: '>=1.21.0' + postcss: '>=8.0.9' + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + jiti: + optional: true + postcss: + optional: true + tsx: + optional: true + yaml: + optional: true + + postcss-loader@7.3.4: + resolution: {integrity: sha512-iW5WTTBSC5BfsBJ9daFMPVrLT36MrNiC6fqOZTTaHjBNX6Pfd5p+hSBqe/fEeNd7pc13QiAyGt7VdGMw4eRC4A==} + engines: {node: '>= 14.15.0'} + peerDependencies: + postcss: ^7.0.0 || ^8.0.1 + webpack: ^5.0.0 + + postcss-logical@8.1.0: + resolution: {integrity: sha512-pL1hXFQ2fEXNKiNiAgtfA005T9FBxky5zkX6s4GZM2D8RkVgRqz3f4g1JUoq925zXv495qk8UNldDwh8uGEDoA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-merge-idents@6.0.3: + resolution: {integrity: sha512-1oIoAsODUs6IHQZkLQGO15uGEbK3EAl5wi9SS8hs45VgsxQfMnxvt+L+zIr7ifZFIH14cfAeVe2uCTa+SPRa3g==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-merge-longhand@6.0.5: + resolution: {integrity: sha512-5LOiordeTfi64QhICp07nzzuTDjNSO8g5Ksdibt44d+uvIIAE1oZdRn8y/W5ZtYgRH/lnLDlvi9F8btZcVzu3w==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-merge-rules@6.1.1: + resolution: {integrity: sha512-KOdWF0gju31AQPZiD+2Ar9Qjowz1LTChSjFFbS+e2sFgc4uHOp3ZvVX4sNeTlk0w2O31ecFGgrFzhO0RSWbWwQ==} + engines: {node: ^14 || ^16 || >=18.0} + 
peerDependencies: + postcss: ^8.4.31 + + postcss-minify-font-values@6.1.0: + resolution: {integrity: sha512-gklfI/n+9rTh8nYaSJXlCo3nOKqMNkxuGpTn/Qm0gstL3ywTr9/WRKznE+oy6fvfolH6dF+QM4nCo8yPLdvGJg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-minify-gradients@6.0.3: + resolution: {integrity: sha512-4KXAHrYlzF0Rr7uc4VrfwDJ2ajrtNEpNEuLxFgwkhFZ56/7gaE4Nr49nLsQDZyUe+ds+kEhf+YAUolJiYXF8+Q==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-minify-params@6.1.0: + resolution: {integrity: sha512-bmSKnDtyyE8ujHQK0RQJDIKhQ20Jq1LYiez54WiaOoBtcSuflfK3Nm596LvbtlFcpipMjgClQGyGr7GAs+H1uA==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-minify-selectors@6.0.4: + resolution: {integrity: sha512-L8dZSwNLgK7pjTto9PzWRoMbnLq5vsZSTu8+j1P/2GB8qdtGQfn+K1uSvFgYvgh83cbyxT5m43ZZhUMTJDSClQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-modules-extract-imports@3.1.0: + resolution: {integrity: sha512-k3kNe0aNFQDAZGbin48pL2VNidTF0w4/eASDsxlyspobzU3wZQLOGj7L9gfRe0Jo9/4uud09DsjFNH7winGv8Q==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules-local-by-default@4.2.0: + resolution: {integrity: sha512-5kcJm/zk+GJDSfw+V/42fJ5fhjL5YbFDl8nVdXkJPLLW+Vf9mTD5Xe0wqIaDnLuL2U6cDNpTr+UQ+v2HWIBhzw==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules-scope@3.2.1: + resolution: {integrity: sha512-m9jZstCVaqGjTAuny8MdgE88scJnCiQSlSrOWcTQgM2t32UBe+MUmFSO5t7VMSfAf/FJKImAxBav8ooCHJXCJA==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-modules-values@4.0.0: + resolution: {integrity: sha512-RDxHkAiEGI78gS2ofyvCsu7iycRv7oqw5xMWn9iMoR0N/7mf9D50ecQqUo5BZ9Zh2vH4bCUR/ktCqbB9m8vJjQ==} + engines: {node: ^10 || ^12 || >= 14} + peerDependencies: + postcss: ^8.1.0 + + postcss-nesting@13.0.2: + resolution: {integrity: 
sha512-1YCI290TX+VP0U/K/aFxzHzQWHWURL+CtHMSbex1lCdpXD1SoR2sYuxDu5aNI9lPoXpKTCggFZiDJbwylU0LEQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-normalize-charset@6.0.2: + resolution: {integrity: sha512-a8N9czmdnrjPHa3DeFlwqst5eaL5W8jYu3EBbTTkI5FHkfMhFZh1EGbku6jhHhIzTA6tquI2P42NtZ59M/H/kQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-display-values@6.0.2: + resolution: {integrity: sha512-8H04Mxsb82ON/aAkPeq8kcBbAtI5Q2a64X/mnRRfPXBq7XeogoQvReqxEfc0B4WPq1KimjezNC8flUtC3Qz6jg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-positions@6.0.2: + resolution: {integrity: sha512-/JFzI441OAB9O7VnLA+RtSNZvQ0NCFZDOtp6QPFo1iIyawyXg0YI3CYM9HBy1WvwCRHnPep/BvI1+dGPKoXx/Q==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-repeat-style@6.0.2: + resolution: {integrity: sha512-YdCgsfHkJ2jEXwR4RR3Tm/iOxSfdRt7jplS6XRh9Js9PyCR/aka/FCb6TuHT2U8gQubbm/mPmF6L7FY9d79VwQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-string@6.0.2: + resolution: {integrity: sha512-vQZIivlxlfqqMp4L9PZsFE4YUkWniziKjQWUtsxUiVsSSPelQydwS8Wwcuw0+83ZjPWNTl02oxlIvXsmmG+CiQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-timing-functions@6.0.2: + resolution: {integrity: sha512-a+YrtMox4TBtId/AEwbA03VcJgtyW4dGBizPl7e88cTFULYsprgHWTbfyjSLyHeBcK/Q9JhXkt2ZXiwaVHoMzA==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-unicode@6.1.0: + resolution: {integrity: sha512-QVC5TQHsVj33otj8/JD869Ndr5Xcc/+fwRh4HAsFsAeygQQXm+0PySrKbr/8tkDKzW+EVT3QkqZMfFrGiossDg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-url@6.0.2: + resolution: {integrity: 
sha512-kVNcWhCeKAzZ8B4pv/DnrU1wNh458zBNp8dh4y5hhxih5RZQ12QWMuQrDgPRw3LRl8mN9vOVfHl7uhvHYMoXsQ==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-normalize-whitespace@6.0.2: + resolution: {integrity: sha512-sXZ2Nj1icbJOKmdjXVT9pnyHQKiSAyuNQHSgRCUgThn2388Y9cGVDR+E9J9iAYbSbLHI+UUwLVl1Wzco/zgv0Q==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-opacity-percentage@3.0.0: + resolution: {integrity: sha512-K6HGVzyxUxd/VgZdX04DCtdwWJ4NGLG212US4/LA1TLAbHgmAsTWVR86o+gGIbFtnTkfOpb9sCRBx8K7HO66qQ==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-ordered-values@6.0.2: + resolution: {integrity: sha512-VRZSOB+JU32RsEAQrO94QPkClGPKJEL/Z9PCBImXMhIeK5KAYo6slP/hBYlLgrCjFxyqvn5VC81tycFEDBLG1Q==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-overflow-shorthand@6.0.0: + resolution: {integrity: sha512-BdDl/AbVkDjoTofzDQnwDdm/Ym6oS9KgmO7Gr+LHYjNWJ6ExORe4+3pcLQsLA9gIROMkiGVjjwZNoL/mpXHd5Q==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-page-break@3.0.4: + resolution: {integrity: sha512-1JGu8oCjVXLa9q9rFTo4MbeeA5FMe00/9C7lN4va606Rdb+HkxXtXsmEDrIraQ11fGz/WvKWa8gMuCKkrXpTsQ==} + peerDependencies: + postcss: ^8 + + postcss-place@10.0.0: + resolution: {integrity: sha512-5EBrMzat2pPAxQNWYavwAfoKfYcTADJ8AXGVPcUZ2UkNloUTWzJQExgrzrDkh3EKzmAx1evfTAzF9I8NGcc+qw==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-preset-env@10.6.1: + resolution: {integrity: sha512-yrk74d9EvY+W7+lO9Aj1QmjWY9q5NsKjK2V9drkOPZB/X6KZ0B3igKsHUYakb7oYVhnioWypQX3xGuePf89f3g==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-pseudo-class-any-link@10.0.1: + resolution: {integrity: sha512-3el9rXlBOqTFaMFkWDOkHUTQekFIYnaQY55Rsp8As8QQkpiSgIYEcF/6Ond93oHiDsGb4kad8zjt+NPlOC1H0Q==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-reduce-idents@6.0.3: + resolution: {integrity: 
sha512-G3yCqZDpsNPoQgbDUy3T0E6hqOQ5xigUtBQyrmq3tn2GxlyiL0yyl7H+T8ulQR6kOcHJ9t7/9H4/R2tv8tJbMA==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-reduce-initial@6.1.0: + resolution: {integrity: sha512-RarLgBK/CrL1qZags04oKbVbrrVK2wcxhvta3GCxrZO4zveibqbRPmm2VI8sSgCXwoUHEliRSbOfpR0b/VIoiw==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-reduce-transforms@6.0.2: + resolution: {integrity: sha512-sB+Ya++3Xj1WaT9+5LOOdirAxP7dJZms3GRcYheSPi1PiTMigsxHAdkrbItHxwYHr4kt1zL7mmcHstgMYT+aiA==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-replace-overflow-wrap@4.0.0: + resolution: {integrity: sha512-KmF7SBPphT4gPPcKZc7aDkweHiKEEO8cla/GjcBK+ckKxiZslIu3C4GCRW3DNfL0o7yW7kMQu9xlZ1kXRXLXtw==} + peerDependencies: + postcss: ^8.0.3 + + postcss-selector-not@8.0.1: + resolution: {integrity: sha512-kmVy/5PYVb2UOhy0+LqUYAhKj7DUGDpSWa5LZqlkWJaaAV+dxxsOG3+St0yNLu6vsKD7Dmqx+nWQt0iil89+WA==} + engines: {node: '>=18'} + peerDependencies: + postcss: ^8.4 + + postcss-selector-parser@6.1.2: + resolution: {integrity: sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==} + engines: {node: '>=4'} + + postcss-selector-parser@7.1.1: + resolution: {integrity: sha512-orRsuYpJVw8LdAwqqLykBj9ecS5/cRHlI5+nvTo8LcCKmzDmqVORXtOIYEEQuL9D4BxtA1lm5isAqzQZCoQ6Eg==} + engines: {node: '>=4'} + + postcss-sort-media-queries@5.2.0: + resolution: {integrity: sha512-AZ5fDMLD8SldlAYlvi8NIqo0+Z8xnXU2ia0jxmuhxAU+Lqt9K+AlmLNJ/zWEnE9x+Zx3qL3+1K20ATgNOr3fAA==} + engines: {node: '>=14.0.0'} + peerDependencies: + postcss: ^8.4.23 + + postcss-svgo@6.0.3: + resolution: {integrity: sha512-dlrahRmxP22bX6iKEjOM+c8/1p+81asjKT+V5lrgOH944ryx/OHpclnIbGsKVd3uWOXFLYJwCVf0eEkJGvO96g==} + engines: {node: ^14 || ^16 || >= 18} + peerDependencies: + postcss: ^8.4.31 + + postcss-unique-selectors@6.0.4: + resolution: {integrity: 
sha512-K38OCaIrO8+PzpArzkLKB42dSARtC2tmG6PvD4b1o1Q2E9Os8jzfWFfSy/rixsHwohtsDdFtAWGjFVFUdwYaMg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss-value-parser@4.2.0: + resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} + + postcss-zindex@6.0.2: + resolution: {integrity: sha512-5BxW9l1evPB/4ZIc+2GobEBoKC+h8gPGCMi+jxsYvd2x0mjq7wazk6DrP71pStqxE9Foxh5TVnonbWpFZzXaYg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + postcss@8.5.6: + resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} + engines: {node: ^10 || ^12 || >=14} + + prelude-ls@1.2.1: + resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} + engines: {node: '>= 0.8.0'} + + prettier@2.8.8: + resolution: {integrity: sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==} + engines: {node: '>=10.13.0'} + hasBin: true + + prettier@3.8.1: + resolution: {integrity: sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==} + engines: {node: '>=14'} + hasBin: true + + pretty-error@4.0.0: + resolution: {integrity: sha512-AoJ5YMAcXKYxKhuJGdcvse+Voc6v1RgnsR3nWcYU7q4t6z0Q6T86sv5Zq8VIRbOWWFpvdGE83LtdSMNd+6Y0xw==} + + pretty-time@1.1.0: + resolution: {integrity: sha512-28iF6xPQrP8Oa6uxE6a1biz+lWeTOAPKggvjB8HAs6nVMKZwf5bG++632Dx614hIWgUPkgivRfG+a8uAXGTIbA==} + engines: {node: '>=4'} + + prism-react-renderer@2.4.1: + resolution: {integrity: sha512-ey8Ls/+Di31eqzUxC46h8MksNuGx/n0AAC8uKpwFau4RPDYLuE3EXTp8N8G2vX2N7UC/+IXeNUnlWBGGcAG+Ig==} + peerDependencies: + react: '>=16.0.0' + + prismjs@1.30.0: + resolution: {integrity: sha512-DEvV2ZF2r2/63V+tK8hQvrR2ZGn10srHbXviTlcv7Kpzw8jWiNTqbVgjO3IY8RxrrOUF8VPMQQFysYYYv0YZxw==} + engines: {node: '>=6'} + + process-nextick-args@2.0.1: 
+ resolution: {integrity: sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + prompts@2.4.2: + resolution: {integrity: sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==} + engines: {node: '>= 6'} + + prop-types@15.8.1: + resolution: {integrity: sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==} + + property-information@7.1.0: + resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + + proto-list@1.2.4: + resolution: {integrity: sha512-vtK/94akxsTMhe0/cbfpR+syPuszcuwhqVjJq26CuNDgFGj682oRBXOP5MJpv2r7JtE8MsiepGIqvvOTBwn2vA==} + + proxy-addr@2.0.7: + resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} + engines: {node: '>= 0.10'} + + punycode.js@2.3.1: + resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==} + engines: {node: '>=6'} + + punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + + pupa@3.3.0: + resolution: {integrity: sha512-LjgDO2zPtoXP2wJpDjZrGdojii1uqO0cnwKoIoUzkfS98HDmbeiGmYiXo3lXeFlq2xvne1QFQhwYXSUCLKtEuA==} + engines: {node: '>=12.20'} + + pvtsutils@1.3.6: + resolution: {integrity: sha512-PLgQXQ6H2FWCaeRak8vvk1GW462lMxB5s3Jm673N82zI4vqtVUPuZdffdZbPDFRoU8kAhItWFtPCWiPpp4/EDg==} + + pvutils@1.1.5: + resolution: {integrity: sha512-KTqnxsgGiQ6ZAzZCVlJH5eOjSnvlyEgx1m8bkRJfOhmGRqfo5KLvmAlACQkrjEtOQ4B7wF9TdSLIs9O90MX9xA==} + engines: {node: '>=16.0.0'} + + qs@6.14.1: + resolution: {integrity: sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==} + engines: {node: '>=0.6'} + + quansync@0.2.11: + resolution: {integrity: 
sha512-AifT7QEbW9Nri4tAwR5M/uzpBuqfZf+zwaEM/QkzEjj7NBuFD2rBuy0K3dE+8wltbezDV7JMA0WfnCPYRSYbXA==} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + quick-lru@5.1.1: + resolution: {integrity: sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==} + engines: {node: '>=10'} + + randombytes@2.1.0: + resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==} + + range-parser@1.2.0: + resolution: {integrity: sha512-kA5WQoNVo4t9lNx2kQNFCxKeBl5IbbSNBl1M/tLkw9WCn+hxNBAW5Qh8gdhs63CJnhjJ2zQWFoqPJP2sK1AV5A==} + engines: {node: '>= 0.6'} + + range-parser@1.2.1: + resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==} + engines: {node: '>= 0.6'} + + raw-body@2.5.3: + resolution: {integrity: sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==} + engines: {node: '>= 0.8'} + + rc@1.2.8: + resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==} + hasBin: true + + react-dom@18.3.1: + resolution: {integrity: sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==} + peerDependencies: + react: ^18.3.1 + + react-fast-compare@3.2.2: + resolution: {integrity: sha512-nsO+KSNgo1SbJqJEYRE9ERzo7YtYbou/OqjSQKxV7jcKox7+usiUVZOAC+XnDOABXggQTno0Y1CpVnuWEc1boQ==} + + react-is@16.13.1: + resolution: {integrity: sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==} + + react-json-view-lite@2.5.0: + resolution: {integrity: sha512-tk7o7QG9oYyELWHL8xiMQ8x4WzjCzbWNyig3uexmkLb54r8jO0yH3WCWx8UZS0c49eSA4QUmG5caiRJ8fAn58g==} + engines: {node: '>=18'} + peerDependencies: + react: ^18.0.0 || ^19.0.0 + + react-loadable-ssr-addon-v5-slorber@1.0.1: + 
resolution: {integrity: sha512-lq3Lyw1lGku8zUEJPDxsNm1AfYHBrO9Y1+olAYwpUJ2IGFBskM0DMKok97A6LWUpHm+o7IvQBOWu9MLenp9Z+A==} + engines: {node: '>=10.13.0'} + peerDependencies: + react-loadable: '*' + webpack: '>=4.41.1 || 5.x' + + react-router-config@5.1.1: + resolution: {integrity: sha512-DuanZjaD8mQp1ppHjgnnUnyOlqYXZVjnov/JzFhjLEwd3Z4dYjMSnqrEzzGThH47vpCOqPPwJM2FtthLeJ8Pbg==} + peerDependencies: + react: '>=15' + react-router: '>=5' + + react-router-dom@5.3.4: + resolution: {integrity: sha512-m4EqFMHv/Ih4kpcBCONHbkT68KoAeHN4p3lAGoNryfHi0dMy0kCzEZakiKRsvg5wHZ/JLrLW8o8KomWiz/qbYQ==} + peerDependencies: + react: '>=15' + + react-router@5.3.4: + resolution: {integrity: sha512-Ys9K+ppnJah3QuaRiLxk+jDWOR1MekYQrlytiXxC1RyfbdsZkS5pvKAzCCr031xHixZwpnsYNT5xysdFHQaYsA==} + peerDependencies: + react: '>=15' + + react@18.3.1: + resolution: {integrity: sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==} + engines: {node: '>=0.10.0'} + + read-yaml-file@1.1.0: + resolution: {integrity: sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA==} + engines: {node: '>=6'} + + readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + readdirp@4.1.2: + resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==} + engines: {node: '>= 14.18.0'} + + recma-build-jsx@1.0.0: + resolution: {integrity: sha512-8GtdyqaBcDfva+GUKDr3nev3VpKAhup1+RvkMvUxURHpW7QyIvk9F5wz7Vzo06CEMSilw6uArgRqhpiUcWp8ew==} + + recma-jsx@1.0.1: + 
resolution: {integrity: sha512-huSIy7VU2Z5OLv6oFLosQGGDqPqdO1iq6bWNAdhzMxSJP7RAso4fCZ1cKu8j9YHCZf3TPrq4dw3okhrylgcd7w==} + peerDependencies: + acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 + + recma-parse@1.0.0: + resolution: {integrity: sha512-OYLsIGBB5Y5wjnSnQW6t3Xg7q3fQ7FWbw/vcXtORTnyaSFscOtABg+7Pnz6YZ6c27fG1/aN8CjfwoUEUIdwqWQ==} + + recma-stringify@1.0.0: + resolution: {integrity: sha512-cjwII1MdIIVloKvC9ErQ+OgAtwHBmcZ0Bg4ciz78FtbT8In39aAYbaA7zvxQ61xVMSPE8WxhLwLbhif4Js2C+g==} + + reflect-metadata@0.2.2: + resolution: {integrity: sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==} + + regenerate-unicode-properties@10.2.2: + resolution: {integrity: sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==} + engines: {node: '>=4'} + + regenerate@1.4.2: + resolution: {integrity: sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==} + + regexpu-core@6.4.0: + resolution: {integrity: sha512-0ghuzq67LI9bLXpOX/ISfve/Mq33a4aFRzoQYhnnok1JOFpmE/A2TBGkNVenOGEeSBCjIiWcc6MVOG5HEQv0sA==} + engines: {node: '>=4'} + + registry-auth-token@5.1.1: + resolution: {integrity: sha512-P7B4+jq8DeD2nMsAcdfaqHbssgHtZ7Z5+++a5ask90fvmJ8p5je4mOa+wzu+DB4vQ5tdJV/xywY+UnVFeQLV5Q==} + engines: {node: '>=14'} + + registry-url@6.0.1: + resolution: {integrity: sha512-+crtS5QjFRqFCoQmvGduwYWEBng99ZvmFvF+cUJkGYF1L1BfU8C6Zp9T7f5vPAwyLkUExpvK+ANVZmGU49qi4Q==} + engines: {node: '>=12'} + + regjsgen@0.8.0: + resolution: {integrity: sha512-RvwtGe3d7LvWiDQXeQw8p5asZUmfU1G/l6WbUXeHta7Y2PEIvBTwH6E2EfmYUK8pxcxEdEmaomqyp0vZZ7C+3Q==} + + regjsparser@0.13.0: + resolution: {integrity: sha512-NZQZdC5wOE/H3UT28fVGL+ikOZcEzfMGk/c3iN9UGxzWHMa1op7274oyiUVrAG4B2EuFhus8SvkaYnhvW92p9Q==} + hasBin: true + + rehype-raw@7.0.0: + resolution: {integrity: sha512-/aE8hCfKlQeA8LmyeyQvQF3eBiLRGNlfBJEvWH7ivp9sBqs7TNqBL5X3v157rM4IFETqDnIOO+z5M/biZbo9Ww==} + + rehype-recma@1.0.0: + resolution: {integrity: 
sha512-lqA4rGUf1JmacCNWWZx0Wv1dHqMwxzsDWYMTowuplHF3xH0N/MmrZ/G3BDZnzAkRmxDadujCjaKM2hqYdCBOGw==} + + relateurl@0.2.7: + resolution: {integrity: sha512-G08Dxvm4iDN3MLM0EsP62EDV9IuhXPR6blNz6Utcp7zyV3tr4HVNINt6MpaRWbxoOHT3Q7YN2P+jaHX8vUbgog==} + engines: {node: '>= 0.10'} + + remark-directive@3.0.1: + resolution: {integrity: sha512-gwglrEQEZcZYgVyG1tQuA+h58EZfq5CSULw7J90AFuCTyib1thgHPoqQ+h9iFvU6R+vnZ5oNFQR5QKgGpk741A==} + + remark-emoji@4.0.1: + resolution: {integrity: sha512-fHdvsTR1dHkWKev9eNyhTo4EFwbUvJ8ka9SgeWkMPYFX4WoI7ViVBms3PjlQYgw5TLvNQso3GUB/b/8t3yo+dg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + + remark-frontmatter@5.0.0: + resolution: {integrity: sha512-XTFYvNASMe5iPN0719nPrdItC9aU0ssC4v14mH1BCi1u0n1gAocqcujWUrByftZTbLhRtiKRyjYTSIOcr69UVQ==} + + remark-gfm@4.0.1: + resolution: {integrity: sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==} + + remark-mdx@3.1.1: + resolution: {integrity: sha512-Pjj2IYlUY3+D8x00UJsIOg5BEvfMyeI+2uLPn9VO9Wg4MEtN/VTIq2NEJQfde9PnX15KgtHyl9S0BcTnWrIuWg==} + + remark-parse@11.0.0: + resolution: {integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==} + + remark-rehype@11.1.2: + resolution: {integrity: sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==} + + remark-stringify@11.0.0: + resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==} + + renderkid@3.0.0: + resolution: {integrity: sha512-q/7VIQA8lmM1hF+jn+sFSPWGlMkSAeNYcPLmDQx2zzuiDfaLrOmumR8iaUKlenFgh0XRPIUeSPlH3A+AW3Z5pg==} + + repeat-string@1.6.1: + resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==} + engines: {node: '>=0.10'} + + require-from-string@2.0.2: + resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} + engines: 
{node: '>=0.10.0'} + + require-like@0.1.2: + resolution: {integrity: sha512-oyrU88skkMtDdauHDuKVrgR+zuItqr6/c//FXzvmxRGMexSDc6hNvJInGW3LL46n+8b50RykrvwSUIIQH2LQ5A==} + + requires-port@1.0.0: + resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} + + resolve-alpn@1.2.1: + resolution: {integrity: sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==} + + resolve-from@4.0.0: + resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} + engines: {node: '>=4'} + + resolve-from@5.0.0: + resolution: {integrity: sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==} + engines: {node: '>=8'} + + resolve-pathname@3.0.0: + resolution: {integrity: sha512-C7rARubxI8bXFNB/hqcp/4iUeIXJhJZvFPFPiSPRnhU5UPxzMFIl+2E6yY6c4k9giDJAhtV+enfA+G89N6Csng==} + + resolve@1.22.11: + resolution: {integrity: sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==} + engines: {node: '>= 0.4'} + hasBin: true + + responselike@3.0.0: + resolution: {integrity: sha512-40yHxbNcl2+rzXvZuVkrYohathsSJlMTXKryG5y8uciHv1+xDLHQpgjG64JUO9nrEq2jGLH6IZ8BcZyw3wrweg==} + engines: {node: '>=14.16'} + + retry@0.13.1: + resolution: {integrity: sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==} + engines: {node: '>= 4'} + + reusify@1.1.0: + resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + robust-predicates@3.0.2: + resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} + + rollup@4.57.1: + resolution: {integrity: sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==} + engines: {node: '>=18.0.0', 
npm: '>=8.0.0'} + hasBin: true + + roughjs@4.6.6: + resolution: {integrity: sha512-ZUz/69+SYpFN/g/lUlo2FXcIjRkSu3nDarreVdGGndHEBJ6cXPdKguS8JGxwj5HA5xIbVKSmLgr5b3AWxtRfvQ==} + + rtlcss@4.3.0: + resolution: {integrity: sha512-FI+pHEn7Wc4NqKXMXFM+VAYKEj/mRIcW4h24YVwVtyjI+EqGrLc2Hx/Ny0lrZ21cBWU2goLy36eqMcNj3AQJig==} + engines: {node: '>=12.0.0'} + hasBin: true + + run-applescript@7.1.0: + resolution: {integrity: sha512-DPe5pVFaAsinSaV6QjQ6gdiedWDcRCbUuiQfQa2wmWV7+xC9bGulGI8+TdRmoFkAPaBXk8CrAbnlY2ISniJ47Q==} + engines: {node: '>=18'} + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + rw@1.3.3: + resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + sax@1.4.4: + resolution: {integrity: sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==} + engines: {node: '>=11.0.0'} + + scheduler@0.23.2: + resolution: {integrity: sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==} + + schema-dts@1.1.5: + resolution: {integrity: sha512-RJr9EaCmsLzBX2NDiO5Z3ux2BVosNZN5jo0gWgsyKvxKIUL5R3swNvoorulAeL9kLB0iTSX7V6aokhla2m7xbg==} + + schema-utils@3.3.0: + resolution: {integrity: sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==} + engines: {node: '>= 10.13.0'} + + schema-utils@4.3.3: + resolution: {integrity: 
sha512-eflK8wEtyOE6+hsaRVPxvUKYCpRgzLqDTb8krvAsRIwOGlHoSgYLgBXoubGgLd2fT41/OUYdb48v4k4WWHQurA==} + engines: {node: '>= 10.13.0'} + + section-matter@1.0.0: + resolution: {integrity: sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==} + engines: {node: '>=4'} + + select-hose@2.0.0: + resolution: {integrity: sha512-mEugaLK+YfkijB4fx0e6kImuJdCIt2LxCRcbEYPqRGCs4F2ogyfZU5IAZRdjCP8JPq2AtdNoC/Dux63d9Kiryg==} + + selfsigned@5.5.0: + resolution: {integrity: sha512-ftnu3TW4+3eBfLRFnDEkzGxSF/10BJBkaLJuBHZX0kiPS7bRdlpZGu6YGt4KngMkdTwJE6MbjavFpqHvqVt+Ew==} + engines: {node: '>=18'} + + semver-diff@4.0.0: + resolution: {integrity: sha512-0Ju4+6A8iOnpL/Thra7dZsSlOHYAHIeMxfhWQRI1/VLcT3WDBZKKtQt/QkBOsiIN9ZpuvHE6cGZ0x4glCMmfiA==} + engines: {node: '>=12'} + + semver@6.3.1: + resolution: {integrity: sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==} + hasBin: true + + semver@7.7.3: + resolution: {integrity: sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==} + engines: {node: '>=10'} + hasBin: true + + send@0.19.2: + resolution: {integrity: sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==} + engines: {node: '>= 0.8.0'} + + serialize-javascript@6.0.2: + resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==} + + serve-handler@6.1.6: + resolution: {integrity: sha512-x5RL9Y2p5+Sh3D38Fh9i/iQ5ZK+e4xuXRd/pGbM4D13tgo/MGwbttUk8emytcr1YYzBYs+apnUngBDFYfpjPuQ==} + + serve-index@1.9.2: + resolution: {integrity: sha512-KDj11HScOaLmrPxl70KYNW1PksP4Nb/CLL2yvC+Qd2kHMPEEpfc4Re2e4FOay+bC/+XQl/7zAcWON3JVo5v3KQ==} + engines: {node: '>= 0.8.0'} + + serve-static@1.16.3: + resolution: {integrity: sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==} + engines: {node: '>= 0.8.0'} + + set-function-length@1.2.2: + 
resolution: {integrity: sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==} + engines: {node: '>= 0.4'} + + setprototypeof@1.2.0: + resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==} + + shallow-clone@3.0.1: + resolution: {integrity: sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==} + engines: {node: '>=8'} + + shallowequal@1.1.0: + resolution: {integrity: sha512-y0m1JoUZSlPAjXVtPPW70aZWfIL/dSP7AFkRnniLCrK/8MDKog3TySTBmckD+RObVxH0v4Tox67+F14PdED2oQ==} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + shell-quote@1.8.3: + resolution: {integrity: sha512-ObmnIF4hXNg1BqhnHmgbDETF8dLPCggZWBjkQfhZpbszZnYur5DUljTcCHii5LC3J5E0yeO/1LIMyH+UvHQgyw==} + engines: {node: '>= 0.4'} + + side-channel-list@1.0.0: + resolution: {integrity: sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==} + engines: {node: '>= 0.4'} + + side-channel-map@1.0.1: + resolution: {integrity: sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==} + engines: {node: '>= 0.4'} + + side-channel-weakmap@1.0.2: + resolution: {integrity: sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==} + engines: {node: '>= 0.4'} + + side-channel@1.1.0: + resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} + engines: {node: '>= 0.4'} + + siginfo@2.0.0: + resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} 
+ + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sirv@2.0.4: + resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==} + engines: {node: '>= 10'} + + sisteransi@1.0.5: + resolution: {integrity: sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==} + + sitemap@7.1.2: + resolution: {integrity: sha512-ARCqzHJ0p4gWt+j7NlU5eDlIO9+Rkr/JhPFZKKQ1l5GCus7rJH4UdrlVAh0xC/gDS/Qir2UMxqYNHtsKr2rpCw==} + engines: {node: '>=12.0.0', npm: '>=5.6.0'} + hasBin: true + + skin-tone@2.0.0: + resolution: {integrity: sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==} + engines: {node: '>=8'} + + slash@3.0.0: + resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} + engines: {node: '>=8'} + + slash@4.0.0: + resolution: {integrity: sha512-3dOsAHXXUkQTpOYcoAxLIorMTp4gIQr5IW3iVb7A7lFIp0VHhnynm9izx6TssdrIcVIESAlVjtnO2K8bg+Coew==} + engines: {node: '>=12'} + + snake-case@3.0.4: + resolution: {integrity: sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==} + + sockjs@0.3.24: + resolution: {integrity: sha512-GJgLTZ7vYb/JtPSSZ10hsOYIvEYsjbNU+zPdIHcUaWVNUEPivzxku31865sSSud0Da0W4lEeOPlmw93zLQchuQ==} + + sort-css-media-queries@2.2.0: + resolution: {integrity: sha512-0xtkGhWCC9MGt/EzgnvbbbKhqWjl1+/rncmhTh5qCpbYguXh6S/qwePfv/JQ8jePXXmqingylxoC49pCkSPIbA==} + engines: {node: '>= 6.3.0'} + + source-map-js@1.2.1: + resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} + engines: {node: '>=0.10.0'} + + 
source-map-support@0.5.21: + resolution: {integrity: sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==} + + source-map@0.6.1: + resolution: {integrity: sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==} + engines: {node: '>=0.10.0'} + + source-map@0.7.6: + resolution: {integrity: sha512-i5uvt8C3ikiWeNZSVZNWcfZPItFQOsYTUAOkcUPGd8DqDy1uOUikjt5dG+uRlwyvR108Fb9DOd4GvXfT0N2/uQ==} + engines: {node: '>= 12'} + + space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + + spawndamnit@3.0.1: + resolution: {integrity: sha512-MmnduQUuHCoFckZoWnXsTg7JaiLBJrKFj9UI2MbRPGaJeVpsLcVBu6P/IGZovziM/YBsellCmsprgNA+w0CzVg==} + + spdy-transport@3.0.0: + resolution: {integrity: sha512-hsLVFE5SjA6TCisWeJXFKniGGOpBgMLmerfO2aCyCU5s7nJ/rpAepqmFifv/GCbSbueEeAJJnmSQ2rKC/g8Fcw==} + + spdy@4.0.2: + resolution: {integrity: sha512-r46gZQZQV+Kl9oItvl1JZZqJKGr+oEkB08A6BzkiR7593/7IbtuncXHd2YoYeTsG4157ZssMu9KYvUHLcjcDoA==} + engines: {node: '>=6.0.0'} + + sprintf-js@1.0.3: + resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} + + srcset@4.0.0: + resolution: {integrity: sha512-wvLeHgcVHKO8Sc/H/5lkGreJQVeYMm9rlmt8PuR1xE31rIuXhuzznUUqAt8MqLhB3MqJdFzlNAfpcWnxiFUcPw==} + engines: {node: '>=12'} + + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + + statuses@1.5.0: + resolution: {integrity: sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA==} + engines: {node: '>= 0.6'} + + statuses@2.0.2: + resolution: {integrity: sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==} + engines: {node: '>= 0.8'} + + std-env@3.10.0: + resolution: {integrity: 
sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + + stringify-object@3.3.0: + resolution: {integrity: sha512-rHqiFh1elqCQ9WPLIC8I0Q/g/wj5J1eMkyoiD6eoQApWHP0FtlK7rqnhmabL5VUY9JQCcqwwvlOaSuutekgyrw==} + engines: {node: '>=4'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.2: + resolution: {integrity: sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==} + engines: {node: '>=12'} + + strip-bom-string@1.0.0: + resolution: {integrity: sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==} + engines: {node: '>=0.10.0'} + + strip-bom@3.0.0: + resolution: {integrity: sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==} + engines: {node: '>=4'} + + strip-final-newline@2.0.0: + resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==} + engines: {node: '>=6'} + + strip-json-comments@2.0.1: + resolution: {integrity: 
sha512-4gB8na07fecVVkOI6Rs4e7T6NOTki5EmL7TUduTs6bu3EdnSycntVJ4re8kgZA+wx9IueI2Y11bfbgwtzuE0KQ==} + engines: {node: '>=0.10.0'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + style-to-js@1.1.21: + resolution: {integrity: sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==} + + style-to-object@1.0.14: + resolution: {integrity: sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==} + + stylehacks@6.1.1: + resolution: {integrity: sha512-gSTTEQ670cJNoaeIp9KX6lZmm8LJ3jPB5yJmX8Zq/wQxOsAFXV3qjWzHas3YYk1qesuVIyYWWUpZ0vSE/dTSGg==} + engines: {node: ^14 || ^16 || >=18.0} + peerDependencies: + postcss: ^8.4.31 + + stylis@4.3.6: + resolution: {integrity: sha512-yQ3rwFWRfwNUY7H5vpU0wfdkNSnvnJinhF9830Swlaxl03zsOjCfmX0ugac+3LtK0lYSgwL/KXc8oYL3mG4YFQ==} + + sucrase@3.35.1: + resolution: {integrity: sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==} + engines: {node: '>=16 || 14 >=14.17'} + hasBin: true + + supports-color@7.2.0: + resolution: {integrity: sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==} + engines: {node: '>=8'} + + supports-color@8.1.1: + resolution: {integrity: sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==} + engines: {node: '>=10'} + + supports-color@9.4.0: + resolution: {integrity: sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw==} + engines: {node: '>=12'} + + supports-preserve-symlinks-flag@1.0.0: + resolution: {integrity: sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==} + engines: {node: '>= 0.4'} + + svg-parser@2.0.4: + resolution: {integrity: 
sha512-e4hG1hRwoOdRb37cIMSgzNsxyzKfayW6VOflrwvR+/bzrkyxY/31WkbgnQpgtrNp1SdpJvpUAGTa/ZoiPNDuRQ==} + + svgo@3.3.2: + resolution: {integrity: sha512-OoohrmuUlBs8B8o6MB2Aevn+pRIH9zDALSR+6hhqVfa6fRwG/Qw9VUMSMW9VNg2CFc/MTIfabtdOVl9ODIJjpw==} + engines: {node: '>=14.0.0'} + hasBin: true + + tapable@2.3.0: + resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} + engines: {node: '>=6'} + + term-size@2.2.1: + resolution: {integrity: sha512-wK0Ri4fOGjv/XPy8SBHZChl8CM7uMc5VML7SqiQ0zG7+J5Vr+RMQDoHa2CNT6KHUnTGIXH34UDMkPzAUyapBZg==} + engines: {node: '>=8'} + + terser-webpack-plugin@5.3.16: + resolution: {integrity: sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==} + engines: {node: '>= 10.13.0'} + peerDependencies: + '@swc/core': '*' + esbuild: '*' + uglify-js: '*' + webpack: ^5.1.0 + peerDependenciesMeta: + '@swc/core': + optional: true + esbuild: + optional: true + uglify-js: + optional: true + + terser@5.46.0: + resolution: {integrity: sha512-jTwoImyr/QbOWFFso3YoU3ik0jBBDJ6JTOQiy/J2YxVJdZCc+5u7skhNwiOR3FQIygFqVUPHl7qbbxtjW2K3Qg==} + engines: {node: '>=10'} + hasBin: true + + test-exclude@7.0.1: + resolution: {integrity: sha512-pFYqmTw68LXVjeWJMST4+borgQP2AyMNbg1BpZh9LbyhUeNkeaPF9gzfPGUAnSMV3qPYdWUwDIjjCLiSDOl7vg==} + engines: {node: '>=18'} + + thenify-all@1.6.0: + resolution: {integrity: sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==} + engines: {node: '>=0.8'} + + thenify@3.3.1: + resolution: {integrity: sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==} + + thingies@2.5.0: + resolution: {integrity: sha512-s+2Bwztg6PhWUD7XMfeYm5qliDdSiZm7M7n8KjTkIsm3l/2lgVRc2/Gx/v+ZX8lT4FMA+i8aQvhcWylldc+ZNw==} + engines: {node: '>=10.18'} + peerDependencies: + tslib: ^2 + + thunky@1.1.0: + resolution: {integrity: 
sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==} + + tiny-invariant@1.3.3: + resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} + + tiny-warning@1.0.3: + resolution: {integrity: sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==} + + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@0.3.2: + resolution: {integrity: sha512-KQQR9yN7R5+OSwaK0XQoj22pwHoTlgYqmUscPYoknOoWCWfj/5/ABTMRi69FrKU5ffPVh5QcFikpWJI/P1ocHA==} + + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + + tinyglobby@0.2.15: + resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} + engines: {node: '>=12.0.0'} + + tinypool@1.1.1: + resolution: {integrity: sha512-Zba82s87IFq9A9XmjiX5uZA/ARWDrB03OHlq+Vw1fSdt0I+4/Kutwy8BP4Y/y/aORMo61FQ0vIb5j44vSo5Pkg==} + engines: {node: ^18.0.0 || >=20.0.0} + + tinyrainbow@1.2.0: + resolution: {integrity: sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==} + engines: {node: '>=14.0.0'} + + tinyspy@3.0.2: + resolution: {integrity: sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==} + engines: {node: '>=14.0.0'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + toidentifier@1.0.1: + resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==} + engines: {node: '>=0.6'} + + totalist@3.0.1: + resolution: {integrity: 
sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} + engines: {node: '>=6'} + + tree-dump@1.1.0: + resolution: {integrity: sha512-rMuvhU4MCDbcbnleZTFezWsaZXRFemSqAM+7jPnzUl1fo9w3YEKOxAeui0fz3OI4EU4hf23iyA7uQRVko+UaBA==} + engines: {node: '>=10.0'} + peerDependencies: + tslib: '2' + + tree-kill@1.2.2: + resolution: {integrity: sha512-L0Orpi8qGpRG//Nd+H90vFB+3iHnue1zSSGmNOOCh1GLJ7rUKVwV2HvijphGQS2UmhUZewS9VgvxYIdgr+fG1A==} + hasBin: true + + trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + + trough@2.2.0: + resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} + + ts-api-utils@2.4.0: + resolution: {integrity: sha512-3TaVTaAv2gTiMB35i3FiGJaRfwb3Pyn/j3m/bfAvGe8FB7CF6u+LMYqYlDh7reQf7UNvoTvdfAqHGmPGOSsPmA==} + engines: {node: '>=18.12'} + peerDependencies: + typescript: '>=4.8.4' + + ts-dedent@2.2.0: + resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} + engines: {node: '>=6.10'} + + ts-interface-checker@0.1.13: + resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} + + tslib@1.14.1: + resolution: {integrity: sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==} + + tslib@2.8.1: + resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==} + + tsup@8.5.1: + resolution: {integrity: sha512-xtgkqwdhpKWr3tKPmCkvYmS9xnQK3m3XgxZHwSUjvfTjp7YfXe5tT3GgWi0F2N+ZSMsOeWeZFh7ZZFg5iPhing==} + engines: {node: '>=18'} + hasBin: true + peerDependencies: + '@microsoft/api-extractor': ^7.36.0 + '@swc/core': ^1 + postcss: ^8.4.12 + typescript: '>=4.5.0' + peerDependenciesMeta: + '@microsoft/api-extractor': + optional: true + '@swc/core': + optional: 
true + postcss: + optional: true + typescript: + optional: true + + tsyringe@4.10.0: + resolution: {integrity: sha512-axr3IdNuVIxnaK5XGEUFTu3YmAQ6lllgrvqfEoR16g/HGnYY/6We4oWENtAnzK6/LpJ2ur9PAb80RBt7/U4ugw==} + engines: {node: '>= 6.0.0'} + + turbo-darwin-64@2.8.1: + resolution: {integrity: sha512-FQ6Uqxty/H1Nvn1dpBe8KUlMRclTuiyNSc1PCeDL/ad7M9ykpWutB51YpMpf9ibTA32M6wLdIRf+D96W6hDAtQ==} + cpu: [x64] + os: [darwin] + + turbo-darwin-arm64@2.8.1: + resolution: {integrity: sha512-4bCcEpGP2/aSXmeN2gl5SuAmS1q5ykjubnFvSoXjQoCKtDOV+vc4CTl/DduZzUUutCVUWXjl8OyfIQ+DGCaV4A==} + cpu: [arm64] + os: [darwin] + + turbo-linux-64@2.8.1: + resolution: {integrity: sha512-m99JRlWlEgXPR7mkThAbKh6jbTmWSOXM/c6rt8yd4Uxh0+wjq7+DYcQbead6aoOqmCP9akswZ8EXIv1ogKBblg==} + cpu: [x64] + os: [linux] + + turbo-linux-arm64@2.8.1: + resolution: {integrity: sha512-AsPlza3AsavJdl2o7FE67qyv0aLfmT1XwFQGzvwpoAO6Bj7S4a03tpUchZKNuGjNAkKVProQRFnB7PgUAScFXA==} + cpu: [arm64] + os: [linux] + + turbo-windows-64@2.8.1: + resolution: {integrity: sha512-GdqNO6bYShRsr79B+2G/2ssjLEp9uBTvLBJSWRtRCiac/SEmv8T6RYv9hu+h5oGbFALtnKNp6BQBw78RJURsPw==} + cpu: [x64] + os: [win32] + + turbo-windows-arm64@2.8.1: + resolution: {integrity: sha512-n40E6IpkzrShRo3yMdRpgnn1/sAbGC6tZXwyNu8fe9RsufeD7KBiaoRSvw8xLyqV3pd2yoTL2rdCXq24MnTCWA==} + cpu: [arm64] + os: [win32] + + turbo@2.8.1: + resolution: {integrity: sha512-pbSMlRflA0RAuk/0jnAt8pzOYh1+sKaT8nVtcs75OFGVWD0evleQRmKtHJJV42QOhaC3Hx9mUUSOom/irasbjA==} + hasBin: true + + type-check@0.4.0: + resolution: {integrity: sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==} + engines: {node: '>= 0.8.0'} + + type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + + type-fest@1.4.0: + resolution: {integrity: sha512-yGSza74xk0UG8k+pLh5oeoYirvIiWo5t0/o3zHHAO2tRDiZcxWP7fywNlXhqb6/r6sWvwi+RsyQMWhVLe4BVuA==} + engines: {node: '>=10'} + + 
type-fest@2.19.0: + resolution: {integrity: sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==} + engines: {node: '>=12.20'} + + type-is@1.6.18: + resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} + engines: {node: '>= 0.6'} + + typedarray-to-buffer@3.1.5: + resolution: {integrity: sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==} + + typescript@5.9.3: + resolution: {integrity: sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==} + engines: {node: '>=14.17'} + hasBin: true + + uc.micro@2.1.0: + resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} + + ufo@1.6.3: + resolution: {integrity: sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q==} + + undici-types@6.21.0: + resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==} + + undici@5.29.0: + resolution: {integrity: sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==} + engines: {node: '>=14.0'} + + undici@7.19.2: + resolution: {integrity: sha512-4VQSpGEGsWzk0VYxyB/wVX/Q7qf9t5znLRgs0dzszr9w9Fej/8RVNQ+S20vdXSAyra/bJ7ZQfGv6ZMj7UEbzSg==} + engines: {node: '>=20.18.1'} + + unicode-canonical-property-names-ecmascript@2.0.1: + resolution: {integrity: sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==} + engines: {node: '>=4'} + + unicode-emoji-modifier-base@1.0.0: + resolution: {integrity: sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==} + engines: {node: '>=4'} + + unicode-match-property-ecmascript@2.0.0: + resolution: {integrity: 
sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==} + engines: {node: '>=4'} + + unicode-match-property-value-ecmascript@2.2.1: + resolution: {integrity: sha512-JQ84qTuMg4nVkx8ga4A16a1epI9H6uTXAknqxkGF/aFfRLw1xC/Bp24HNLaZhHSkWd3+84t8iXnp1J0kYcZHhg==} + engines: {node: '>=4'} + + unicode-property-aliases-ecmascript@2.2.0: + resolution: {integrity: sha512-hpbDzxUY9BFwX+UeBnxv3Sh1q7HFxj48DTmXchNgRa46lO8uj3/1iEn3MiNUYTg1g9ctIqXCCERn8gYZhHC5lQ==} + engines: {node: '>=4'} + + unified@11.0.5: + resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} + + unique-string@3.0.0: + resolution: {integrity: sha512-VGXBUVwxKMBUznyffQweQABPRRW1vHZAbadFZud4pLFAqRGvv/96vafgjWFqzourzr8YonlQiPgH0YCJfawoGQ==} + engines: {node: '>=12'} + + unist-util-is@6.0.1: + resolution: {integrity: sha512-LsiILbtBETkDz8I9p1dQ0uyRUWuaQzd/cuEeS1hoRSyW5E5XGmTzlwY1OrNzzakGowI9Dr/I8HVaw4hTtnxy8g==} + + unist-util-position-from-estree@2.0.0: + resolution: {integrity: sha512-KaFVRjoqLyF6YXCbVLNad/eS4+OfPQQn2yOd7zF/h5T/CSL2v8NpN6a5TPvtbXthAGw5nG+PuTtq+DdIZr+cRQ==} + + unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + + unist-util-stringify-position@4.0.0: + resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} + + unist-util-visit-parents@6.0.2: + resolution: {integrity: sha512-goh1s1TBrqSqukSc8wrjwWhL0hiJxgA8m4kFxGlQ+8FYQ3C/m11FcTs4YYem7V664AhHVvgoQLk890Ssdsr2IQ==} + + unist-util-visit@5.1.0: + resolution: {integrity: sha512-m+vIdyeCOpdr/QeQCu2EzxX/ohgS8KbnPDgFni4dQsfSCtpz8UqDyY5GjRru8PDKuYn7Fq19j1CQ+nJSsGKOzg==} + + universalify@0.1.2: + resolution: {integrity: sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==} + engines: {node: '>= 4.0.0'} + + universalify@2.0.1: + resolution: 
{integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} + engines: {node: '>= 10.0.0'} + + unpipe@1.0.0: + resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} + engines: {node: '>= 0.8'} + + update-browserslist-db@1.2.3: + resolution: {integrity: sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==} + hasBin: true + peerDependencies: + browserslist: '>= 4.21.0' + + update-notifier@6.0.2: + resolution: {integrity: sha512-EDxhTEVPZZRLWYcJ4ZXjGFN0oP7qYvbXWzEgRm/Yql4dHX5wDbvh89YHP6PK1lzZJYrMtXUuZZz8XGK+U6U1og==} + engines: {node: '>=14.16'} + + uri-js@4.4.1: + resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + + url-loader@4.1.1: + resolution: {integrity: sha512-3BTV812+AVHHOJQO8O5MkWgZ5aosP7GnROJwvzLS9hWDj00lZ6Z0wNak423Lp9PBZN05N+Jk/N5Si8jRAlGyWA==} + engines: {node: '>= 10.13.0'} + peerDependencies: + file-loader: '*' + webpack: ^4.0.0 || ^5.0.0 + peerDependenciesMeta: + file-loader: + optional: true + + util-deprecate@1.0.2: + resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + + utila@0.4.0: + resolution: {integrity: sha512-Z0DbgELS9/L/75wZbro8xAnT50pBVFQZ+hUEueGDU5FN51YSCYM+jdxsfCiHjwNP/4LCDD0i/graKpeBnOXKRA==} + + utility-types@3.11.0: + resolution: {integrity: sha512-6Z7Ma2aVEWisaL6TvBCy7P8rm2LQoPv6dJ7ecIaIixHcwfbJ0x7mWdbcwlIM5IGQxPZSFYeqRCqlOOeKoJYMkw==} + engines: {node: '>= 4'} + + utils-merge@1.0.1: + resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==} + engines: {node: '>= 0.4.0'} + + uuid@11.1.0: + resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==} + hasBin: true + + uuid@8.3.2: + resolution: {integrity: 
sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==} + hasBin: true + + value-equal@1.0.1: + resolution: {integrity: sha512-NOJ6JZCAWr0zlxZt+xqCHNTEKOsrks2HQd4MqhP1qy4z1SkbEP467eNx6TgDKXMvUOb+OENfJCZwM+16n7fRfw==} + + vary@1.1.2: + resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} + engines: {node: '>= 0.8'} + + vfile-location@5.0.3: + resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==} + + vfile-message@4.0.3: + resolution: {integrity: sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==} + + vfile@6.0.3: + resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==} + + vite-node@2.1.9: + resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + + vite@5.4.21: + resolution: {integrity: sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || >=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 + peerDependenciesMeta: + '@types/node': + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + + vitest@2.1.9: + resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 2.1.9 + 
'@vitest/ui': 2.1.9 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + + vscode-jsonrpc@8.2.0: + resolution: {integrity: sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==} + engines: {node: '>=14.0.0'} + + vscode-languageserver-protocol@3.17.5: + resolution: {integrity: sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==} + + vscode-languageserver-textdocument@1.0.12: + resolution: {integrity: sha512-cxWNPesCnQCcMPeenjKKsOCKQZ/L6Tv19DTRIGuLWe32lyzWhihGVJ/rcckZXJxfdKCFvRLS3fpBIsV/ZGX4zA==} + + vscode-languageserver-types@3.17.5: + resolution: {integrity: sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==} + + vscode-languageserver@9.0.1: + resolution: {integrity: sha512-woByF3PDpkHFUreUa7Hos7+pUWdeWMXRd26+ZX2A8cFx6v/JPTtd4/uN0/jB6XQHYaOlHbio03NTHCqrgG5n7g==} + hasBin: true + + vscode-uri@3.0.8: + resolution: {integrity: sha512-AyFQ0EVmsOZOlAnxoFOGOq1SQDWAB7C6aqMGS23svWAllfOaxbuFvcT8D1i8z3Gyn8fraVeZNNmN6e9bxxXkKw==} + + watchpack@2.5.1: + resolution: {integrity: sha512-Zn5uXdcFNIA1+1Ei5McRd+iRzfhENPCe7LeABkJtNulSxjma+l7ltNx55BWZkRlwRnpOgHqxnjyaDgJnNXnqzg==} + engines: {node: '>=10.13.0'} + + wbuf@1.7.3: + resolution: {integrity: sha512-O84QOnr0icsbFGLS0O3bI5FswxzRr8/gHwWkDlQFskhSPryQXvrTMxjxGP4+iWYoauLoBvfDpkrOauZ+0iZpDA==} + + web-namespaces@2.0.1: + resolution: {integrity: sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==} + + webpack-bundle-analyzer@4.10.2: + resolution: {integrity: sha512-vJptkMm9pk5si4Bv922ZbKLV8UTT4zib4FPgXMhgzUny0bfDDkLXAVQs3ly3fS4/TN9ROFtb0NFrm04UXFE/Vw==} + engines: {node: '>= 10.13.0'} + hasBin: true + + webpack-dev-middleware@7.4.5: + resolution: {integrity: 
sha512-uxQ6YqGdE4hgDKNf7hUiPXOdtkXvBJXrfEGYSx7P7LC8hnUYGK70X6xQXUvXeNyBDDcsiQXpG2m3G9vxowaEuA==} + engines: {node: '>= 18.12.0'} + peerDependencies: + webpack: ^5.0.0 + peerDependenciesMeta: + webpack: + optional: true + + webpack-dev-server@5.2.3: + resolution: {integrity: sha512-9Gyu2F7+bg4Vv+pjbovuYDhHX+mqdqITykfzdM9UyKqKHlsE5aAjRhR+oOEfXW5vBeu8tarzlJFIZva4ZjAdrQ==} + engines: {node: '>= 18.12.0'} + hasBin: true + peerDependencies: + webpack: ^5.0.0 + webpack-cli: '*' + peerDependenciesMeta: + webpack: + optional: true + webpack-cli: + optional: true + + webpack-merge@5.10.0: + resolution: {integrity: sha512-+4zXKdx7UnO+1jaN4l2lHVD+mFvnlZQP/6ljaJVb4SZiwIKeUnrT5l0gkT8z+n4hKpC+jpOv6O9R+gLtag7pSA==} + engines: {node: '>=10.0.0'} + + webpack-merge@6.0.1: + resolution: {integrity: sha512-hXXvrjtx2PLYx4qruKl+kyRSLc52V+cCvMxRjmKwoA+CBbbF5GfIBtR6kCvl0fYGqTUPKB+1ktVmTHqMOzgCBg==} + engines: {node: '>=18.0.0'} + + webpack-sources@3.3.3: + resolution: {integrity: sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==} + engines: {node: '>=10.13.0'} + + webpack@5.104.1: + resolution: {integrity: sha512-Qphch25abbMNtekmEGJmeRUhLDbe+QfiWTiqpKYkpCOWY64v9eyl+KRRLmqOFA2AvKPpc9DC6+u2n76tQLBoaA==} + engines: {node: '>=10.13.0'} + hasBin: true + peerDependencies: + webpack-cli: '*' + peerDependenciesMeta: + webpack-cli: + optional: true + + webpackbar@6.0.1: + resolution: {integrity: sha512-TnErZpmuKdwWBdMoexjio3KKX6ZtoKHRVvLIU0A47R0VVBDtx3ZyOJDktgYixhoJokZTYTt1Z37OkO9pnGJa9Q==} + engines: {node: '>=14.21.3'} + peerDependencies: + webpack: 3 || 4 || 5 + + websocket-driver@0.7.4: + resolution: {integrity: sha512-b17KeDIQVjvb0ssuSDF2cYXSg2iztliJ4B9WdsuB6J952qCPKmnVq4DyW5motImXHDC1cBT/1UezrJVsKw5zjg==} + engines: {node: '>=0.8.0'} + + websocket-extensions@0.1.4: + resolution: {integrity: sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==} + engines: {node: '>=0.8.0'} + + whatwg-encoding@3.1.1: + 
resolution: {integrity: sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==} + engines: {node: '>=18'} + deprecated: Use @exodus/bytes instead for a more spec-conformant and faster implementation + + whatwg-mimetype@4.0.0: + resolution: {integrity: sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==} + engines: {node: '>=18'} + + which@2.0.2: + resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + + widest-line@4.0.1: + resolution: {integrity: sha512-o0cyEG0e8GPzT4iGHphIOh0cJOV8fivsXxddQasHPHfoZf1ZexrfeA21w2NaEN1RHE+fXlfISmOE8R9N3u3Qig==} + engines: {node: '>=12'} + + wildcard@2.0.1: + resolution: {integrity: sha512-CC1bOL87PIWSBhDcTrdeLo6eGT7mCFtrg0uIJtqJUFyK+eJnzl8A1niH56uu7KMa5XFrtiV+AQuHO3n7DsHnLQ==} + + word-wrap@1.2.5: + resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} + engines: {node: '>=0.10.0'} + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + write-file-atomic@3.0.3: + resolution: {integrity: sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==} + + ws@7.5.10: + resolution: {integrity: sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ==} + engines: {node: '>=8.3.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: 
^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + ws@8.19.0: + resolution: {integrity: sha512-blAT2mjOEIi0ZzruJfIhb3nps74PRWTCz1IjglWEEpQl5XS/UNama6u2/rjFkDDouqr4L67ry+1aGIALViWjDg==} + engines: {node: '>=10.0.0'} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: '>=5.0.2' + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true + + wsl-utils@0.1.0: + resolution: {integrity: sha512-h3Fbisa2nKGPxCpm89Hk33lBLsnaGBvctQopaBSOW/uIs6FTe1ATyAnKFJrzVs9vpGdsTe73WF3V4lIsk4Gacw==} + engines: {node: '>=18'} + + xdg-basedir@5.1.0: + resolution: {integrity: sha512-GCPAHLvrIH13+c0SuacwvRYj2SxJXQ4kaVTT5xgL3kPrz56XxkF21IGhjSE1+W0aw7gpBWRGXLCPnPby6lSpmQ==} + engines: {node: '>=12'} + + xml-js@1.6.11: + resolution: {integrity: sha512-7rVi2KMfwfWFl+GpPg6m80IVMWXLRjO+PxTq7V2CDhoGak0wzYzFgUY2m4XJ47OGdXd8eLE8EmwfAmdjw7lC1g==} + hasBin: true + + yallist@3.1.1: + resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} + + yaml@2.8.2: + resolution: {integrity: sha512-mplynKqc1C2hTVYxd0PU2xQAc22TI1vShAYGksCCfxbn/dFwnHTNi1bvYsBTkhdUNtGIf5xNOg938rrSSYvS9A==} + engines: {node: '>= 14.6'} + hasBin: true + + yargs-parser@21.1.1: + resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yocto-queue@0.1.0: + resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} + engines: {node: '>=10'} + + yocto-queue@1.2.2: + resolution: {integrity: sha512-4LCcse/U2MHZ63HAJVE+v71o7yOdIe4cZ70Wpf8D/IyjDKYQLV5GD46B+hSTjJsvV5PztjvHoU580EftxjDZFQ==} + engines: {node: '>=12.20'} + + zwitch@2.0.4: + resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} + +snapshots: + + '@algolia/abtesting@1.13.0': + dependencies: + 
'@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/client-abtesting@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/client-analytics@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/client-common@5.47.0': {} + + '@algolia/client-insights@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/client-personalization@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/client-query-suggestions@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/client-search@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/events@4.0.1': {} + + '@algolia/ingestion@1.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/monitoring@1.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/recommend@5.47.0': + dependencies: + '@algolia/client-common': 
5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + '@algolia/requester-browser-xhr@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + + '@algolia/requester-fetch@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + + '@algolia/requester-node-http@5.47.0': + dependencies: + '@algolia/client-common': 5.47.0 + + '@ampproject/remapping@2.3.0': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@antfu/install-pkg@1.1.0': + dependencies: + package-manager-detector: 1.6.0 + tinyexec: 1.0.2 + + '@babel/code-frame@7.29.0': + dependencies: + '@babel/helper-validator-identifier': 7.28.5 + js-tokens: 4.0.0 + picocolors: 1.1.1 + + '@babel/compat-data@7.29.0': {} + + '@babel/core@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.0 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helpers': 7.28.6 + '@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + '@jridgewell/remapping': 2.3.5 + convert-source-map: 2.0.0 + debug: 4.4.3 + gensync: 1.0.0-beta.2 + json5: 2.2.3 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/generator@7.29.0': + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + jsesc: 3.1.0 + + '@babel/helper-annotate-as-pure@7.27.3': + dependencies: + '@babel/types': 7.29.0 + + '@babel/helper-compilation-targets@7.28.6': + dependencies: + '@babel/compat-data': 7.29.0 + '@babel/helper-validator-option': 7.27.1 + browserslist: 4.28.1 + lru-cache: 5.1.1 + semver: 6.3.1 + + '@babel/helper-create-class-features-plugin@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + 
'@babel/helper-member-expression-to-functions': 7.28.5 + '@babel/helper-optimise-call-expression': 7.27.1 + '@babel/helper-replace-supers': 7.28.6(@babel/core@7.29.0) + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/traverse': 7.29.0 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/helper-create-regexp-features-plugin@7.28.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + regexpu-core: 6.4.0 + semver: 6.3.1 + + '@babel/helper-define-polyfill-provider@0.6.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + debug: 4.4.3 + lodash.debounce: 4.0.8 + resolve: 1.22.11 + transitivePeerDependencies: + - supports-color + + '@babel/helper-globals@7.28.0': {} + + '@babel/helper-member-expression-to-functions@7.28.5': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-imports@7.28.6': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-module-transforms@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-imports': 7.28.6 + '@babel/helper-validator-identifier': 7.28.5 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-optimise-call-expression@7.27.1': + dependencies: + '@babel/types': 7.29.0 + + '@babel/helper-plugin-utils@7.28.6': {} + + '@babel/helper-remap-async-to-generator@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-wrap-function': 7.28.6 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-replace-supers@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-member-expression-to-functions': 7.28.5 + '@babel/helper-optimise-call-expression': 7.27.1 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-skip-transparent-expression-wrappers@7.27.1': + dependencies: + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helper-string-parser@7.27.1': {} + + '@babel/helper-validator-identifier@7.28.5': {} + + '@babel/helper-validator-option@7.27.1': {} + + '@babel/helper-wrap-function@7.28.6': + dependencies: + '@babel/template': 7.28.6 + '@babel/traverse': 7.29.0 + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/helpers@7.28.6': + dependencies: + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + + '@babel/parser@7.29.0': + dependencies: + '@babel/types': 7.29.0 + + '@babel/plugin-bugfix-firefox-class-in-computed-class-key@7.28.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-bugfix-safari-class-field-initializer-scope@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/plugin-transform-optional-chaining': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-plugin-utils': 7.28.6 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-proposal-private-property-in-object@7.21.0-placeholder-for-preset-env.2(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@babel/plugin-syntax-dynamic-import@7.8.3(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-import-assertions@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-import-attributes@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-jsx@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-typescript@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-syntax-unicode-sets-regex@7.18.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-arrow-functions@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-async-generator-functions@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-remap-async-to-generator': 7.27.1(@babel/core@7.29.0) + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-async-to-generator@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-imports': 7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-remap-async-to-generator': 7.27.1(@babel/core@7.29.0) + transitivePeerDependencies: + - 
supports-color + + '@babel/plugin-transform-block-scoped-functions@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-block-scoping@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-class-properties@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-class-static-block@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-classes@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-globals': 7.28.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-replace-supers': 7.28.6(@babel/core@7.29.0) + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-computed-properties@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/template': 7.28.6 + + '@babel/plugin-transform-destructuring@7.28.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-dotall-regex@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + 
'@babel/plugin-transform-duplicate-keys@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-duplicate-named-capturing-groups-regex@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-dynamic-import@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-explicit-resource-management@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/plugin-transform-destructuring': 7.28.5(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-exponentiation-operator@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-export-namespace-from@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-for-of@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-function-name@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-json-strings@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-literals@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + 
'@babel/plugin-transform-logical-assignment-operators@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-member-expression-literals@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-modules-amd@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-modules-commonjs@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-modules-systemjs@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-validator-identifier': 7.28.5 + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-modules-umd@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-transforms': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-named-capturing-groups-regex@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-new-target@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-nullish-coalescing-operator@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-numeric-separator@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-object-rest-spread@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/plugin-transform-destructuring': 7.28.5(@babel/core@7.29.0) + '@babel/plugin-transform-parameters': 7.27.7(@babel/core@7.29.0) + '@babel/traverse': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-object-super@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-replace-supers': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-optional-catch-binding@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-optional-chaining@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-parameters@7.27.7(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-private-methods@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-private-property-in-object@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 
7.28.6 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-property-literals@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-react-constant-elements@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-react-display-name@7.28.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-react-jsx-development@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/plugin-transform-react-jsx': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-react-jsx@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-module-imports': 7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/plugin-syntax-jsx': 7.28.6(@babel/core@7.29.0) + '@babel/types': 7.29.0 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-react-pure-annotations@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-regenerator@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-regexp-modifiers@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-reserved-words@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-runtime@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-module-imports': 
7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + babel-plugin-polyfill-corejs2: 0.4.15(@babel/core@7.29.0) + babel-plugin-polyfill-corejs3: 0.13.0(@babel/core@7.29.0) + babel-plugin-polyfill-regenerator: 0.6.6(@babel/core@7.29.0) + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-shorthand-properties@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-spread@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-sticky-regex@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-template-literals@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-typeof-symbol@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-typescript@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-annotate-as-pure': 7.27.3 + '@babel/helper-create-class-features-plugin': 7.28.6(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-skip-transparent-expression-wrappers': 7.27.1 + '@babel/plugin-syntax-typescript': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/plugin-transform-unicode-escapes@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-unicode-property-regex@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + 
'@babel/plugin-transform-unicode-regex@7.27.1(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/plugin-transform-unicode-sets-regex@7.28.6(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-create-regexp-features-plugin': 7.28.5(@babel/core@7.29.0) + '@babel/helper-plugin-utils': 7.28.6 + + '@babel/preset-env@7.29.0(@babel/core@7.29.0)': + dependencies: + '@babel/compat-data': 7.29.0 + '@babel/core': 7.29.0 + '@babel/helper-compilation-targets': 7.28.6 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-validator-option': 7.27.1 + '@babel/plugin-bugfix-firefox-class-in-computed-class-key': 7.28.5(@babel/core@7.29.0) + '@babel/plugin-bugfix-safari-class-field-initializer-scope': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-proposal-private-property-in-object': 7.21.0-placeholder-for-preset-env.2(@babel/core@7.29.0) + '@babel/plugin-syntax-import-assertions': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-syntax-import-attributes': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-syntax-unicode-sets-regex': 7.18.6(@babel/core@7.29.0) + '@babel/plugin-transform-arrow-functions': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-async-generator-functions': 7.29.0(@babel/core@7.29.0) + '@babel/plugin-transform-async-to-generator': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-block-scoped-functions': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-block-scoping': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-class-properties': 7.28.6(@babel/core@7.29.0) + 
'@babel/plugin-transform-class-static-block': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-classes': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-computed-properties': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-destructuring': 7.28.5(@babel/core@7.29.0) + '@babel/plugin-transform-dotall-regex': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-duplicate-keys': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-duplicate-named-capturing-groups-regex': 7.29.0(@babel/core@7.29.0) + '@babel/plugin-transform-dynamic-import': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-explicit-resource-management': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-exponentiation-operator': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-export-namespace-from': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-for-of': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-function-name': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-json-strings': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-literals': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-logical-assignment-operators': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-member-expression-literals': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-modules-amd': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-modules-commonjs': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-modules-systemjs': 7.29.0(@babel/core@7.29.0) + '@babel/plugin-transform-modules-umd': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-named-capturing-groups-regex': 7.29.0(@babel/core@7.29.0) + '@babel/plugin-transform-new-target': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-nullish-coalescing-operator': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-numeric-separator': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-object-rest-spread': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-object-super': 
7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-optional-catch-binding': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-optional-chaining': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-parameters': 7.27.7(@babel/core@7.29.0) + '@babel/plugin-transform-private-methods': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-private-property-in-object': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-property-literals': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-regenerator': 7.29.0(@babel/core@7.29.0) + '@babel/plugin-transform-regexp-modifiers': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-reserved-words': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-shorthand-properties': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-spread': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-sticky-regex': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-template-literals': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-typeof-symbol': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-unicode-escapes': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-unicode-property-regex': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-unicode-regex': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-unicode-sets-regex': 7.28.6(@babel/core@7.29.0) + '@babel/preset-modules': 0.1.6-no-external-plugins(@babel/core@7.29.0) + babel-plugin-polyfill-corejs2: 0.4.15(@babel/core@7.29.0) + babel-plugin-polyfill-corejs3: 0.14.0(@babel/core@7.29.0) + babel-plugin-polyfill-regenerator: 0.6.6(@babel/core@7.29.0) + core-js-compat: 3.48.0 + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + '@babel/preset-modules@0.1.6-no-external-plugins(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/types': 7.29.0 + esutils: 2.0.3 + + '@babel/preset-react@7.28.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-validator-option': 7.27.1 + '@babel/plugin-transform-react-display-name': 7.28.0(@babel/core@7.29.0) + '@babel/plugin-transform-react-jsx': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-react-jsx-development': 7.27.1(@babel/core@7.29.0) + '@babel/plugin-transform-react-pure-annotations': 7.27.1(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/preset-typescript@7.28.5(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-plugin-utils': 7.28.6 + '@babel/helper-validator-option': 7.27.1 + '@babel/plugin-syntax-jsx': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-modules-commonjs': 7.28.6(@babel/core@7.29.0) + '@babel/plugin-transform-typescript': 7.28.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + '@babel/runtime-corejs3@7.29.0': + dependencies: + core-js-pure: 3.48.0 + + '@babel/runtime@7.28.6': {} + + '@babel/template@7.28.6': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + + '@babel/traverse@7.29.0': + dependencies: + '@babel/code-frame': 7.29.0 + '@babel/generator': 7.29.0 + '@babel/helper-globals': 7.28.0 + '@babel/parser': 7.29.0 + '@babel/template': 7.28.6 + '@babel/types': 7.29.0 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + '@babel/types@7.29.0': + dependencies: + '@babel/helper-string-parser': 7.27.1 + '@babel/helper-validator-identifier': 7.28.5 + + '@bcoe/v8-coverage@0.2.3': {} + + '@braintree/sanitize-url@7.1.2': {} + + '@changesets/apply-release-plan@7.0.14': + dependencies: + '@changesets/config': 3.1.2 + '@changesets/get-version-range-type': 0.4.0 + '@changesets/git': 3.0.4 + '@changesets/should-skip-package': 0.1.2 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + detect-indent: 6.1.0 + fs-extra: 7.0.1 + lodash.startcase: 4.4.0 + outdent: 0.5.0 + prettier: 2.8.8 + resolve-from: 5.0.0 + semver: 7.7.3 + + 
'@changesets/assemble-release-plan@6.0.9': + dependencies: + '@changesets/errors': 0.2.0 + '@changesets/get-dependents-graph': 2.1.3 + '@changesets/should-skip-package': 0.1.2 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + semver: 7.7.3 + + '@changesets/changelog-git@0.2.1': + dependencies: + '@changesets/types': 6.1.0 + + '@changesets/cli@2.29.8(@types/node@20.19.30)': + dependencies: + '@changesets/apply-release-plan': 7.0.14 + '@changesets/assemble-release-plan': 6.0.9 + '@changesets/changelog-git': 0.2.1 + '@changesets/config': 3.1.2 + '@changesets/errors': 0.2.0 + '@changesets/get-dependents-graph': 2.1.3 + '@changesets/get-release-plan': 4.0.14 + '@changesets/git': 3.0.4 + '@changesets/logger': 0.1.1 + '@changesets/pre': 2.0.2 + '@changesets/read': 0.6.6 + '@changesets/should-skip-package': 0.1.2 + '@changesets/types': 6.1.0 + '@changesets/write': 0.4.0 + '@inquirer/external-editor': 1.0.3(@types/node@20.19.30) + '@manypkg/get-packages': 1.1.3 + ansi-colors: 4.1.3 + ci-info: 3.9.0 + enquirer: 2.4.1 + fs-extra: 7.0.1 + mri: 1.2.0 + p-limit: 2.3.0 + package-manager-detector: 0.2.11 + picocolors: 1.1.1 + resolve-from: 5.0.0 + semver: 7.7.3 + spawndamnit: 3.0.1 + term-size: 2.2.1 + transitivePeerDependencies: + - '@types/node' + + '@changesets/config@3.1.2': + dependencies: + '@changesets/errors': 0.2.0 + '@changesets/get-dependents-graph': 2.1.3 + '@changesets/logger': 0.1.1 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + fs-extra: 7.0.1 + micromatch: 4.0.8 + + '@changesets/errors@0.2.0': + dependencies: + extendable-error: 0.1.7 + + '@changesets/get-dependents-graph@2.1.3': + dependencies: + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + picocolors: 1.1.1 + semver: 7.7.3 + + '@changesets/get-release-plan@4.0.14': + dependencies: + '@changesets/assemble-release-plan': 6.0.9 + '@changesets/config': 3.1.2 + '@changesets/pre': 2.0.2 + '@changesets/read': 0.6.6 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 
1.1.3 + + '@changesets/get-version-range-type@0.4.0': {} + + '@changesets/git@3.0.4': + dependencies: + '@changesets/errors': 0.2.0 + '@manypkg/get-packages': 1.1.3 + is-subdir: 1.2.0 + micromatch: 4.0.8 + spawndamnit: 3.0.1 + + '@changesets/logger@0.1.1': + dependencies: + picocolors: 1.1.1 + + '@changesets/parse@0.4.2': + dependencies: + '@changesets/types': 6.1.0 + js-yaml: 4.1.1 + + '@changesets/pre@2.0.2': + dependencies: + '@changesets/errors': 0.2.0 + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + fs-extra: 7.0.1 + + '@changesets/read@0.6.6': + dependencies: + '@changesets/git': 3.0.4 + '@changesets/logger': 0.1.1 + '@changesets/parse': 0.4.2 + '@changesets/types': 6.1.0 + fs-extra: 7.0.1 + p-filter: 2.1.0 + picocolors: 1.1.1 + + '@changesets/should-skip-package@0.1.2': + dependencies: + '@changesets/types': 6.1.0 + '@manypkg/get-packages': 1.1.3 + + '@changesets/types@4.1.0': {} + + '@changesets/types@6.1.0': {} + + '@changesets/write@0.4.0': + dependencies: + '@changesets/types': 6.1.0 + fs-extra: 7.0.1 + human-id: 4.1.3 + prettier: 2.8.8 + + '@chevrotain/cst-dts-gen@11.0.3': + dependencies: + '@chevrotain/gast': 11.0.3 + '@chevrotain/types': 11.0.3 + lodash-es: 4.17.21 + + '@chevrotain/gast@11.0.3': + dependencies: + '@chevrotain/types': 11.0.3 + lodash-es: 4.17.21 + + '@chevrotain/regexp-to-ast@11.0.3': {} + + '@chevrotain/types@11.0.3': {} + + '@chevrotain/utils@11.0.3': {} + + '@colors/colors@1.5.0': + optional: true + + '@csstools/cascade-layer-name-parser@2.0.5(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/color-helpers@5.1.0': {} + + '@csstools/css-calc@2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-parser-algorithms': 
3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-color-parser@3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/color-helpers': 5.1.0 + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/css-tokenizer@3.0.4': {} + + '@csstools/media-query-list-parser@4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + + '@csstools/postcss-alpha-function@1.0.1(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-cascade-layers@5.0.2(postcss@8.5.6)': + dependencies: + '@csstools/selector-specificity': 5.0.0(postcss-selector-parser@7.1.1) + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + '@csstools/postcss-color-function-display-p3-linear@1.0.1(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + 
'@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-color-function@4.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-color-mix-function@3.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-color-mix-variadic-function-arguments@1.0.2(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-content-alt-text@2.0.8(postcss@8.5.6)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-contrast-color-function@2.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 
3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-exponential-functions@2.0.9(postcss@8.5.6)': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-font-format-keywords@4.0.0(postcss@8.5.6)': + dependencies: + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-gamut-mapping@2.0.11(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-gradients-interpolation-method@5.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-hwb-function@4.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + 
'@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-ic-unit@4.0.4(postcss@8.5.6)': + dependencies: + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-initial@2.0.1(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@csstools/postcss-is-pseudo-class@5.0.3(postcss@8.5.6)': + dependencies: + '@csstools/selector-specificity': 5.0.0(postcss-selector-parser@7.1.1) + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + '@csstools/postcss-light-dark-function@2.0.11(postcss@8.5.6)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-logical-float-and-clear@3.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@csstools/postcss-logical-overflow@2.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@csstools/postcss-logical-overscroll-behavior@2.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@csstools/postcss-logical-resize@3.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-logical-viewport-units@3.0.4(postcss@8.5.6)': + dependencies: + '@csstools/css-tokenizer': 3.0.4 + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-media-minmax@2.0.9(postcss@8.5.6)': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/media-query-list-parser': 
4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + postcss: 8.5.6 + + '@csstools/postcss-media-queries-aspect-ratio-number-values@3.0.5(postcss@8.5.6)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/media-query-list-parser': 4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + postcss: 8.5.6 + + '@csstools/postcss-nested-calc@4.0.0(postcss@8.5.6)': + dependencies: + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-normalize-display-values@4.0.1(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-oklab-function@4.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-position-area-property@1.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@csstools/postcss-progressive-custom-properties@4.2.1(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-property-rule-prelude-list@1.0.0(postcss@8.5.6)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-random-function@2.0.1(postcss@8.5.6)': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 
3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-relative-color-syntax@3.0.12(postcss@8.5.6)': + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + '@csstools/postcss-scope-pseudo-class@4.0.1(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + '@csstools/postcss-sign-functions@1.1.4(postcss@8.5.6)': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-stepped-value-functions@4.0.9(postcss@8.5.6)': + dependencies: + '@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-syntax-descriptor-syntax-production@1.0.1(postcss@8.5.6)': + dependencies: + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-system-ui-font-family@1.0.0(postcss@8.5.6)': + dependencies: + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-text-decoration-shorthand@4.0.3(postcss@8.5.6)': + dependencies: + '@csstools/color-helpers': 5.1.0 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + '@csstools/postcss-trigonometric-functions@4.0.9(postcss@8.5.6)': + dependencies: + 
'@csstools/css-calc': 2.1.4(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + + '@csstools/postcss-unset-value@4.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@csstools/selector-resolve-nested@3.1.0(postcss-selector-parser@7.1.1)': + dependencies: + postcss-selector-parser: 7.1.1 + + '@csstools/selector-specificity@5.0.0(postcss-selector-parser@7.1.1)': + dependencies: + postcss-selector-parser: 7.1.1 + + '@csstools/utilities@2.0.0(postcss@8.5.6)': + dependencies: + postcss: 8.5.6 + + '@discoveryjs/json-ext@0.5.7': {} + + '@docsearch/core@4.5.3(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + optionalDependencies: + '@types/react': 18.3.27 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + + '@docsearch/css@4.5.3': {} + + '@docsearch/react@4.5.3(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docsearch/core': 4.5.3(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docsearch/css': 4.5.3 + optionalDependencies: + '@types/react': 18.3.27 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + + '@docusaurus/babel@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@babel/core': 7.29.0 + '@babel/generator': 7.29.0 + '@babel/plugin-syntax-dynamic-import': 7.8.3(@babel/core@7.29.0) + '@babel/plugin-transform-runtime': 7.29.0(@babel/core@7.29.0) + '@babel/preset-env': 7.29.0(@babel/core@7.29.0) + '@babel/preset-react': 7.28.5(@babel/core@7.29.0) + '@babel/preset-typescript': 7.28.5(@babel/core@7.29.0) + '@babel/runtime': 7.28.6 + '@babel/runtime-corejs3': 7.29.0 + '@babel/traverse': 7.29.0 + '@docusaurus/logger': 3.9.2 + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + babel-plugin-dynamic-import-node: 2.3.3 + fs-extra: 11.3.3 + tslib: 2.8.1 + 
transitivePeerDependencies: + - '@swc/core' + - esbuild + - react + - react-dom + - supports-color + - uglify-js + - webpack-cli + + '@docusaurus/bundler@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@babel/core': 7.29.0 + '@docusaurus/babel': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/cssnano-preset': 3.9.2 + '@docusaurus/logger': 3.9.2 + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + babel-loader: 9.2.1(@babel/core@7.29.0)(webpack@5.104.1) + clean-css: 5.3.3 + copy-webpack-plugin: 11.0.0(webpack@5.104.1) + css-loader: 6.11.0(webpack@5.104.1) + css-minimizer-webpack-plugin: 5.0.1(clean-css@5.3.3)(webpack@5.104.1) + cssnano: 6.1.2(postcss@8.5.6) + file-loader: 6.2.0(webpack@5.104.1) + html-minifier-terser: 7.2.0 + mini-css-extract-plugin: 2.10.0(webpack@5.104.1) + null-loader: 4.0.1(webpack@5.104.1) + postcss: 8.5.6 + postcss-loader: 7.3.4(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.1) + postcss-preset-env: 10.6.1(postcss@8.5.6) + terser-webpack-plugin: 5.3.16(webpack@5.104.1) + tslib: 2.8.1 + url-loader: 4.1.1(file-loader@6.2.0(webpack@5.104.1))(webpack@5.104.1) + webpack: 5.104.1 + webpackbar: 6.0.1(webpack@5.104.1) + transitivePeerDependencies: + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - csso + - esbuild + - lightningcss + - react + - react-dom + - supports-color + - typescript + - uglify-js + - webpack-cli + + '@docusaurus/core@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/babel': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/bundler': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/logger': 3.9.2 + '@docusaurus/mdx-loader': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + 
'@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@mdx-js/react': 3.1.1(@types/react@18.3.27)(react@18.3.1) + boxen: 6.2.1 + chalk: 4.1.2 + chokidar: 3.6.0 + cli-table3: 0.6.5 + combine-promises: 1.2.0 + commander: 5.1.0 + core-js: 3.48.0 + detect-port: 1.6.1 + escape-html: 1.0.3 + eta: 2.2.0 + eval: 0.1.8 + execa: 5.1.1 + fs-extra: 11.3.3 + html-tags: 3.3.1 + html-webpack-plugin: 5.6.6(webpack@5.104.1) + leven: 3.1.0 + lodash: 4.17.23 + open: 8.4.2 + p-map: 4.0.0 + prompts: 2.4.2 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-helmet-async: '@slorber/react-helmet-async@1.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)' + react-loadable: '@docusaurus/react-loadable@6.0.0(react@18.3.1)' + react-loadable-ssr-addon-v5-slorber: 1.0.1(@docusaurus/react-loadable@6.0.0(react@18.3.1))(webpack@5.104.1) + react-router: 5.3.4(react@18.3.1) + react-router-config: 5.1.1(react-router@5.3.4(react@18.3.1))(react@18.3.1) + react-router-dom: 5.3.4(react@18.3.1) + semver: 7.7.3 + serve-handler: 6.1.6 + tinypool: 1.1.1 + tslib: 2.8.1 + update-notifier: 6.0.2 + webpack: 5.104.1 + webpack-bundle-analyzer: 4.10.2 + webpack-dev-server: 5.2.3(debug@4.4.3)(tslib@2.8.1)(webpack@5.104.1) + webpack-merge: 6.0.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/cssnano-preset@3.9.2': + dependencies: + cssnano-preset-advanced: 6.1.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-sort-media-queries: 5.2.0(postcss@8.5.6) + tslib: 2.8.1 + + '@docusaurus/logger@3.9.2': + dependencies: + chalk: 4.1.2 + tslib: 2.8.1 + + 
'@docusaurus/mdx-loader@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docusaurus/logger': 3.9.2 + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@mdx-js/mdx': 3.1.1 + '@slorber/remark-comment': 1.0.0 + escape-html: 1.0.3 + estree-util-value-to-estree: 3.5.0 + file-loader: 6.2.0(webpack@5.104.1) + fs-extra: 11.3.3 + image-size: 2.0.2 + mdast-util-mdx: 3.0.0 + mdast-util-to-string: 4.0.0 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + rehype-raw: 7.0.0 + remark-directive: 3.0.1 + remark-emoji: 4.0.1 + remark-frontmatter: 5.0.0 + remark-gfm: 4.0.1 + stringify-object: 3.3.0 + tslib: 2.8.1 + unified: 11.0.5 + unist-util-visit: 5.1.0 + url-loader: 4.1.1(file-loader@6.2.0(webpack@5.104.1))(webpack@5.104.1) + vfile: 6.0.3 + webpack: 5.104.1 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - supports-color + - uglify-js + - webpack-cli + + '@docusaurus/module-type-aliases@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/history': 4.7.11 + '@types/react': 18.3.27 + '@types/react-router-config': 5.0.11 + '@types/react-router-dom': 5.3.3 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-helmet-async: '@slorber/react-helmet-async@1.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)' + react-loadable: '@docusaurus/react-loadable@6.0.0(react@18.3.1)' + transitivePeerDependencies: + - '@swc/core' + - esbuild + - supports-color + - uglify-js + - webpack-cli + + '@docusaurus/plugin-content-blog@3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 
3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/logger': 3.9.2 + '@docusaurus/mdx-loader': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/plugin-content-docs': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + cheerio: 1.0.0-rc.12 + feed: 4.2.2 + fs-extra: 11.3.3 + lodash: 4.17.23 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + schema-dts: 1.1.5 + srcset: 4.0.0 + tslib: 2.8.1 + unist-util-visit: 5.1.0 + utility-types: 3.11.0 + webpack: 5.104.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/logger': 3.9.2 + '@docusaurus/mdx-loader': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + 
'@docusaurus/module-type-aliases': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/react-router-config': 5.0.11 + combine-promises: 1.2.0 + fs-extra: 11.3.3 + js-yaml: 4.1.1 + lodash: 4.17.23 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + schema-dts: 1.1.5 + tslib: 2.8.1 + utility-types: 3.11.0 + webpack: 5.104.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-content-pages@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/mdx-loader': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + fs-extra: 11.3.3 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + webpack: 5.104.1 + transitivePeerDependencies: + - 
'@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-css-cascade-layers@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - react + - react-dom + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-debug@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + fs-extra: 11.3.3 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-json-view-lite: 2.5.0(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - 
supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-google-analytics@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-google-gtag@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/gtag.js': 0.0.12 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-google-tag-manager@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': 
+ dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-sitemap@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/logger': 3.9.2 + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + fs-extra: 11.3.3 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + sitemap: 7.1.2 + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/plugin-svgr@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 
3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@svgr/core': 8.1.0(typescript@5.9.3) + '@svgr/webpack': 8.1.0(typescript@5.9.3) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + webpack: 5.104.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/preset-classic@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-content-blog': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-content-docs': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-content-pages': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-css-cascade-layers': 
3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-debug': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-google-analytics': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-google-gtag': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-google-tag-manager': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-sitemap': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-svgr': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-classic': 3.9.2(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/theme-search-algolia': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - '@types/react' + - bufferutil + - csso + - debug + - esbuild + - 
lightningcss + - search-insights + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/react-loadable@6.0.0(react@18.3.1)': + dependencies: + '@types/react': 18.3.27 + react: 18.3.1 + + '@docusaurus/theme-classic@3.9.2(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/logger': 3.9.2 + '@docusaurus/mdx-loader': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/module-type-aliases': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/plugin-content-blog': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-content-docs': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/plugin-content-pages': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/theme-translations': 3.9.2 + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + 
'@mdx-js/react': 3.1.1(@types/react@18.3.27)(react@18.3.1) + clsx: 2.1.1 + infima: 0.2.0-alpha.45 + lodash: 4.17.23 + nprogress: 0.2.0 + postcss: 8.5.6 + prism-react-renderer: 2.4.1(react@18.3.1) + prismjs: 1.30.0 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-router-dom: 5.3.4(react@18.3.1) + rtlcss: 4.3.0 + tslib: 2.8.1 + utility-types: 3.11.0 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - '@types/react' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/theme-common@3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docusaurus/mdx-loader': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/module-type-aliases': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/plugin-content-docs': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@types/history': 4.7.11 + '@types/react': 18.3.27 + '@types/react-router-config': 5.0.11 + clsx: 2.1.1 + parse-numeric-range: 1.3.0 + prism-react-renderer: 2.4.1(react@18.3.1) + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + utility-types: 3.11.0 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - supports-color + - uglify-js + - webpack-cli + + 
'@docusaurus/theme-mermaid@3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/module-type-aliases': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + mermaid: 11.12.2 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@docusaurus/plugin-content-docs' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/theme-search-algolia@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + '@docsearch/react': 4.5.3(@types/react@18.3.27)(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/core': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/logger': 3.9.2 + '@docusaurus/plugin-content-docs': 
3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/theme-translations': 3.9.2 + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + algoliasearch: 5.47.0 + algoliasearch-helper: 3.27.0(algoliasearch@5.47.0) + clsx: 2.1.1 + eta: 2.2.0 + fs-extra: 11.3.3 + lodash: 4.17.23 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + utility-types: 3.11.0 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - '@types/react' + - bufferutil + - csso + - debug + - esbuild + - lightningcss + - search-insights + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@docusaurus/theme-translations@3.9.2': + dependencies: + fs-extra: 11.3.3 + tslib: 2.8.1 + + '@docusaurus/types@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@mdx-js/mdx': 3.1.1 + '@types/history': 4.7.11 + '@types/mdast': 4.0.4 + '@types/react': 18.3.27 + commander: 5.1.0 + joi: 17.13.3 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-helmet-async: '@slorber/react-helmet-async@1.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)' + utility-types: 3.11.0 + webpack: 5.104.1 + webpack-merge: 5.10.0 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - supports-color + - uglify-js + - webpack-cli + + '@docusaurus/utils-common@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + tslib: 2.8.1 + 
transitivePeerDependencies: + - '@swc/core' + - esbuild + - react + - react-dom + - supports-color + - uglify-js + - webpack-cli + + '@docusaurus/utils-validation@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docusaurus/logger': 3.9.2 + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + fs-extra: 11.3.3 + joi: 17.13.3 + js-yaml: 4.1.1 + lodash: 4.17.23 + tslib: 2.8.1 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - react + - react-dom + - supports-color + - uglify-js + - webpack-cli + + '@docusaurus/utils@3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': + dependencies: + '@docusaurus/logger': 3.9.2 + '@docusaurus/types': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + escape-string-regexp: 4.0.0 + execa: 5.1.1 + file-loader: 6.2.0(webpack@5.104.1) + fs-extra: 11.3.3 + github-slugger: 1.5.0 + globby: 11.1.0 + gray-matter: 4.0.3 + jiti: 1.21.7 + js-yaml: 4.1.1 + lodash: 4.17.23 + micromatch: 4.0.8 + p-queue: 6.6.2 + prompts: 2.4.2 + resolve-pathname: 3.0.0 + tslib: 2.8.1 + url-loader: 4.1.1(file-loader@6.2.0(webpack@5.104.1))(webpack@5.104.1) + utility-types: 3.11.0 + webpack: 5.104.1 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - react + - react-dom + - supports-color + - uglify-js + - webpack-cli + + '@easyops-cn/autocomplete.js@0.38.1': + dependencies: + cssesc: 3.0.0 + immediate: 3.3.0 + + '@easyops-cn/docusaurus-search-local@0.52.3(@docusaurus/theme-common@3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1))(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3)': + dependencies: + 
'@docusaurus/plugin-content-docs': 3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(debug@4.4.3)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3) + '@docusaurus/theme-common': 3.9.2(@docusaurus/plugin-content-docs@3.9.2(@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1))(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(typescript@5.9.3))(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/theme-translations': 3.9.2 + '@docusaurus/utils': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-common': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@docusaurus/utils-validation': 3.9.2(react-dom@18.3.1(react@18.3.1))(react@18.3.1) + '@easyops-cn/autocomplete.js': 0.38.1 + '@node-rs/jieba': 1.10.4 + cheerio: 1.2.0 + clsx: 2.1.1 + comlink: 4.4.2 + debug: 4.4.3 + fs-extra: 10.1.0 + klaw-sync: 6.0.0 + lunr: 2.3.9 + lunr-languages: 1.14.0 + mark.js: 8.11.1 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + tslib: 2.8.1 + transitivePeerDependencies: + - '@docusaurus/faster' + - '@mdx-js/react' + - '@parcel/css' + - '@rspack/core' + - '@swc/core' + - '@swc/css' + - bufferutil + - csso + - esbuild + - lightningcss + - supports-color + - typescript + - uglify-js + - utf-8-validate + - webpack-cli + + '@emnapi/core@1.8.1': + dependencies: + '@emnapi/wasi-threads': 1.1.0 + tslib: 2.8.1 + optional: true + + '@emnapi/runtime@1.8.1': + dependencies: + tslib: 2.8.1 + optional: true + + '@emnapi/wasi-threads@1.1.0': + dependencies: + tslib: 2.8.1 + optional: true + + '@esbuild/aix-ppc64@0.21.5': + optional: true + + '@esbuild/aix-ppc64@0.27.2': + optional: true + + '@esbuild/android-arm64@0.21.5': + optional: true + + '@esbuild/android-arm64@0.27.2': + optional: true + + '@esbuild/android-arm@0.21.5': + optional: true + + '@esbuild/android-arm@0.27.2': + optional: true + + '@esbuild/android-x64@0.21.5': + optional: true + + '@esbuild/android-x64@0.27.2': + optional: true + + '@esbuild/darwin-arm64@0.21.5': + optional: 
true + + '@esbuild/darwin-arm64@0.27.2': + optional: true + + '@esbuild/darwin-x64@0.21.5': + optional: true + + '@esbuild/darwin-x64@0.27.2': + optional: true + + '@esbuild/freebsd-arm64@0.21.5': + optional: true + + '@esbuild/freebsd-arm64@0.27.2': + optional: true + + '@esbuild/freebsd-x64@0.21.5': + optional: true + + '@esbuild/freebsd-x64@0.27.2': + optional: true + + '@esbuild/linux-arm64@0.21.5': + optional: true + + '@esbuild/linux-arm64@0.27.2': + optional: true + + '@esbuild/linux-arm@0.21.5': + optional: true + + '@esbuild/linux-arm@0.27.2': + optional: true + + '@esbuild/linux-ia32@0.21.5': + optional: true + + '@esbuild/linux-ia32@0.27.2': + optional: true + + '@esbuild/linux-loong64@0.21.5': + optional: true + + '@esbuild/linux-loong64@0.27.2': + optional: true + + '@esbuild/linux-mips64el@0.21.5': + optional: true + + '@esbuild/linux-mips64el@0.27.2': + optional: true + + '@esbuild/linux-ppc64@0.21.5': + optional: true + + '@esbuild/linux-ppc64@0.27.2': + optional: true + + '@esbuild/linux-riscv64@0.21.5': + optional: true + + '@esbuild/linux-riscv64@0.27.2': + optional: true + + '@esbuild/linux-s390x@0.21.5': + optional: true + + '@esbuild/linux-s390x@0.27.2': + optional: true + + '@esbuild/linux-x64@0.21.5': + optional: true + + '@esbuild/linux-x64@0.27.2': + optional: true + + '@esbuild/netbsd-arm64@0.27.2': + optional: true + + '@esbuild/netbsd-x64@0.21.5': + optional: true + + '@esbuild/netbsd-x64@0.27.2': + optional: true + + '@esbuild/openbsd-arm64@0.27.2': + optional: true + + '@esbuild/openbsd-x64@0.21.5': + optional: true + + '@esbuild/openbsd-x64@0.27.2': + optional: true + + '@esbuild/openharmony-arm64@0.27.2': + optional: true + + '@esbuild/sunos-x64@0.21.5': + optional: true + + '@esbuild/sunos-x64@0.27.2': + optional: true + + '@esbuild/win32-arm64@0.21.5': + optional: true + + '@esbuild/win32-arm64@0.27.2': + optional: true + + '@esbuild/win32-ia32@0.21.5': + optional: true + + '@esbuild/win32-ia32@0.27.2': + optional: true + + 
'@esbuild/win32-x64@0.21.5': + optional: true + + '@esbuild/win32-x64@0.27.2': + optional: true + + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2(jiti@1.21.7))': + dependencies: + eslint: 9.39.2(jiti@1.21.7) + eslint-visitor-keys: 3.4.3 + + '@eslint-community/regexpp@4.12.2': {} + + '@eslint/config-array@0.21.1': + dependencies: + '@eslint/object-schema': 2.1.7 + debug: 4.4.3 + minimatch: 3.1.2 + transitivePeerDependencies: + - supports-color + + '@eslint/config-helpers@0.4.2': + dependencies: + '@eslint/core': 0.17.0 + + '@eslint/core@0.17.0': + dependencies: + '@types/json-schema': 7.0.15 + + '@eslint/eslintrc@3.3.3': + dependencies: + ajv: 6.12.6 + debug: 4.4.3 + espree: 10.4.0 + globals: 14.0.0 + ignore: 5.3.2 + import-fresh: 3.3.1 + js-yaml: 4.1.1 + minimatch: 3.1.2 + strip-json-comments: 3.1.1 + transitivePeerDependencies: + - supports-color + + '@eslint/js@9.39.2': {} + + '@eslint/object-schema@2.1.7': {} + + '@eslint/plugin-kit@0.4.1': + dependencies: + '@eslint/core': 0.17.0 + levn: 0.4.1 + + '@fastify/busboy@2.1.1': {} + + '@hapi/hoek@9.3.0': {} + + '@hapi/topo@5.1.0': + dependencies: + '@hapi/hoek': 9.3.0 + + '@humanfs/core@0.19.1': {} + + '@humanfs/node@0.16.7': + dependencies: + '@humanfs/core': 0.19.1 + '@humanwhocodes/retry': 0.4.3 + + '@humanwhocodes/module-importer@1.0.1': {} + + '@humanwhocodes/retry@0.4.3': {} + + '@iconify/types@2.0.0': {} + + '@iconify/utils@3.1.0': + dependencies: + '@antfu/install-pkg': 1.1.0 + '@iconify/types': 2.0.0 + mlly: 1.8.0 + + '@inquirer/external-editor@1.0.3(@types/node@20.19.30)': + dependencies: + chardet: 2.1.1 + iconv-lite: 0.7.2 + optionalDependencies: + '@types/node': 20.19.30 + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.2 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@istanbuljs/schema@0.1.3': {} + + '@jest/schemas@29.6.3': + dependencies: + '@sinclair/typebox': 0.27.8 + + 
'@jest/types@29.6.3': + dependencies: + '@jest/schemas': 29.6.3 + '@types/istanbul-lib-coverage': 2.0.6 + '@types/istanbul-reports': 3.0.4 + '@types/node': 20.19.30 + '@types/yargs': 17.0.35 + chalk: 4.1.2 + + '@jridgewell/gen-mapping@0.3.13': + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/remapping@2.3.5': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/resolve-uri@3.1.2': {} + + '@jridgewell/source-map@0.3.11': + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + '@jridgewell/trace-mapping': 0.3.31 + + '@jridgewell/sourcemap-codec@1.5.5': {} + + '@jridgewell/trace-mapping@0.3.31': + dependencies: + '@jridgewell/resolve-uri': 3.1.2 + '@jridgewell/sourcemap-codec': 1.5.5 + + '@jsonjoy.com/base64@1.1.2(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/base64@17.65.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/buffers@1.2.1(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/buffers@17.65.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/codegen@1.0.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/codegen@17.65.0(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/fs-core@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-fsa@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-node-builtins@4.56.10(tslib@2.8.1)': + dependencies: + tslib: 2.8.1 + + '@jsonjoy.com/fs-node-to-fsa@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-fsa': 4.56.10(tslib@2.8.1) + 
'@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-node-utils@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-node@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-print': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-snapshot': 4.56.10(tslib@2.8.1) + glob-to-regex.js: 1.2.0(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-print@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/fs-snapshot@4.56.10(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/buffers': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/json-pack': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/util': 17.65.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pack@1.21.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/base64': 1.1.2(tslib@2.8.1) + '@jsonjoy.com/buffers': 1.2.1(tslib@2.8.1) + '@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1) + '@jsonjoy.com/json-pointer': 1.0.2(tslib@2.8.1) + '@jsonjoy.com/util': 1.9.0(tslib@2.8.1) + hyperdyperid: 1.2.0 + thingies: 2.5.0(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pack@17.65.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/base64': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/buffers': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/codegen': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/json-pointer': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/util': 17.65.0(tslib@2.8.1) + hyperdyperid: 1.2.0 + thingies: 2.5.0(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pointer@1.0.2(tslib@2.8.1)': + dependencies: + 
'@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1) + '@jsonjoy.com/util': 1.9.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/json-pointer@17.65.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/util': 17.65.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/util@1.9.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/buffers': 1.2.1(tslib@2.8.1) + '@jsonjoy.com/codegen': 1.0.0(tslib@2.8.1) + tslib: 2.8.1 + + '@jsonjoy.com/util@17.65.0(tslib@2.8.1)': + dependencies: + '@jsonjoy.com/buffers': 17.65.0(tslib@2.8.1) + '@jsonjoy.com/codegen': 17.65.0(tslib@2.8.1) + tslib: 2.8.1 + + '@leichtgewicht/ip-codec@2.0.5': {} + + '@manypkg/find-root@1.1.0': + dependencies: + '@babel/runtime': 7.28.6 + '@types/node': 12.20.55 + find-up: 4.1.0 + fs-extra: 8.1.0 + + '@manypkg/get-packages@1.1.3': + dependencies: + '@babel/runtime': 7.28.6 + '@changesets/types': 4.1.0 + '@manypkg/find-root': 1.1.0 + fs-extra: 8.1.0 + globby: 11.1.0 + read-yaml-file: 1.1.0 + + '@mdx-js/mdx@3.1.1': + dependencies: + '@types/estree': 1.0.8 + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdx': 2.0.13 + acorn: 8.15.0 + collapse-white-space: 2.1.0 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + estree-util-scope: 1.0.0 + estree-walker: 3.0.3 + hast-util-to-jsx-runtime: 2.3.6 + markdown-extensions: 2.0.0 + recma-build-jsx: 1.0.0 + recma-jsx: 1.0.1(acorn@8.15.0) + recma-stringify: 1.0.0 + rehype-recma: 1.0.0 + remark-mdx: 3.1.1 + remark-parse: 11.0.0 + remark-rehype: 11.1.2 + source-map: 0.7.6 + unified: 11.0.5 + unist-util-position-from-estree: 2.0.0 + unist-util-stringify-position: 4.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + transitivePeerDependencies: + - supports-color + + '@mdx-js/react@3.1.1(@types/react@18.3.27)(react@18.3.1)': + dependencies: + '@types/mdx': 2.0.13 + '@types/react': 18.3.27 + react: 18.3.1 + + '@mermaid-js/parser@0.6.3': + dependencies: + langium: 3.3.1 + + '@napi-rs/wasm-runtime@0.2.12': + dependencies: + '@emnapi/core': 1.8.1 + '@emnapi/runtime': 1.8.1 + 
'@tybys/wasm-util': 0.10.1 + optional: true + + '@noble/hashes@1.4.0': {} + + '@node-rs/jieba-android-arm-eabi@1.10.4': + optional: true + + '@node-rs/jieba-android-arm64@1.10.4': + optional: true + + '@node-rs/jieba-darwin-arm64@1.10.4': + optional: true + + '@node-rs/jieba-darwin-x64@1.10.4': + optional: true + + '@node-rs/jieba-freebsd-x64@1.10.4': + optional: true + + '@node-rs/jieba-linux-arm-gnueabihf@1.10.4': + optional: true + + '@node-rs/jieba-linux-arm64-gnu@1.10.4': + optional: true + + '@node-rs/jieba-linux-arm64-musl@1.10.4': + optional: true + + '@node-rs/jieba-linux-x64-gnu@1.10.4': + optional: true + + '@node-rs/jieba-linux-x64-musl@1.10.4': + optional: true + + '@node-rs/jieba-wasm32-wasi@1.10.4': + dependencies: + '@napi-rs/wasm-runtime': 0.2.12 + optional: true + + '@node-rs/jieba-win32-arm64-msvc@1.10.4': + optional: true + + '@node-rs/jieba-win32-ia32-msvc@1.10.4': + optional: true + + '@node-rs/jieba-win32-x64-msvc@1.10.4': + optional: true + + '@node-rs/jieba@1.10.4': + optionalDependencies: + '@node-rs/jieba-android-arm-eabi': 1.10.4 + '@node-rs/jieba-android-arm64': 1.10.4 + '@node-rs/jieba-darwin-arm64': 1.10.4 + '@node-rs/jieba-darwin-x64': 1.10.4 + '@node-rs/jieba-freebsd-x64': 1.10.4 + '@node-rs/jieba-linux-arm-gnueabihf': 1.10.4 + '@node-rs/jieba-linux-arm64-gnu': 1.10.4 + '@node-rs/jieba-linux-arm64-musl': 1.10.4 + '@node-rs/jieba-linux-x64-gnu': 1.10.4 + '@node-rs/jieba-linux-x64-musl': 1.10.4 + '@node-rs/jieba-wasm32-wasi': 1.10.4 + '@node-rs/jieba-win32-arm64-msvc': 1.10.4 + '@node-rs/jieba-win32-ia32-msvc': 1.10.4 + '@node-rs/jieba-win32-x64-msvc': 1.10.4 + + '@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.20.1 + + '@peculiar/asn1-cms@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + '@peculiar/asn1-x509-attr': 2.6.0 + asn1js: 
3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-csr@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-ecc@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-pfx@2.6.0': + dependencies: + '@peculiar/asn1-cms': 2.6.0 + '@peculiar/asn1-pkcs8': 2.6.0 + '@peculiar/asn1-rsa': 2.6.0 + '@peculiar/asn1-schema': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-pkcs8@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-pkcs9@2.6.0': + dependencies: + '@peculiar/asn1-cms': 2.6.0 + '@peculiar/asn1-pfx': 2.6.0 + '@peculiar/asn1-pkcs8': 2.6.0 + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + '@peculiar/asn1-x509-attr': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-rsa@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-schema@2.6.0': + dependencies: + asn1js: 3.0.7 + pvtsutils: 1.3.6 + tslib: 2.8.1 + + '@peculiar/asn1-x509-attr@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + asn1js: 3.0.7 + tslib: 2.8.1 + + '@peculiar/asn1-x509@2.6.0': + dependencies: + '@peculiar/asn1-schema': 2.6.0 + asn1js: 3.0.7 + pvtsutils: 1.3.6 + tslib: 2.8.1 + + '@peculiar/x509@1.14.3': + dependencies: + '@peculiar/asn1-cms': 2.6.0 + '@peculiar/asn1-csr': 2.6.0 + '@peculiar/asn1-ecc': 2.6.0 + '@peculiar/asn1-pkcs9': 2.6.0 + '@peculiar/asn1-rsa': 2.6.0 + '@peculiar/asn1-schema': 2.6.0 + '@peculiar/asn1-x509': 2.6.0 + pvtsutils: 1.3.6 + reflect-metadata: 0.2.2 + tslib: 2.8.1 + tsyringe: 4.10.0 + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@pnpm/config.env-replace@1.1.0': {} + + '@pnpm/network.ca-file@1.0.2': + dependencies: + graceful-fs: 4.2.10 + + '@pnpm/npm-conf@3.0.2': + dependencies: 
+ '@pnpm/config.env-replace': 1.1.0 + '@pnpm/network.ca-file': 1.0.2 + config-chain: 1.1.13 + + '@polka/url@1.0.0-next.29': {} + + '@rollup/rollup-android-arm-eabi@4.57.1': + optional: true + + '@rollup/rollup-android-arm64@4.57.1': + optional: true + + '@rollup/rollup-darwin-arm64@4.57.1': + optional: true + + '@rollup/rollup-darwin-x64@4.57.1': + optional: true + + '@rollup/rollup-freebsd-arm64@4.57.1': + optional: true + + '@rollup/rollup-freebsd-x64@4.57.1': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.57.1': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.57.1': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-loong64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-loong64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-ppc64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-ppc64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-x64-musl@4.57.1': + optional: true + + '@rollup/rollup-openbsd-x64@4.57.1': + optional: true + + '@rollup/rollup-openharmony-arm64@4.57.1': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.57.1': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.57.1': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.57.1': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.57.1': + optional: true + + '@sideway/address@4.1.5': + dependencies: + '@hapi/hoek': 9.3.0 + + '@sideway/formula@3.0.1': {} + + '@sideway/pinpoint@2.0.0': {} + + '@sinclair/typebox@0.27.8': {} + + '@sindresorhus/is@4.6.0': {} + + '@sindresorhus/is@5.6.0': {} + + '@slorber/react-helmet-async@1.3.0(react-dom@18.3.1(react@18.3.1))(react@18.3.1)': 
+ dependencies: + '@babel/runtime': 7.28.6 + invariant: 2.2.4 + prop-types: 15.8.1 + react: 18.3.1 + react-dom: 18.3.1(react@18.3.1) + react-fast-compare: 3.2.2 + shallowequal: 1.1.0 + + '@slorber/remark-comment@1.0.0': + dependencies: + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + + '@svgr/babel-plugin-add-jsx-attribute@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-remove-jsx-attribute@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-remove-jsx-empty-expression@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-replace-jsx-attribute-value@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-svg-dynamic-title@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-svg-em-dimensions@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-transform-react-native-svg@8.1.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-plugin-transform-svg-component@8.0.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + + '@svgr/babel-preset@8.1.0(@babel/core@7.29.0)': + dependencies: + '@babel/core': 7.29.0 + '@svgr/babel-plugin-add-jsx-attribute': 8.0.0(@babel/core@7.29.0) + '@svgr/babel-plugin-remove-jsx-attribute': 8.0.0(@babel/core@7.29.0) + '@svgr/babel-plugin-remove-jsx-empty-expression': 8.0.0(@babel/core@7.29.0) + '@svgr/babel-plugin-replace-jsx-attribute-value': 8.0.0(@babel/core@7.29.0) + '@svgr/babel-plugin-svg-dynamic-title': 8.0.0(@babel/core@7.29.0) + '@svgr/babel-plugin-svg-em-dimensions': 8.0.0(@babel/core@7.29.0) + '@svgr/babel-plugin-transform-react-native-svg': 8.1.0(@babel/core@7.29.0) + '@svgr/babel-plugin-transform-svg-component': 8.0.0(@babel/core@7.29.0) + + '@svgr/core@8.1.0(typescript@5.9.3)': + dependencies: + 
'@babel/core': 7.29.0 + '@svgr/babel-preset': 8.1.0(@babel/core@7.29.0) + camelcase: 6.3.0 + cosmiconfig: 8.3.6(typescript@5.9.3) + snake-case: 3.0.4 + transitivePeerDependencies: + - supports-color + - typescript + + '@svgr/hast-util-to-babel-ast@8.0.0': + dependencies: + '@babel/types': 7.29.0 + entities: 4.5.0 + + '@svgr/plugin-jsx@8.1.0(@svgr/core@8.1.0(typescript@5.9.3))': + dependencies: + '@babel/core': 7.29.0 + '@svgr/babel-preset': 8.1.0(@babel/core@7.29.0) + '@svgr/core': 8.1.0(typescript@5.9.3) + '@svgr/hast-util-to-babel-ast': 8.0.0 + svg-parser: 2.0.4 + transitivePeerDependencies: + - supports-color + + '@svgr/plugin-svgo@8.1.0(@svgr/core@8.1.0(typescript@5.9.3))(typescript@5.9.3)': + dependencies: + '@svgr/core': 8.1.0(typescript@5.9.3) + cosmiconfig: 8.3.6(typescript@5.9.3) + deepmerge: 4.3.1 + svgo: 3.3.2 + transitivePeerDependencies: + - typescript + + '@svgr/webpack@8.1.0(typescript@5.9.3)': + dependencies: + '@babel/core': 7.29.0 + '@babel/plugin-transform-react-constant-elements': 7.27.1(@babel/core@7.29.0) + '@babel/preset-env': 7.29.0(@babel/core@7.29.0) + '@babel/preset-react': 7.28.5(@babel/core@7.29.0) + '@babel/preset-typescript': 7.28.5(@babel/core@7.29.0) + '@svgr/core': 8.1.0(typescript@5.9.3) + '@svgr/plugin-jsx': 8.1.0(@svgr/core@8.1.0(typescript@5.9.3)) + '@svgr/plugin-svgo': 8.1.0(@svgr/core@8.1.0(typescript@5.9.3))(typescript@5.9.3) + transitivePeerDependencies: + - supports-color + - typescript + + '@szmarczak/http-timer@5.0.1': + dependencies: + defer-to-connect: 2.0.1 + + '@trysound/sax@0.2.0': {} + + '@tybys/wasm-util@0.10.1': + dependencies: + tslib: 2.8.1 + optional: true + + '@types/body-parser@1.19.6': + dependencies: + '@types/connect': 3.4.38 + '@types/node': 20.19.30 + + '@types/bonjour@3.5.13': + dependencies: + '@types/node': 20.19.30 + + '@types/connect-history-api-fallback@1.5.4': + dependencies: + '@types/express-serve-static-core': 4.19.8 + '@types/node': 20.19.30 + + '@types/connect@3.4.38': + dependencies: + 
'@types/node': 20.19.30 + + '@types/d3-array@3.2.2': {} + + '@types/d3-axis@3.0.6': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-brush@3.0.6': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-chord@3.0.6': {} + + '@types/d3-color@3.1.3': {} + + '@types/d3-contour@3.0.6': + dependencies: + '@types/d3-array': 3.2.2 + '@types/geojson': 7946.0.16 + + '@types/d3-delaunay@6.0.4': {} + + '@types/d3-dispatch@3.0.7': {} + + '@types/d3-drag@3.0.7': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-dsv@3.0.7': {} + + '@types/d3-ease@3.0.2': {} + + '@types/d3-fetch@3.0.7': + dependencies: + '@types/d3-dsv': 3.0.7 + + '@types/d3-force@3.0.10': {} + + '@types/d3-format@3.0.4': {} + + '@types/d3-geo@3.1.0': + dependencies: + '@types/geojson': 7946.0.16 + + '@types/d3-hierarchy@3.1.7': {} + + '@types/d3-interpolate@3.0.4': + dependencies: + '@types/d3-color': 3.1.3 + + '@types/d3-path@3.1.1': {} + + '@types/d3-polygon@3.0.2': {} + + '@types/d3-quadtree@3.0.6': {} + + '@types/d3-random@3.0.3': {} + + '@types/d3-scale-chromatic@3.1.0': {} + + '@types/d3-scale@4.0.9': + dependencies: + '@types/d3-time': 3.0.4 + + '@types/d3-selection@3.0.11': {} + + '@types/d3-shape@3.1.8': + dependencies: + '@types/d3-path': 3.1.1 + + '@types/d3-time-format@4.0.3': {} + + '@types/d3-time@3.0.4': {} + + '@types/d3-timer@3.0.2': {} + + '@types/d3-transition@3.0.9': + dependencies: + '@types/d3-selection': 3.0.11 + + '@types/d3-zoom@3.0.8': + dependencies: + '@types/d3-interpolate': 3.0.4 + '@types/d3-selection': 3.0.11 + + '@types/d3@7.4.3': + dependencies: + '@types/d3-array': 3.2.2 + '@types/d3-axis': 3.0.6 + '@types/d3-brush': 3.0.6 + '@types/d3-chord': 3.0.6 + '@types/d3-color': 3.1.3 + '@types/d3-contour': 3.0.6 + '@types/d3-delaunay': 6.0.4 + '@types/d3-dispatch': 3.0.7 + '@types/d3-drag': 3.0.7 + '@types/d3-dsv': 3.0.7 + '@types/d3-ease': 3.0.2 + '@types/d3-fetch': 3.0.7 + '@types/d3-force': 3.0.10 + '@types/d3-format': 3.0.4 + '@types/d3-geo': 3.1.0 
+ '@types/d3-hierarchy': 3.1.7 + '@types/d3-interpolate': 3.0.4 + '@types/d3-path': 3.1.1 + '@types/d3-polygon': 3.0.2 + '@types/d3-quadtree': 3.0.6 + '@types/d3-random': 3.0.3 + '@types/d3-scale': 4.0.9 + '@types/d3-scale-chromatic': 3.1.0 + '@types/d3-selection': 3.0.11 + '@types/d3-shape': 3.1.8 + '@types/d3-time': 3.0.4 + '@types/d3-time-format': 4.0.3 + '@types/d3-timer': 3.0.2 + '@types/d3-transition': 3.0.9 + '@types/d3-zoom': 3.0.8 + + '@types/debug@4.1.12': + dependencies: + '@types/ms': 2.1.0 + + '@types/eslint-scope@3.7.7': + dependencies: + '@types/eslint': 9.6.1 + '@types/estree': 1.0.8 + + '@types/eslint@9.6.1': + dependencies: + '@types/estree': 1.0.8 + '@types/json-schema': 7.0.15 + + '@types/estree-jsx@1.0.5': + dependencies: + '@types/estree': 1.0.8 + + '@types/estree@1.0.8': {} + + '@types/express-serve-static-core@4.19.8': + dependencies: + '@types/node': 20.19.30 + '@types/qs': 6.14.0 + '@types/range-parser': 1.2.7 + '@types/send': 1.2.1 + + '@types/express@4.17.25': + dependencies: + '@types/body-parser': 1.19.6 + '@types/express-serve-static-core': 4.19.8 + '@types/qs': 6.14.0 + '@types/serve-static': 1.15.10 + + '@types/geojson@7946.0.16': {} + + '@types/gtag.js@0.0.12': {} + + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/history@4.7.11': {} + + '@types/html-minifier-terser@6.1.0': {} + + '@types/http-cache-semantics@4.2.0': {} + + '@types/http-errors@2.0.5': {} + + '@types/http-proxy@1.17.17': + dependencies: + '@types/node': 20.19.30 + + '@types/istanbul-lib-coverage@2.0.6': {} + + '@types/istanbul-lib-report@3.0.3': + dependencies: + '@types/istanbul-lib-coverage': 2.0.6 + + '@types/istanbul-reports@3.0.4': + dependencies: + '@types/istanbul-lib-report': 3.0.3 + + '@types/json-schema@7.0.15': {} + + '@types/mdast@4.0.4': + dependencies: + '@types/unist': 3.0.3 + + '@types/mdx@2.0.13': {} + + '@types/mime@1.3.5': {} + + '@types/ms@2.1.0': {} + + '@types/node@12.20.55': {} + + '@types/node@17.0.45': {} + + 
'@types/node@20.19.30': + dependencies: + undici-types: 6.21.0 + + '@types/prismjs@1.26.5': {} + + '@types/prop-types@15.7.15': {} + + '@types/qs@6.14.0': {} + + '@types/range-parser@1.2.7': {} + + '@types/react-dom@18.3.7(@types/react@18.3.27)': + dependencies: + '@types/react': 18.3.27 + + '@types/react-router-config@5.0.11': + dependencies: + '@types/history': 4.7.11 + '@types/react': 18.3.27 + '@types/react-router': 5.1.20 + + '@types/react-router-dom@5.3.3': + dependencies: + '@types/history': 4.7.11 + '@types/react': 18.3.27 + '@types/react-router': 5.1.20 + + '@types/react-router@5.1.20': + dependencies: + '@types/history': 4.7.11 + '@types/react': 18.3.27 + + '@types/react@18.3.27': + dependencies: + '@types/prop-types': 15.7.15 + csstype: 3.2.3 + + '@types/retry@0.12.2': {} + + '@types/sax@1.2.7': + dependencies: + '@types/node': 20.19.30 + + '@types/send@0.17.6': + dependencies: + '@types/mime': 1.3.5 + '@types/node': 20.19.30 + + '@types/send@1.2.1': + dependencies: + '@types/node': 20.19.30 + + '@types/serve-index@1.9.4': + dependencies: + '@types/express': 4.17.25 + + '@types/serve-static@1.15.10': + dependencies: + '@types/http-errors': 2.0.5 + '@types/node': 20.19.30 + '@types/send': 0.17.6 + + '@types/sockjs@0.3.36': + dependencies: + '@types/node': 20.19.30 + + '@types/trusted-types@2.0.7': + optional: true + + '@types/unist@2.0.11': {} + + '@types/unist@3.0.3': {} + + '@types/ws@8.18.1': + dependencies: + '@types/node': 20.19.30 + + '@types/yargs-parser@21.0.3': {} + + '@types/yargs@17.0.35': + dependencies: + '@types/yargs-parser': 21.0.3 + + '@typescript-eslint/eslint-plugin@8.54.0(@typescript-eslint/parser@8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3))(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + dependencies: + '@eslint-community/regexpp': 4.12.2 + '@typescript-eslint/parser': 8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/scope-manager': 8.54.0 + '@typescript-eslint/type-utils': 
8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/utils': 8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.54.0 + eslint: 9.39.2(jiti@1.21.7) + ignore: 7.0.5 + natural-compare: 1.4.0 + ts-api-utils: 2.4.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/parser@8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/scope-manager': 8.54.0 + '@typescript-eslint/types': 8.54.0 + '@typescript-eslint/typescript-estree': 8.54.0(typescript@5.9.3) + '@typescript-eslint/visitor-keys': 8.54.0 + debug: 4.4.3 + eslint: 9.39.2(jiti@1.21.7) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/project-service@8.54.0(typescript@5.9.3)': + dependencies: + '@typescript-eslint/tsconfig-utils': 8.54.0(typescript@5.9.3) + '@typescript-eslint/types': 8.54.0 + debug: 4.4.3 + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/scope-manager@8.54.0': + dependencies: + '@typescript-eslint/types': 8.54.0 + '@typescript-eslint/visitor-keys': 8.54.0 + + '@typescript-eslint/tsconfig-utils@8.54.0(typescript@5.9.3)': + dependencies: + typescript: 5.9.3 + + '@typescript-eslint/type-utils@8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + dependencies: + '@typescript-eslint/types': 8.54.0 + '@typescript-eslint/typescript-estree': 8.54.0(typescript@5.9.3) + '@typescript-eslint/utils': 8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3) + debug: 4.4.3 + eslint: 9.39.2(jiti@1.21.7) + ts-api-utils: 2.4.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/types@8.54.0': {} + + '@typescript-eslint/typescript-estree@8.54.0(typescript@5.9.3)': + dependencies: + '@typescript-eslint/project-service': 8.54.0(typescript@5.9.3) + '@typescript-eslint/tsconfig-utils': 8.54.0(typescript@5.9.3) + 
'@typescript-eslint/types': 8.54.0 + '@typescript-eslint/visitor-keys': 8.54.0 + debug: 4.4.3 + minimatch: 9.0.5 + semver: 7.7.3 + tinyglobby: 0.2.15 + ts-api-utils: 2.4.0(typescript@5.9.3) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/utils@8.54.0(eslint@9.39.2(jiti@1.21.7))(typescript@5.9.3)': + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) + '@typescript-eslint/scope-manager': 8.54.0 + '@typescript-eslint/types': 8.54.0 + '@typescript-eslint/typescript-estree': 8.54.0(typescript@5.9.3) + eslint: 9.39.2(jiti@1.21.7) + typescript: 5.9.3 + transitivePeerDependencies: + - supports-color + + '@typescript-eslint/visitor-keys@8.54.0': + dependencies: + '@typescript-eslint/types': 8.54.0 + eslint-visitor-keys: 4.2.1 + + '@ungap/structured-clone@1.3.0': {} + + '@vitest/coverage-v8@2.1.9(vitest@2.1.9(@types/node@20.19.30)(terser@5.46.0))': + dependencies: + '@ampproject/remapping': 2.3.0 + '@bcoe/v8-coverage': 0.2.3 + debug: 4.4.3 + istanbul-lib-coverage: 3.2.2 + istanbul-lib-report: 3.0.1 + istanbul-lib-source-maps: 5.0.6 + istanbul-reports: 3.2.0 + magic-string: 0.30.21 + magicast: 0.3.5 + std-env: 3.10.0 + test-exclude: 7.0.1 + tinyrainbow: 1.2.0 + vitest: 2.1.9(@types/node@20.19.30)(terser@5.46.0) + transitivePeerDependencies: + - supports-color + + '@vitest/expect@2.1.9': + dependencies: + '@vitest/spy': 2.1.9 + '@vitest/utils': 2.1.9 + chai: 5.3.3 + tinyrainbow: 1.2.0 + + '@vitest/mocker@2.1.9(vite@5.4.21(@types/node@20.19.30)(terser@5.46.0))': + dependencies: + '@vitest/spy': 2.1.9 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + vite: 5.4.21(@types/node@20.19.30)(terser@5.46.0) + + '@vitest/pretty-format@2.1.9': + dependencies: + tinyrainbow: 1.2.0 + + '@vitest/runner@2.1.9': + dependencies: + '@vitest/utils': 2.1.9 + pathe: 1.1.2 + + '@vitest/snapshot@2.1.9': + dependencies: + '@vitest/pretty-format': 2.1.9 + magic-string: 0.30.21 + pathe: 1.1.2 + + 
'@vitest/spy@2.1.9': + dependencies: + tinyspy: 3.0.2 + + '@vitest/utils@2.1.9': + dependencies: + '@vitest/pretty-format': 2.1.9 + loupe: 3.2.1 + tinyrainbow: 1.2.0 + + '@webassemblyjs/ast@1.14.1': + dependencies: + '@webassemblyjs/helper-numbers': 1.13.2 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + + '@webassemblyjs/floating-point-hex-parser@1.13.2': {} + + '@webassemblyjs/helper-api-error@1.13.2': {} + + '@webassemblyjs/helper-buffer@1.14.1': {} + + '@webassemblyjs/helper-numbers@1.13.2': + dependencies: + '@webassemblyjs/floating-point-hex-parser': 1.13.2 + '@webassemblyjs/helper-api-error': 1.13.2 + '@xtuc/long': 4.2.2 + + '@webassemblyjs/helper-wasm-bytecode@1.13.2': {} + + '@webassemblyjs/helper-wasm-section@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/wasm-gen': 1.14.1 + + '@webassemblyjs/ieee754@1.13.2': + dependencies: + '@xtuc/ieee754': 1.2.0 + + '@webassemblyjs/leb128@1.13.2': + dependencies: + '@xtuc/long': 4.2.2 + + '@webassemblyjs/utf8@1.13.2': {} + + '@webassemblyjs/wasm-edit@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/helper-wasm-section': 1.14.1 + '@webassemblyjs/wasm-gen': 1.14.1 + '@webassemblyjs/wasm-opt': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + '@webassemblyjs/wast-printer': 1.14.1 + + '@webassemblyjs/wasm-gen@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/ieee754': 1.13.2 + '@webassemblyjs/leb128': 1.13.2 + '@webassemblyjs/utf8': 1.13.2 + + '@webassemblyjs/wasm-opt@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/helper-buffer': 1.14.1 + '@webassemblyjs/wasm-gen': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + + '@webassemblyjs/wasm-parser@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + 
'@webassemblyjs/helper-api-error': 1.13.2 + '@webassemblyjs/helper-wasm-bytecode': 1.13.2 + '@webassemblyjs/ieee754': 1.13.2 + '@webassemblyjs/leb128': 1.13.2 + '@webassemblyjs/utf8': 1.13.2 + + '@webassemblyjs/wast-printer@1.14.1': + dependencies: + '@webassemblyjs/ast': 1.14.1 + '@xtuc/long': 4.2.2 + + '@xtuc/ieee754@1.2.0': {} + + '@xtuc/long@4.2.2': {} + + accepts@1.3.8: + dependencies: + mime-types: 2.1.35 + negotiator: 0.6.3 + + acorn-import-phases@1.0.4(acorn@8.15.0): + dependencies: + acorn: 8.15.0 + + acorn-jsx@5.3.2(acorn@8.15.0): + dependencies: + acorn: 8.15.0 + + acorn-walk@8.3.4: + dependencies: + acorn: 8.15.0 + + acorn@8.15.0: {} + + address@1.2.2: {} + + aggregate-error@3.1.0: + dependencies: + clean-stack: 2.2.0 + indent-string: 4.0.0 + + ajv-formats@2.1.1(ajv@8.17.1): + optionalDependencies: + ajv: 8.17.1 + + ajv-keywords@3.5.2(ajv@6.12.6): + dependencies: + ajv: 6.12.6 + + ajv-keywords@5.1.0(ajv@8.17.1): + dependencies: + ajv: 8.17.1 + fast-deep-equal: 3.1.3 + + ajv@6.12.6: + dependencies: + fast-deep-equal: 3.1.3 + fast-json-stable-stringify: 2.1.0 + json-schema-traverse: 0.4.1 + uri-js: 4.4.1 + + ajv@8.17.1: + dependencies: + fast-deep-equal: 3.1.3 + fast-uri: 3.1.0 + json-schema-traverse: 1.0.0 + require-from-string: 2.0.2 + + algoliasearch-helper@3.27.0(algoliasearch@5.47.0): + dependencies: + '@algolia/events': 4.0.1 + algoliasearch: 5.47.0 + + algoliasearch@5.47.0: + dependencies: + '@algolia/abtesting': 1.13.0 + '@algolia/client-abtesting': 5.47.0 + '@algolia/client-analytics': 5.47.0 + '@algolia/client-common': 5.47.0 + '@algolia/client-insights': 5.47.0 + '@algolia/client-personalization': 5.47.0 + '@algolia/client-query-suggestions': 5.47.0 + '@algolia/client-search': 5.47.0 + '@algolia/ingestion': 1.47.0 + '@algolia/monitoring': 1.47.0 + '@algolia/recommend': 5.47.0 + '@algolia/requester-browser-xhr': 5.47.0 + '@algolia/requester-fetch': 5.47.0 + '@algolia/requester-node-http': 5.47.0 + + ansi-align@3.0.1: + dependencies: + 
string-width: 4.2.3 + + ansi-colors@4.1.3: {} + + ansi-escapes@4.3.2: + dependencies: + type-fest: 0.21.3 + + ansi-html-community@0.0.8: {} + + ansi-regex@5.0.1: {} + + ansi-regex@6.2.2: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + + ansi-styles@6.2.3: {} + + any-promise@1.3.0: {} + + anymatch@3.1.3: + dependencies: + normalize-path: 3.0.0 + picomatch: 2.3.1 + + arg@5.0.2: {} + + argparse@1.0.10: + dependencies: + sprintf-js: 1.0.3 + + argparse@2.0.1: {} + + array-flatten@1.1.1: {} + + array-union@2.1.0: {} + + asn1js@3.0.7: + dependencies: + pvtsutils: 1.3.6 + pvutils: 1.1.5 + tslib: 2.8.1 + + assertion-error@2.0.1: {} + + astring@1.9.0: {} + + autoprefixer@10.4.24(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + caniuse-lite: 1.0.30001766 + fraction.js: 5.3.4 + picocolors: 1.1.1 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + babel-loader@9.2.1(@babel/core@7.29.0)(webpack@5.104.1): + dependencies: + '@babel/core': 7.29.0 + find-cache-dir: 4.0.0 + schema-utils: 4.3.3 + webpack: 5.104.1 + + babel-plugin-dynamic-import-node@2.3.3: + dependencies: + object.assign: 4.1.7 + + babel-plugin-polyfill-corejs2@0.4.15(@babel/core@7.29.0): + dependencies: + '@babel/compat-data': 7.29.0 + '@babel/core': 7.29.0 + '@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.29.0) + semver: 6.3.1 + transitivePeerDependencies: + - supports-color + + babel-plugin-polyfill-corejs3@0.13.0(@babel/core@7.29.0): + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.29.0) + core-js-compat: 3.48.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-polyfill-corejs3@0.14.0(@babel/core@7.29.0): + dependencies: + '@babel/core': 7.29.0 + '@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.29.0) + core-js-compat: 3.48.0 + transitivePeerDependencies: + - supports-color + + babel-plugin-polyfill-regenerator@0.6.6(@babel/core@7.29.0): + dependencies: + '@babel/core': 7.29.0 + 
'@babel/helper-define-polyfill-provider': 0.6.6(@babel/core@7.29.0) + transitivePeerDependencies: + - supports-color + + bail@2.0.2: {} + + balanced-match@1.0.2: {} + + baseline-browser-mapping@2.9.19: {} + + batch@0.6.1: {} + + better-path-resolve@1.0.0: + dependencies: + is-windows: 1.0.2 + + big.js@5.2.2: {} + + binary-extensions@2.3.0: {} + + body-parser@1.20.4: + dependencies: + bytes: 3.1.2 + content-type: 1.0.5 + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + http-errors: 2.0.1 + iconv-lite: 0.4.24 + on-finished: 2.4.1 + qs: 6.14.1 + raw-body: 2.5.3 + type-is: 1.6.18 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + bonjour-service@1.3.0: + dependencies: + fast-deep-equal: 3.1.3 + multicast-dns: 7.2.5 + + boolbase@1.0.0: {} + + boxen@6.2.1: + dependencies: + ansi-align: 3.0.1 + camelcase: 6.3.0 + chalk: 4.1.2 + cli-boxes: 3.0.0 + string-width: 5.1.2 + type-fest: 2.19.0 + widest-line: 4.0.1 + wrap-ansi: 8.1.0 + + boxen@7.1.1: + dependencies: + ansi-align: 3.0.1 + camelcase: 7.0.1 + chalk: 5.6.2 + cli-boxes: 3.0.0 + string-width: 5.1.2 + type-fest: 2.19.0 + widest-line: 4.0.1 + wrap-ansi: 8.1.0 + + brace-expansion@1.1.12: + dependencies: + balanced-match: 1.0.2 + concat-map: 0.0.1 + + brace-expansion@2.0.2: + dependencies: + balanced-match: 1.0.2 + + braces@3.0.3: + dependencies: + fill-range: 7.1.1 + + browserslist@4.28.1: + dependencies: + baseline-browser-mapping: 2.9.19 + caniuse-lite: 1.0.30001766 + electron-to-chromium: 1.5.283 + node-releases: 2.0.27 + update-browserslist-db: 1.2.3(browserslist@4.28.1) + + buffer-from@1.1.2: {} + + bundle-name@4.1.0: + dependencies: + run-applescript: 7.1.0 + + bundle-require@5.1.0(esbuild@0.27.2): + dependencies: + esbuild: 0.27.2 + load-tsconfig: 0.2.5 + + bytes@3.0.0: {} + + bytes@3.1.2: {} + + bytestreamjs@2.0.1: {} + + cac@6.7.14: {} + + cacheable-lookup@7.0.0: {} + + cacheable-request@10.2.14: + dependencies: + '@types/http-cache-semantics': 4.2.0 + get-stream: 6.0.1 + http-cache-semantics: 4.2.0 
+ keyv: 4.5.4 + mimic-response: 4.0.0 + normalize-url: 8.1.1 + responselike: 3.0.0 + + call-bind-apply-helpers@1.0.2: + dependencies: + es-errors: 1.3.0 + function-bind: 1.1.2 + + call-bind@1.0.8: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + get-intrinsic: 1.3.0 + set-function-length: 1.2.2 + + call-bound@1.0.4: + dependencies: + call-bind-apply-helpers: 1.0.2 + get-intrinsic: 1.3.0 + + callsites@3.1.0: {} + + camel-case@4.1.2: + dependencies: + pascal-case: 3.1.2 + tslib: 2.8.1 + + camelcase@6.3.0: {} + + camelcase@7.0.1: {} + + caniuse-api@3.0.0: + dependencies: + browserslist: 4.28.1 + caniuse-lite: 1.0.30001766 + lodash.memoize: 4.1.2 + lodash.uniq: 4.5.0 + + caniuse-lite@1.0.30001766: {} + + ccount@2.0.1: {} + + chai@5.3.3: + dependencies: + assertion-error: 2.0.1 + check-error: 2.1.3 + deep-eql: 5.0.2 + loupe: 3.2.1 + pathval: 2.0.1 + + chalk@4.1.2: + dependencies: + ansi-styles: 4.3.0 + supports-color: 7.2.0 + + chalk@5.6.2: {} + + char-regex@1.0.2: {} + + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + + character-entities@2.0.2: {} + + character-reference-invalid@2.0.1: {} + + chardet@2.1.1: {} + + check-error@2.1.3: {} + + cheerio-select@2.1.0: + dependencies: + boolbase: 1.0.0 + css-select: 5.2.2 + css-what: 6.2.2 + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.2.2 + + cheerio@1.0.0-rc.12: + dependencies: + cheerio-select: 2.1.0 + dom-serializer: 2.0.0 + domhandler: 5.0.3 + domutils: 3.2.2 + htmlparser2: 8.0.2 + parse5: 7.3.0 + parse5-htmlparser2-tree-adapter: 7.1.0 + + cheerio@1.2.0: + dependencies: + cheerio-select: 2.1.0 + dom-serializer: 2.0.0 + domhandler: 5.0.3 + domutils: 3.2.2 + encoding-sniffer: 0.2.1 + htmlparser2: 10.1.0 + parse5: 7.3.0 + parse5-htmlparser2-tree-adapter: 7.1.0 + parse5-parser-stream: 7.1.2 + undici: 7.19.2 + whatwg-mimetype: 4.0.0 + + chevrotain-allstar@0.3.1(chevrotain@11.0.3): + dependencies: + chevrotain: 11.0.3 + lodash-es: 4.17.23 + + 
chevrotain@11.0.3: + dependencies: + '@chevrotain/cst-dts-gen': 11.0.3 + '@chevrotain/gast': 11.0.3 + '@chevrotain/regexp-to-ast': 11.0.3 + '@chevrotain/types': 11.0.3 + '@chevrotain/utils': 11.0.3 + lodash-es: 4.17.21 + + chokidar@3.6.0: + dependencies: + anymatch: 3.1.3 + braces: 3.0.3 + glob-parent: 5.1.2 + is-binary-path: 2.1.0 + is-glob: 4.0.3 + normalize-path: 3.0.0 + readdirp: 3.6.0 + optionalDependencies: + fsevents: 2.3.3 + + chokidar@4.0.3: + dependencies: + readdirp: 4.1.2 + + chrome-trace-event@1.0.4: {} + + ci-info@3.9.0: {} + + clean-css@5.3.3: + dependencies: + source-map: 0.6.1 + + clean-stack@2.2.0: {} + + cli-boxes@3.0.0: {} + + cli-table3@0.6.5: + dependencies: + string-width: 4.2.3 + optionalDependencies: + '@colors/colors': 1.5.0 + + clone-deep@4.0.1: + dependencies: + is-plain-object: 2.0.4 + kind-of: 6.0.3 + shallow-clone: 3.0.1 + + clsx@2.1.1: {} + + collapse-white-space@2.1.0: {} + + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + + colord@2.9.3: {} + + colorette@2.0.20: {} + + combine-promises@1.2.0: {} + + comlink@4.4.2: {} + + comma-separated-tokens@2.0.3: {} + + commander@10.0.1: {} + + commander@2.20.3: {} + + commander@4.1.1: {} + + commander@5.1.0: {} + + commander@7.2.0: {} + + commander@8.3.0: {} + + common-path-prefix@3.0.0: {} + + compressible@2.0.18: + dependencies: + mime-db: 1.54.0 + + compression@1.8.1: + dependencies: + bytes: 3.1.2 + compressible: 2.0.18 + debug: 2.6.9 + negotiator: 0.6.4 + on-headers: 1.1.0 + safe-buffer: 5.2.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + + concat-map@0.0.1: {} + + confbox@0.1.8: {} + + config-chain@1.1.13: + dependencies: + ini: 1.3.8 + proto-list: 1.2.4 + + configstore@6.0.0: + dependencies: + dot-prop: 6.0.1 + graceful-fs: 4.2.11 + unique-string: 3.0.0 + write-file-atomic: 3.0.3 + xdg-basedir: 5.1.0 + + connect-history-api-fallback@2.0.0: {} + + consola@3.4.2: {} + + content-disposition@0.5.2: {} + + content-disposition@0.5.4: + 
dependencies: + safe-buffer: 5.2.1 + + content-type@1.0.5: {} + + convert-source-map@2.0.0: {} + + cookie-signature@1.0.7: {} + + cookie@0.7.2: {} + + copy-webpack-plugin@11.0.0(webpack@5.104.1): + dependencies: + fast-glob: 3.3.3 + glob-parent: 6.0.2 + globby: 13.2.2 + normalize-path: 3.0.0 + schema-utils: 4.3.3 + serialize-javascript: 6.0.2 + webpack: 5.104.1 + + core-js-compat@3.48.0: + dependencies: + browserslist: 4.28.1 + + core-js-pure@3.48.0: {} + + core-js@3.48.0: {} + + core-util-is@1.0.3: {} + + cose-base@1.0.3: + dependencies: + layout-base: 1.0.2 + + cose-base@2.2.0: + dependencies: + layout-base: 2.0.1 + + cosmiconfig@8.3.6(typescript@5.9.3): + dependencies: + import-fresh: 3.3.1 + js-yaml: 4.1.1 + parse-json: 5.2.0 + path-type: 4.0.0 + optionalDependencies: + typescript: 5.9.3 + + cross-spawn@7.0.6: + dependencies: + path-key: 3.1.1 + shebang-command: 2.0.0 + which: 2.0.2 + + crypto-random-string@4.0.0: + dependencies: + type-fest: 1.4.0 + + css-blank-pseudo@7.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + css-declaration-sorter@7.3.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + css-has-pseudo@7.0.3(postcss@8.5.6): + dependencies: + '@csstools/selector-specificity': 5.0.0(postcss-selector-parser@7.1.1) + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + postcss-value-parser: 4.2.0 + + css-loader@6.11.0(webpack@5.104.1): + dependencies: + icss-utils: 5.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-modules-extract-imports: 3.1.0(postcss@8.5.6) + postcss-modules-local-by-default: 4.2.0(postcss@8.5.6) + postcss-modules-scope: 3.2.1(postcss@8.5.6) + postcss-modules-values: 4.0.0(postcss@8.5.6) + postcss-value-parser: 4.2.0 + semver: 7.7.3 + optionalDependencies: + webpack: 5.104.1 + + css-minimizer-webpack-plugin@5.0.1(clean-css@5.3.3)(webpack@5.104.1): + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + cssnano: 6.1.2(postcss@8.5.6) + jest-worker: 29.7.0 + postcss: 8.5.6 + schema-utils: 4.3.3 + 
serialize-javascript: 6.0.2 + webpack: 5.104.1 + optionalDependencies: + clean-css: 5.3.3 + + css-prefers-color-scheme@10.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + css-select@4.3.0: + dependencies: + boolbase: 1.0.0 + css-what: 6.2.2 + domhandler: 4.3.1 + domutils: 2.8.0 + nth-check: 2.1.1 + + css-select@5.2.2: + dependencies: + boolbase: 1.0.0 + css-what: 6.2.2 + domhandler: 5.0.3 + domutils: 3.2.2 + nth-check: 2.1.1 + + css-tree@2.2.1: + dependencies: + mdn-data: 2.0.28 + source-map-js: 1.2.1 + + css-tree@2.3.1: + dependencies: + mdn-data: 2.0.30 + source-map-js: 1.2.1 + + css-what@6.2.2: {} + + cssdb@8.7.1: {} + + cssesc@3.0.0: {} + + cssnano-preset-advanced@6.1.2(postcss@8.5.6): + dependencies: + autoprefixer: 10.4.24(postcss@8.5.6) + browserslist: 4.28.1 + cssnano-preset-default: 6.1.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-discard-unused: 6.0.5(postcss@8.5.6) + postcss-merge-idents: 6.0.3(postcss@8.5.6) + postcss-reduce-idents: 6.0.3(postcss@8.5.6) + postcss-zindex: 6.0.2(postcss@8.5.6) + + cssnano-preset-default@6.1.2(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + css-declaration-sorter: 7.3.1(postcss@8.5.6) + cssnano-utils: 4.0.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-calc: 9.0.1(postcss@8.5.6) + postcss-colormin: 6.1.0(postcss@8.5.6) + postcss-convert-values: 6.1.0(postcss@8.5.6) + postcss-discard-comments: 6.0.2(postcss@8.5.6) + postcss-discard-duplicates: 6.0.3(postcss@8.5.6) + postcss-discard-empty: 6.0.3(postcss@8.5.6) + postcss-discard-overridden: 6.0.2(postcss@8.5.6) + postcss-merge-longhand: 6.0.5(postcss@8.5.6) + postcss-merge-rules: 6.1.1(postcss@8.5.6) + postcss-minify-font-values: 6.1.0(postcss@8.5.6) + postcss-minify-gradients: 6.0.3(postcss@8.5.6) + postcss-minify-params: 6.1.0(postcss@8.5.6) + postcss-minify-selectors: 6.0.4(postcss@8.5.6) + postcss-normalize-charset: 6.0.2(postcss@8.5.6) + postcss-normalize-display-values: 6.0.2(postcss@8.5.6) + postcss-normalize-positions: 6.0.2(postcss@8.5.6) + 
postcss-normalize-repeat-style: 6.0.2(postcss@8.5.6) + postcss-normalize-string: 6.0.2(postcss@8.5.6) + postcss-normalize-timing-functions: 6.0.2(postcss@8.5.6) + postcss-normalize-unicode: 6.1.0(postcss@8.5.6) + postcss-normalize-url: 6.0.2(postcss@8.5.6) + postcss-normalize-whitespace: 6.0.2(postcss@8.5.6) + postcss-ordered-values: 6.0.2(postcss@8.5.6) + postcss-reduce-initial: 6.1.0(postcss@8.5.6) + postcss-reduce-transforms: 6.0.2(postcss@8.5.6) + postcss-svgo: 6.0.3(postcss@8.5.6) + postcss-unique-selectors: 6.0.4(postcss@8.5.6) + + cssnano-utils@4.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + cssnano@6.1.2(postcss@8.5.6): + dependencies: + cssnano-preset-default: 6.1.2(postcss@8.5.6) + lilconfig: 3.1.3 + postcss: 8.5.6 + + csso@5.0.5: + dependencies: + css-tree: 2.2.1 + + csstype@3.2.3: {} + + cytoscape-cose-bilkent@4.1.0(cytoscape@3.33.1): + dependencies: + cose-base: 1.0.3 + cytoscape: 3.33.1 + + cytoscape-fcose@2.2.0(cytoscape@3.33.1): + dependencies: + cose-base: 2.2.0 + cytoscape: 3.33.1 + + cytoscape@3.33.1: {} + + d3-array@2.12.1: + dependencies: + internmap: 1.0.1 + + d3-array@3.2.4: + dependencies: + internmap: 2.0.3 + + d3-axis@3.0.0: {} + + d3-brush@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3-chord@3.0.1: + dependencies: + d3-path: 3.1.0 + + d3-color@3.1.0: {} + + d3-contour@4.0.2: + dependencies: + d3-array: 3.2.4 + + d3-delaunay@6.0.4: + dependencies: + delaunator: 5.0.1 + + d3-dispatch@3.0.1: {} + + d3-drag@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-selection: 3.0.0 + + d3-dsv@3.0.1: + dependencies: + commander: 7.2.0 + iconv-lite: 0.6.3 + rw: 1.3.3 + + d3-ease@3.0.1: {} + + d3-fetch@3.0.1: + dependencies: + d3-dsv: 3.0.1 + + d3-force@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-quadtree: 3.0.1 + d3-timer: 3.0.1 + + d3-format@3.1.2: {} + + d3-geo@3.1.1: + dependencies: + d3-array: 3.2.4 + + d3-hierarchy@3.1.2: {} 
+ + d3-interpolate@3.0.1: + dependencies: + d3-color: 3.1.0 + + d3-path@1.0.9: {} + + d3-path@3.1.0: {} + + d3-polygon@3.0.1: {} + + d3-quadtree@3.0.1: {} + + d3-random@3.0.1: {} + + d3-sankey@0.12.3: + dependencies: + d3-array: 2.12.1 + d3-shape: 1.3.7 + + d3-scale-chromatic@3.1.0: + dependencies: + d3-color: 3.1.0 + d3-interpolate: 3.0.1 + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.2 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-selection@3.0.0: {} + + d3-shape@1.3.7: + dependencies: + d3-path: 1.0.9 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + d3-timer@3.0.1: {} + + d3-transition@3.0.1(d3-selection@3.0.0): + dependencies: + d3-color: 3.1.0 + d3-dispatch: 3.0.1 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-timer: 3.0.1 + + d3-zoom@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3@7.9.0: + dependencies: + d3-array: 3.2.4 + d3-axis: 3.0.0 + d3-brush: 3.0.0 + d3-chord: 3.0.1 + d3-color: 3.1.0 + d3-contour: 4.0.2 + d3-delaunay: 6.0.4 + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-dsv: 3.0.1 + d3-ease: 3.0.1 + d3-fetch: 3.0.1 + d3-force: 3.0.0 + d3-format: 3.1.2 + d3-geo: 3.1.1 + d3-hierarchy: 3.1.2 + d3-interpolate: 3.0.1 + d3-path: 3.1.0 + d3-polygon: 3.0.1 + d3-quadtree: 3.0.1 + d3-random: 3.0.1 + d3-scale: 4.0.2 + d3-scale-chromatic: 3.1.0 + d3-selection: 3.0.0 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + d3-timer: 3.0.1 + d3-transition: 3.0.1(d3-selection@3.0.0) + d3-zoom: 3.0.0 + + dagre-d3-es@7.0.13: + dependencies: + d3: 7.9.0 + lodash-es: 4.17.23 + + dayjs@1.11.19: {} + + debounce@1.2.1: {} + + debug@2.6.9: + dependencies: + ms: 2.0.0 + + debug@4.4.3: + dependencies: + ms: 2.1.3 + + decode-named-character-reference@1.3.0: + dependencies: + 
character-entities: 2.0.2 + + decompress-response@6.0.0: + dependencies: + mimic-response: 3.1.0 + + deep-eql@5.0.2: {} + + deep-extend@0.6.0: {} + + deep-is@0.1.4: {} + + deepmerge@4.3.1: {} + + default-browser-id@5.0.1: {} + + default-browser@5.4.0: + dependencies: + bundle-name: 4.1.0 + default-browser-id: 5.0.1 + + defer-to-connect@2.0.1: {} + + define-data-property@1.1.4: + dependencies: + es-define-property: 1.0.1 + es-errors: 1.3.0 + gopd: 1.2.0 + + define-lazy-prop@2.0.0: {} + + define-lazy-prop@3.0.0: {} + + define-properties@1.2.1: + dependencies: + define-data-property: 1.1.4 + has-property-descriptors: 1.0.2 + object-keys: 1.1.1 + + delaunator@5.0.1: + dependencies: + robust-predicates: 3.0.2 + + depd@1.1.2: {} + + depd@2.0.0: {} + + dequal@2.0.3: {} + + destroy@1.2.0: {} + + detect-indent@6.1.0: {} + + detect-node@2.1.0: {} + + detect-port@1.6.1: + dependencies: + address: 1.2.2 + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + + devlop@1.1.0: + dependencies: + dequal: 2.0.3 + + dir-glob@3.0.1: + dependencies: + path-type: 4.0.0 + + dns-packet@5.6.1: + dependencies: + '@leichtgewicht/ip-codec': 2.0.5 + + dom-converter@0.2.0: + dependencies: + utila: 0.4.0 + + dom-serializer@1.4.1: + dependencies: + domelementtype: 2.3.0 + domhandler: 4.3.1 + entities: 2.2.0 + + dom-serializer@2.0.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + entities: 4.5.0 + + domelementtype@2.3.0: {} + + domhandler@4.3.1: + dependencies: + domelementtype: 2.3.0 + + domhandler@5.0.3: + dependencies: + domelementtype: 2.3.0 + + dompurify@3.3.1: + optionalDependencies: + '@types/trusted-types': 2.0.7 + + domutils@2.8.0: + dependencies: + dom-serializer: 1.4.1 + domelementtype: 2.3.0 + domhandler: 4.3.1 + + domutils@3.2.2: + dependencies: + dom-serializer: 2.0.0 + domelementtype: 2.3.0 + domhandler: 5.0.3 + + dot-case@3.0.4: + dependencies: + no-case: 3.0.4 + tslib: 2.8.1 + + dot-prop@6.0.1: + dependencies: + is-obj: 2.0.0 + + dotenv@16.6.1: {} + + 
dunder-proto@1.0.1: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-errors: 1.3.0 + gopd: 1.2.0 + + duplexer@0.1.2: {} + + eastasianwidth@0.2.0: {} + + ee-first@1.1.1: {} + + electron-to-chromium@1.5.283: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + emojilib@2.4.0: {} + + emojis-list@3.0.0: {} + + emoticon@4.1.0: {} + + encodeurl@2.0.0: {} + + encoding-sniffer@0.2.1: + dependencies: + iconv-lite: 0.6.3 + whatwg-encoding: 3.1.1 + + enhanced-resolve@5.18.4: + dependencies: + graceful-fs: 4.2.11 + tapable: 2.3.0 + + enquirer@2.4.1: + dependencies: + ansi-colors: 4.1.3 + strip-ansi: 6.0.1 + + entities@2.2.0: {} + + entities@4.5.0: {} + + entities@6.0.1: {} + + entities@7.0.1: {} + + error-ex@1.3.4: + dependencies: + is-arrayish: 0.2.1 + + es-define-property@1.0.1: {} + + es-errors@1.3.0: {} + + es-module-lexer@1.7.0: {} + + es-module-lexer@2.0.0: {} + + es-object-atoms@1.1.1: + dependencies: + es-errors: 1.3.0 + + esast-util-from-estree@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + devlop: 1.1.0 + estree-util-visit: 2.0.0 + unist-util-position-from-estree: 2.0.0 + + esast-util-from-js@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + acorn: 8.15.0 + esast-util-from-estree: 2.0.0 + vfile-message: 4.0.3 + + esbuild@0.21.5: + optionalDependencies: + '@esbuild/aix-ppc64': 0.21.5 + '@esbuild/android-arm': 0.21.5 + '@esbuild/android-arm64': 0.21.5 + '@esbuild/android-x64': 0.21.5 + '@esbuild/darwin-arm64': 0.21.5 + '@esbuild/darwin-x64': 0.21.5 + '@esbuild/freebsd-arm64': 0.21.5 + '@esbuild/freebsd-x64': 0.21.5 + '@esbuild/linux-arm': 0.21.5 + '@esbuild/linux-arm64': 0.21.5 + '@esbuild/linux-ia32': 0.21.5 + '@esbuild/linux-loong64': 0.21.5 + '@esbuild/linux-mips64el': 0.21.5 + '@esbuild/linux-ppc64': 0.21.5 + '@esbuild/linux-riscv64': 0.21.5 + '@esbuild/linux-s390x': 0.21.5 + '@esbuild/linux-x64': 0.21.5 + '@esbuild/netbsd-x64': 0.21.5 + '@esbuild/openbsd-x64': 0.21.5 + '@esbuild/sunos-x64': 0.21.5 + '@esbuild/win32-arm64': 0.21.5 + 
'@esbuild/win32-ia32': 0.21.5 + '@esbuild/win32-x64': 0.21.5 + + esbuild@0.27.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.27.2 + '@esbuild/android-arm': 0.27.2 + '@esbuild/android-arm64': 0.27.2 + '@esbuild/android-x64': 0.27.2 + '@esbuild/darwin-arm64': 0.27.2 + '@esbuild/darwin-x64': 0.27.2 + '@esbuild/freebsd-arm64': 0.27.2 + '@esbuild/freebsd-x64': 0.27.2 + '@esbuild/linux-arm': 0.27.2 + '@esbuild/linux-arm64': 0.27.2 + '@esbuild/linux-ia32': 0.27.2 + '@esbuild/linux-loong64': 0.27.2 + '@esbuild/linux-mips64el': 0.27.2 + '@esbuild/linux-ppc64': 0.27.2 + '@esbuild/linux-riscv64': 0.27.2 + '@esbuild/linux-s390x': 0.27.2 + '@esbuild/linux-x64': 0.27.2 + '@esbuild/netbsd-arm64': 0.27.2 + '@esbuild/netbsd-x64': 0.27.2 + '@esbuild/openbsd-arm64': 0.27.2 + '@esbuild/openbsd-x64': 0.27.2 + '@esbuild/openharmony-arm64': 0.27.2 + '@esbuild/sunos-x64': 0.27.2 + '@esbuild/win32-arm64': 0.27.2 + '@esbuild/win32-ia32': 0.27.2 + '@esbuild/win32-x64': 0.27.2 + + escalade@3.2.0: {} + + escape-goat@4.0.0: {} + + escape-html@1.0.3: {} + + escape-string-regexp@1.0.5: {} + + escape-string-regexp@4.0.0: {} + + escape-string-regexp@5.0.0: {} + + eslint-scope@5.1.1: + dependencies: + esrecurse: 4.3.0 + estraverse: 4.3.0 + + eslint-scope@8.4.0: + dependencies: + esrecurse: 4.3.0 + estraverse: 5.3.0 + + eslint-visitor-keys@3.4.3: {} + + eslint-visitor-keys@4.2.1: {} + + eslint@9.39.2(jiti@1.21.7): + dependencies: + '@eslint-community/eslint-utils': 4.9.1(eslint@9.39.2(jiti@1.21.7)) + '@eslint-community/regexpp': 4.12.2 + '@eslint/config-array': 0.21.1 + '@eslint/config-helpers': 0.4.2 + '@eslint/core': 0.17.0 + '@eslint/eslintrc': 3.3.3 + '@eslint/js': 9.39.2 + '@eslint/plugin-kit': 0.4.1 + '@humanfs/node': 0.16.7 + '@humanwhocodes/module-importer': 1.0.1 + '@humanwhocodes/retry': 0.4.3 + '@types/estree': 1.0.8 + ajv: 6.12.6 + chalk: 4.1.2 + cross-spawn: 7.0.6 + debug: 4.4.3 + escape-string-regexp: 4.0.0 + eslint-scope: 8.4.0 + eslint-visitor-keys: 4.2.1 + espree: 10.4.0 + 
esquery: 1.7.0 + esutils: 2.0.3 + fast-deep-equal: 3.1.3 + file-entry-cache: 8.0.0 + find-up: 5.0.0 + glob-parent: 6.0.2 + ignore: 5.3.2 + imurmurhash: 0.1.4 + is-glob: 4.0.3 + json-stable-stringify-without-jsonify: 1.0.1 + lodash.merge: 4.6.2 + minimatch: 3.1.2 + natural-compare: 1.4.0 + optionator: 0.9.4 + optionalDependencies: + jiti: 1.21.7 + transitivePeerDependencies: + - supports-color + + espree@10.4.0: + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + eslint-visitor-keys: 4.2.1 + + esprima@4.0.1: {} + + esquery@1.7.0: + dependencies: + estraverse: 5.3.0 + + esrecurse@4.3.0: + dependencies: + estraverse: 5.3.0 + + estraverse@4.3.0: {} + + estraverse@5.3.0: {} + + estree-util-attach-comments@3.0.0: + dependencies: + '@types/estree': 1.0.8 + + estree-util-build-jsx@3.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + estree-walker: 3.0.3 + + estree-util-is-identifier-name@3.0.0: {} + + estree-util-scope@1.0.0: + dependencies: + '@types/estree': 1.0.8 + devlop: 1.1.0 + + estree-util-to-js@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + astring: 1.9.0 + source-map: 0.7.6 + + estree-util-value-to-estree@3.5.0: + dependencies: + '@types/estree': 1.0.8 + + estree-util-visit@2.0.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/unist': 3.0.3 + + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.8 + + esutils@2.0.3: {} + + eta@2.2.0: {} + + etag@1.8.1: {} + + eval@0.1.8: + dependencies: + '@types/node': 20.19.30 + require-like: 0.1.2 + + eventemitter3@4.0.7: {} + + events@3.3.0: {} + + execa@5.1.1: + dependencies: + cross-spawn: 7.0.6 + get-stream: 6.0.1 + human-signals: 2.1.0 + is-stream: 2.0.1 + merge-stream: 2.0.0 + npm-run-path: 4.0.1 + onetime: 5.1.2 + signal-exit: 3.0.7 + strip-final-newline: 2.0.0 + + expect-type@1.3.0: {} + + express@4.22.1: + dependencies: + accepts: 1.3.8 + array-flatten: 1.1.1 + body-parser: 1.20.4 + content-disposition: 0.5.4 + content-type: 
1.0.5 + cookie: 0.7.2 + cookie-signature: 1.0.7 + debug: 2.6.9 + depd: 2.0.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + finalhandler: 1.3.2 + fresh: 0.5.2 + http-errors: 2.0.1 + merge-descriptors: 1.0.3 + methods: 1.1.2 + on-finished: 2.4.1 + parseurl: 1.3.3 + path-to-regexp: 0.1.12 + proxy-addr: 2.0.7 + qs: 6.14.1 + range-parser: 1.2.1 + safe-buffer: 5.2.1 + send: 0.19.2 + serve-static: 1.16.3 + setprototypeof: 1.2.0 + statuses: 2.0.2 + type-is: 1.6.18 + utils-merge: 1.0.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + + extend-shallow@2.0.1: + dependencies: + is-extendable: 0.1.1 + + extend@3.0.2: {} + + extendable-error@0.1.7: {} + + fast-deep-equal@3.1.3: {} + + fast-glob@3.3.3: + dependencies: + '@nodelib/fs.stat': 2.0.5 + '@nodelib/fs.walk': 1.2.8 + glob-parent: 5.1.2 + merge2: 1.4.1 + micromatch: 4.0.8 + + fast-json-stable-stringify@2.1.0: {} + + fast-levenshtein@2.0.6: {} + + fast-uri@3.1.0: {} + + fastq@1.20.1: + dependencies: + reusify: 1.1.0 + + fault@2.0.1: + dependencies: + format: 0.2.2 + + faye-websocket@0.11.4: + dependencies: + websocket-driver: 0.7.4 + + fdir@6.5.0(picomatch@4.0.3): + optionalDependencies: + picomatch: 4.0.3 + + feed@4.2.2: + dependencies: + xml-js: 1.6.11 + + figures@3.2.0: + dependencies: + escape-string-regexp: 1.0.5 + + file-entry-cache@8.0.0: + dependencies: + flat-cache: 4.0.1 + + file-loader@6.2.0(webpack@5.104.1): + dependencies: + loader-utils: 2.0.4 + schema-utils: 3.3.0 + webpack: 5.104.1 + + fill-range@7.1.1: + dependencies: + to-regex-range: 5.0.1 + + finalhandler@1.3.2: + dependencies: + debug: 2.6.9 + encodeurl: 2.0.0 + escape-html: 1.0.3 + on-finished: 2.4.1 + parseurl: 1.3.3 + statuses: 2.0.2 + unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color + + find-cache-dir@4.0.0: + dependencies: + common-path-prefix: 3.0.0 + pkg-dir: 7.0.0 + + find-up@4.1.0: + dependencies: + locate-path: 5.0.0 + path-exists: 4.0.0 + + find-up@5.0.0: + dependencies: + locate-path: 6.0.0 + 
path-exists: 4.0.0 + + find-up@6.3.0: + dependencies: + locate-path: 7.2.0 + path-exists: 5.0.0 + + fix-dts-default-cjs-exports@1.0.1: + dependencies: + magic-string: 0.30.21 + mlly: 1.8.0 + rollup: 4.57.1 + + flat-cache@4.0.1: + dependencies: + flatted: 3.3.3 + keyv: 4.5.4 + + flat@5.0.2: {} + + flatted@3.3.3: {} + + follow-redirects@1.15.11(debug@4.4.3): + optionalDependencies: + debug: 4.4.3 + + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + form-data-encoder@2.1.4: {} + + format@0.2.2: {} + + forwarded@0.2.0: {} + + fraction.js@5.3.4: {} + + fresh@0.5.2: {} + + fs-extra@10.1.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.2.0 + universalify: 2.0.1 + + fs-extra@11.3.3: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 6.2.0 + universalify: 2.0.1 + + fs-extra@7.0.1: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 4.0.0 + universalify: 0.1.2 + + fs-extra@8.1.0: + dependencies: + graceful-fs: 4.2.11 + jsonfile: 4.0.0 + universalify: 0.1.2 + + fsevents@2.3.3: + optional: true + + function-bind@1.1.2: {} + + gensync@1.0.0-beta.2: {} + + get-intrinsic@1.3.0: + dependencies: + call-bind-apply-helpers: 1.0.2 + es-define-property: 1.0.1 + es-errors: 1.3.0 + es-object-atoms: 1.1.1 + function-bind: 1.1.2 + get-proto: 1.0.1 + gopd: 1.2.0 + has-symbols: 1.1.0 + hasown: 2.0.2 + math-intrinsics: 1.1.0 + + get-own-enumerable-property-symbols@3.0.2: {} + + get-proto@1.0.1: + dependencies: + dunder-proto: 1.0.1 + es-object-atoms: 1.1.1 + + get-stream@6.0.1: {} + + github-slugger@1.5.0: {} + + glob-parent@5.1.2: + dependencies: + is-glob: 4.0.3 + + glob-parent@6.0.2: + dependencies: + is-glob: 4.0.3 + + glob-to-regex.js@1.2.0(tslib@2.8.1): + dependencies: + tslib: 2.8.1 + + glob-to-regexp@0.4.1: {} + + glob@10.5.0: + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.5 + minipass: 7.1.2 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + + global-dirs@3.0.1: + dependencies: + ini: 2.0.0 + + 
globals@14.0.0: {} + + globby@11.1.0: + dependencies: + array-union: 2.1.0 + dir-glob: 3.0.1 + fast-glob: 3.3.3 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 3.0.0 + + globby@13.2.2: + dependencies: + dir-glob: 3.0.1 + fast-glob: 3.3.3 + ignore: 5.3.2 + merge2: 1.4.1 + slash: 4.0.0 + + gopd@1.2.0: {} + + got@12.6.1: + dependencies: + '@sindresorhus/is': 5.6.0 + '@szmarczak/http-timer': 5.0.1 + cacheable-lookup: 7.0.0 + cacheable-request: 10.2.14 + decompress-response: 6.0.0 + form-data-encoder: 2.1.4 + get-stream: 6.0.1 + http2-wrapper: 2.2.1 + lowercase-keys: 3.0.0 + p-cancelable: 3.0.0 + responselike: 3.0.0 + + graceful-fs@4.2.10: {} + + graceful-fs@4.2.11: {} + + gray-matter@4.0.3: + dependencies: + js-yaml: 3.14.2 + kind-of: 6.0.3 + section-matter: 1.0.0 + strip-bom-string: 1.0.0 + + gzip-size@6.0.0: + dependencies: + duplexer: 0.1.2 + + hachure-fill@0.5.2: {} + + handle-thing@2.0.1: {} + + has-flag@4.0.0: {} + + has-property-descriptors@1.0.2: + dependencies: + es-define-property: 1.0.1 + + has-symbols@1.1.0: {} + + has-yarn@3.0.0: {} + + hasown@2.0.2: + dependencies: + function-bind: 1.1.2 + + hast-util-from-parse5@8.0.3: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + devlop: 1.1.0 + hastscript: 9.0.1 + property-information: 7.1.0 + vfile: 6.0.3 + vfile-location: 5.0.3 + web-namespaces: 2.0.1 + + hast-util-parse-selector@4.0.0: + dependencies: + '@types/hast': 3.0.4 + + hast-util-raw@9.1.0: + dependencies: + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + '@ungap/structured-clone': 1.3.0 + hast-util-from-parse5: 8.0.3 + hast-util-to-parse5: 8.0.1 + html-void-elements: 3.0.0 + mdast-util-to-hast: 13.2.1 + parse5: 7.3.0 + unist-util-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + web-namespaces: 2.0.1 + zwitch: 2.0.4 + + hast-util-to-estree@3.1.3: + dependencies: + '@types/estree': 1.0.8 + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-attach-comments: 3.0.0 + 
estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + style-to-js: 1.1.21 + unist-util-position: 5.0.0 + zwitch: 2.0.4 + transitivePeerDependencies: + - supports-color + + hast-util-to-jsx-runtime@2.3.6: + dependencies: + '@types/estree': 1.0.8 + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + style-to-js: 1.1.21 + unist-util-position: 5.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + hast-util-to-parse5@8.0.1: + dependencies: + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + web-namespaces: 2.0.1 + zwitch: 2.0.4 + + hast-util-whitespace@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + hastscript@9.0.1: + dependencies: + '@types/hast': 3.0.4 + comma-separated-tokens: 2.0.3 + hast-util-parse-selector: 4.0.0 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + + he@1.2.0: {} + + history@4.10.1: + dependencies: + '@babel/runtime': 7.28.6 + loose-envify: 1.4.0 + resolve-pathname: 3.0.0 + tiny-invariant: 1.3.3 + tiny-warning: 1.0.3 + value-equal: 1.0.1 + + hoist-non-react-statics@3.3.2: + dependencies: + react-is: 16.13.1 + + hpack.js@2.1.6: + dependencies: + inherits: 2.0.4 + obuf: 1.1.2 + readable-stream: 2.3.8 + wbuf: 1.7.3 + + html-escaper@2.0.2: {} + + html-minifier-terser@6.1.0: + dependencies: + camel-case: 4.1.2 + clean-css: 5.3.3 + commander: 8.3.0 + he: 1.2.0 + param-case: 3.0.4 + relateurl: 0.2.7 + terser: 5.46.0 + + html-minifier-terser@7.2.0: + dependencies: + camel-case: 4.1.2 + clean-css: 5.3.3 
+ commander: 10.0.1 + entities: 4.5.0 + param-case: 3.0.4 + relateurl: 0.2.7 + terser: 5.46.0 + + html-tags@3.3.1: {} + + html-void-elements@3.0.0: {} + + html-webpack-plugin@5.6.6(webpack@5.104.1): + dependencies: + '@types/html-minifier-terser': 6.1.0 + html-minifier-terser: 6.1.0 + lodash: 4.17.23 + pretty-error: 4.0.0 + tapable: 2.3.0 + optionalDependencies: + webpack: 5.104.1 + + htmlparser2@10.1.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.2.2 + entities: 7.0.1 + + htmlparser2@6.1.0: + dependencies: + domelementtype: 2.3.0 + domhandler: 4.3.1 + domutils: 2.8.0 + entities: 2.2.0 + + htmlparser2@8.0.2: + dependencies: + domelementtype: 2.3.0 + domhandler: 5.0.3 + domutils: 3.2.2 + entities: 4.5.0 + + http-cache-semantics@4.2.0: {} + + http-deceiver@1.2.7: {} + + http-errors@1.8.1: + dependencies: + depd: 1.1.2 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 1.5.0 + toidentifier: 1.0.1 + + http-errors@2.0.1: + dependencies: + depd: 2.0.0 + inherits: 2.0.4 + setprototypeof: 1.2.0 + statuses: 2.0.2 + toidentifier: 1.0.1 + + http-parser-js@0.5.10: {} + + http-proxy-middleware@2.0.9(@types/express@4.17.25)(debug@4.4.3): + dependencies: + '@types/http-proxy': 1.17.17 + http-proxy: 1.18.1(debug@4.4.3) + is-glob: 4.0.3 + is-plain-obj: 3.0.0 + micromatch: 4.0.8 + optionalDependencies: + '@types/express': 4.17.25 + transitivePeerDependencies: + - debug + + http-proxy@1.18.1(debug@4.4.3): + dependencies: + eventemitter3: 4.0.7 + follow-redirects: 1.15.11(debug@4.4.3) + requires-port: 1.0.0 + transitivePeerDependencies: + - debug + + http2-wrapper@2.2.1: + dependencies: + quick-lru: 5.1.1 + resolve-alpn: 1.2.1 + + human-id@4.1.3: {} + + human-signals@2.1.0: {} + + hyperdyperid@1.2.0: {} + + iconv-lite@0.4.24: + dependencies: + safer-buffer: 2.1.2 + + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + iconv-lite@0.7.2: + dependencies: + safer-buffer: 2.1.2 + + icss-utils@5.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + 
+ ignore@5.3.2: {} + + ignore@7.0.5: {} + + image-size@2.0.2: {} + + immediate@3.3.0: {} + + import-fresh@3.3.1: + dependencies: + parent-module: 1.0.1 + resolve-from: 4.0.0 + + import-lazy@4.0.0: {} + + imurmurhash@0.1.4: {} + + indent-string@4.0.0: {} + + infima@0.2.0-alpha.45: {} + + inherits@2.0.4: {} + + ini@1.3.8: {} + + ini@2.0.0: {} + + inline-style-parser@0.2.7: {} + + internmap@1.0.1: {} + + internmap@2.0.3: {} + + invariant@2.2.4: + dependencies: + loose-envify: 1.4.0 + + ipaddr.js@1.9.1: {} + + ipaddr.js@2.3.0: {} + + is-alphabetical@2.0.1: {} + + is-alphanumerical@2.0.1: + dependencies: + is-alphabetical: 2.0.1 + is-decimal: 2.0.1 + + is-arrayish@0.2.1: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + + is-ci@3.0.1: + dependencies: + ci-info: 3.9.0 + + is-core-module@2.16.1: + dependencies: + hasown: 2.0.2 + + is-decimal@2.0.1: {} + + is-docker@2.2.1: {} + + is-docker@3.0.0: {} + + is-extendable@0.1.1: {} + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-hexadecimal@2.0.1: {} + + is-inside-container@1.0.0: + dependencies: + is-docker: 3.0.0 + + is-installed-globally@0.4.0: + dependencies: + global-dirs: 3.0.1 + is-path-inside: 3.0.3 + + is-network-error@1.3.0: {} + + is-npm@6.1.0: {} + + is-number@7.0.0: {} + + is-obj@1.0.1: {} + + is-obj@2.0.0: {} + + is-path-inside@3.0.3: {} + + is-plain-obj@3.0.0: {} + + is-plain-obj@4.1.0: {} + + is-plain-object@2.0.4: + dependencies: + isobject: 3.0.1 + + is-regexp@1.0.0: {} + + is-stream@2.0.1: {} + + is-subdir@1.2.0: + dependencies: + better-path-resolve: 1.0.0 + + is-typedarray@1.0.0: {} + + is-windows@1.0.2: {} + + is-wsl@2.2.0: + dependencies: + is-docker: 2.2.1 + + is-wsl@3.1.0: + dependencies: + is-inside-container: 1.0.0 + + is-yarn-global@0.4.1: {} + + isarray@0.0.1: {} + + isarray@1.0.0: {} + + isexe@2.0.0: {} + + isobject@3.0.1: {} + + istanbul-lib-coverage@3.2.2: {} + + istanbul-lib-report@3.0.1: + 
dependencies: + istanbul-lib-coverage: 3.2.2 + make-dir: 4.0.0 + supports-color: 7.2.0 + + istanbul-lib-source-maps@5.0.6: + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + debug: 4.4.3 + istanbul-lib-coverage: 3.2.2 + transitivePeerDependencies: + - supports-color + + istanbul-reports@3.2.0: + dependencies: + html-escaper: 2.0.2 + istanbul-lib-report: 3.0.1 + + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + jest-util@29.7.0: + dependencies: + '@jest/types': 29.6.3 + '@types/node': 20.19.30 + chalk: 4.1.2 + ci-info: 3.9.0 + graceful-fs: 4.2.11 + picomatch: 2.3.1 + + jest-worker@27.5.1: + dependencies: + '@types/node': 20.19.30 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jest-worker@29.7.0: + dependencies: + '@types/node': 20.19.30 + jest-util: 29.7.0 + merge-stream: 2.0.0 + supports-color: 8.1.1 + + jiti@1.21.7: {} + + joi@17.13.3: + dependencies: + '@hapi/hoek': 9.3.0 + '@hapi/topo': 5.1.0 + '@sideway/address': 4.1.5 + '@sideway/formula': 3.0.1 + '@sideway/pinpoint': 2.0.0 + + joycon@3.1.1: {} + + js-tokens@4.0.0: {} + + js-yaml@3.14.2: + dependencies: + argparse: 1.0.10 + esprima: 4.0.1 + + js-yaml@4.1.1: + dependencies: + argparse: 2.0.1 + + jsesc@3.1.0: {} + + json-buffer@3.0.1: {} + + json-parse-even-better-errors@2.3.1: {} + + json-schema-traverse@0.4.1: {} + + json-schema-traverse@1.0.0: {} + + json-stable-stringify-without-jsonify@1.0.1: {} + + json5@2.2.3: {} + + jsonfile@4.0.0: + optionalDependencies: + graceful-fs: 4.2.11 + + jsonfile@6.2.0: + dependencies: + universalify: 2.0.1 + optionalDependencies: + graceful-fs: 4.2.11 + + katex@0.16.28: + dependencies: + commander: 8.3.0 + + keyv@4.5.4: + dependencies: + json-buffer: 3.0.1 + + khroma@2.1.0: {} + + kind-of@6.0.3: {} + + klaw-sync@6.0.0: + dependencies: + graceful-fs: 4.2.11 + + kleur@3.0.3: {} + + langium@3.3.1: + dependencies: + chevrotain: 11.0.3 + chevrotain-allstar: 0.3.1(chevrotain@11.0.3) + vscode-languageserver: 
9.0.1 + vscode-languageserver-textdocument: 1.0.12 + vscode-uri: 3.0.8 + + latest-version@7.0.0: + dependencies: + package-json: 8.1.1 + + launch-editor@2.12.0: + dependencies: + picocolors: 1.1.1 + shell-quote: 1.8.3 + + layout-base@1.0.2: {} + + layout-base@2.0.1: {} + + leven@3.1.0: {} + + levn@0.4.1: + dependencies: + prelude-ls: 1.2.1 + type-check: 0.4.0 + + lilconfig@3.1.3: {} + + lines-and-columns@1.2.4: {} + + linkify-it@5.0.0: + dependencies: + uc.micro: 2.1.0 + + load-tsconfig@0.2.5: {} + + loader-runner@4.3.1: {} + + loader-utils@2.0.4: + dependencies: + big.js: 5.2.2 + emojis-list: 3.0.0 + json5: 2.2.3 + + locate-path@5.0.0: + dependencies: + p-locate: 4.1.0 + + locate-path@6.0.0: + dependencies: + p-locate: 5.0.0 + + locate-path@7.2.0: + dependencies: + p-locate: 6.0.0 + + lodash-es@4.17.21: {} + + lodash-es@4.17.23: {} + + lodash.debounce@4.0.8: {} + + lodash.memoize@4.1.2: {} + + lodash.merge@4.6.2: {} + + lodash.startcase@4.4.0: {} + + lodash.uniq@4.5.0: {} + + lodash@4.17.23: {} + + longest-streak@3.1.0: {} + + loose-envify@1.4.0: + dependencies: + js-tokens: 4.0.0 + + loupe@3.2.1: {} + + lower-case@2.0.2: + dependencies: + tslib: 2.8.1 + + lowercase-keys@3.0.0: {} + + lru-cache@10.4.3: {} + + lru-cache@5.1.1: + dependencies: + yallist: 3.1.1 + + lunr-languages@1.14.0: {} + + lunr@2.3.9: {} + + magic-string@0.30.21: + dependencies: + '@jridgewell/sourcemap-codec': 1.5.5 + + magicast@0.3.5: + dependencies: + '@babel/parser': 7.29.0 + '@babel/types': 7.29.0 + source-map-js: 1.2.1 + + make-dir@4.0.0: + dependencies: + semver: 7.7.3 + + mark.js@8.11.1: {} + + markdown-extensions@2.0.0: {} + + markdown-it@14.0.0: + dependencies: + argparse: 2.0.1 + entities: 4.5.0 + linkify-it: 5.0.0 + mdurl: 2.0.0 + punycode.js: 2.3.1 + uc.micro: 2.1.0 + + markdown-table@2.0.0: + dependencies: + repeat-string: 1.6.1 + + markdown-table@3.0.4: {} + + markdownlint-micromark@0.1.8: {} + + markdownlint@0.33.0: + dependencies: + markdown-it: 14.0.0 + markdownlint-micromark: 
0.1.8 + + marked@16.4.2: {} + + math-intrinsics@1.1.0: {} + + mdast-util-directive@3.1.0: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + parse-entities: 4.0.2 + stringify-entities: 4.0.4 + unist-util-visit-parents: 6.0.2 + transitivePeerDependencies: + - supports-color + + mdast-util-find-and-replace@3.0.2: + dependencies: + '@types/mdast': 4.0.4 + escape-string-regexp: 5.0.0 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + mdast-util-from-markdown@2.0.2: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + mdast-util-to-string: 4.0.0 + micromark: 4.0.2 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-decode-string: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-stringify-position: 4.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-frontmatter@2.0.1: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + escape-string-regexp: 5.0.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + micromark-extension-frontmatter: 2.0.0 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-autolink-literal@2.0.1: + dependencies: + '@types/mdast': 4.0.4 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-find-and-replace: 3.0.2 + micromark-util-character: 2.1.1 + + mdast-util-gfm-footnote@2.1.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + micromark-util-normalize-identifier: 2.0.1 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-strikethrough@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-table@2.0.0: + 
dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + markdown-table: 3.0.4 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm-task-list-item@2.0.0: + dependencies: + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-gfm@3.1.0: + dependencies: + mdast-util-from-markdown: 2.0.2 + mdast-util-gfm-autolink-literal: 2.0.1 + mdast-util-gfm-footnote: 2.1.0 + mdast-util-gfm-strikethrough: 2.0.0 + mdast-util-gfm-table: 2.0.0 + mdast-util-gfm-task-list-item: 2.0.0 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-expression@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-jsx@3.2.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + parse-entities: 4.0.2 + stringify-entities: 4.0.4 + unist-util-stringify-position: 4.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx@3.0.0: + dependencies: + mdast-util-from-markdown: 2.0.2 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdxjs-esm@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-phrasing@4.1.0: + dependencies: + '@types/mdast': 4.0.4 + 
unist-util-is: 6.0.1 + + mdast-util-to-hast@13.2.1: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.3.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.1 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.1.0 + vfile: 6.0.3 + + mdast-util-to-markdown@2.1.2: + dependencies: + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + longest-streak: 3.1.0 + mdast-util-phrasing: 4.1.0 + mdast-util-to-string: 4.0.0 + micromark-util-classify-character: 2.0.1 + micromark-util-decode-string: 2.0.1 + unist-util-visit: 5.1.0 + zwitch: 2.0.4 + + mdast-util-to-string@4.0.0: + dependencies: + '@types/mdast': 4.0.4 + + mdn-data@2.0.28: {} + + mdn-data@2.0.30: {} + + mdurl@2.0.0: {} + + media-typer@0.3.0: {} + + memfs@4.56.10(tslib@2.8.1): + dependencies: + '@jsonjoy.com/fs-core': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-fsa': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-builtins': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-to-fsa': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-node-utils': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-print': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/fs-snapshot': 4.56.10(tslib@2.8.1) + '@jsonjoy.com/json-pack': 1.21.0(tslib@2.8.1) + '@jsonjoy.com/util': 1.9.0(tslib@2.8.1) + glob-to-regex.js: 1.2.0(tslib@2.8.1) + thingies: 2.5.0(tslib@2.8.1) + tree-dump: 1.1.0(tslib@2.8.1) + tslib: 2.8.1 + + merge-descriptors@1.0.3: {} + + merge-stream@2.0.0: {} + + merge2@1.4.1: {} + + mermaid@11.12.2: + dependencies: + '@braintree/sanitize-url': 7.1.2 + '@iconify/utils': 3.1.0 + '@mermaid-js/parser': 0.6.3 + '@types/d3': 7.4.3 + cytoscape: 3.33.1 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.33.1) + cytoscape-fcose: 2.2.0(cytoscape@3.33.1) + d3: 7.9.0 + d3-sankey: 0.12.3 + dagre-d3-es: 7.0.13 + dayjs: 1.11.19 + dompurify: 3.3.1 + katex: 0.16.28 + khroma: 2.1.0 + lodash-es: 4.17.23 + marked: 16.4.2 + roughjs: 4.6.6 + stylis: 4.3.6 + ts-dedent: 2.2.0 + uuid: 11.1.0 + + 
methods@1.1.2: {} + + micromark-core-commonmark@2.0.3: + dependencies: + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-factory-destination: 2.0.1 + micromark-factory-label: 2.0.1 + micromark-factory-space: 2.0.1 + micromark-factory-title: 2.0.1 + micromark-factory-whitespace: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-html-tag-name: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-directive@3.0.2: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-factory-whitespace: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + parse-entities: 4.0.2 + + micromark-extension-frontmatter@2.0.0: + dependencies: + fault: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-autolink-literal@2.1.0: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-footnote@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-strikethrough@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-classify-character: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-table@2.1.1: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + 
micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm-tagfilter@2.0.0: + dependencies: + micromark-util-types: 2.0.2 + + micromark-extension-gfm-task-list-item@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-gfm@3.0.0: + dependencies: + micromark-extension-gfm-autolink-literal: 2.1.0 + micromark-extension-gfm-footnote: 2.1.0 + micromark-extension-gfm-strikethrough: 2.1.0 + micromark-extension-gfm-table: 2.1.1 + micromark-extension-gfm-tagfilter: 2.0.0 + micromark-extension-gfm-task-list-item: 2.1.0 + micromark-util-combine-extensions: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-mdx-expression@3.0.1: + dependencies: + '@types/estree': 1.0.8 + devlop: 1.1.0 + micromark-factory-mdx-expression: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-extension-mdx-jsx@3.0.2: + dependencies: + '@types/estree': 1.0.8 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + micromark-factory-mdx-expression: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + vfile-message: 4.0.3 + + micromark-extension-mdx-md@2.0.0: + dependencies: + micromark-util-types: 2.0.2 + + micromark-extension-mdxjs-esm@3.0.0: + dependencies: + '@types/estree': 1.0.8 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.3 + + micromark-extension-mdxjs@3.0.0: + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + micromark-extension-mdx-expression: 3.0.1 + 
micromark-extension-mdx-jsx: 3.0.2 + micromark-extension-mdx-md: 2.0.0 + micromark-extension-mdxjs-esm: 3.0.0 + micromark-util-combine-extensions: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-destination@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-label@2.0.1: + dependencies: + devlop: 1.1.0 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-mdx-expression@2.0.3: + dependencies: + '@types/estree': 1.0.8 + devlop: 1.1.0 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-events-to-acorn: 2.0.3 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + unist-util-position-from-estree: 2.0.0 + vfile-message: 4.0.3 + + micromark-factory-space@1.1.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-types: 1.1.0 + + micromark-factory-space@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-types: 2.0.2 + + micromark-factory-title@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-factory-whitespace@2.0.1: + dependencies: + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-character@1.2.0: + dependencies: + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + + micromark-util-character@2.1.1: + dependencies: + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-chunked@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-classify-character@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-combine-extensions@2.0.1: + dependencies: + micromark-util-chunked: 2.0.1 + 
micromark-util-types: 2.0.2 + + micromark-util-decode-numeric-character-reference@2.0.2: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-decode-string@2.0.1: + dependencies: + decode-named-character-reference: 1.3.0 + micromark-util-character: 2.1.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-symbol: 2.0.1 + + micromark-util-encode@2.0.1: {} + + micromark-util-events-to-acorn@2.0.3: + dependencies: + '@types/estree': 1.0.8 + '@types/unist': 3.0.3 + devlop: 1.1.0 + estree-util-visit: 2.0.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + vfile-message: 4.0.3 + + micromark-util-html-tag-name@2.0.1: {} + + micromark-util-normalize-identifier@2.0.1: + dependencies: + micromark-util-symbol: 2.0.1 + + micromark-util-resolve-all@2.0.1: + dependencies: + micromark-util-types: 2.0.2 + + micromark-util-sanitize-uri@2.0.1: + dependencies: + micromark-util-character: 2.1.1 + micromark-util-encode: 2.0.1 + micromark-util-symbol: 2.0.1 + + micromark-util-subtokenize@2.1.0: + dependencies: + devlop: 1.1.0 + micromark-util-chunked: 2.0.1 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + + micromark-util-symbol@1.1.0: {} + + micromark-util-symbol@2.0.1: {} + + micromark-util-types@1.1.0: {} + + micromark-util-types@2.0.2: {} + + micromark@4.0.2: + dependencies: + '@types/debug': 4.1.12 + debug: 4.4.3 + decode-named-character-reference: 1.3.0 + devlop: 1.1.0 + micromark-core-commonmark: 2.0.3 + micromark-factory-space: 2.0.1 + micromark-util-character: 2.1.1 + micromark-util-chunked: 2.0.1 + micromark-util-combine-extensions: 2.0.1 + micromark-util-decode-numeric-character-reference: 2.0.2 + micromark-util-encode: 2.0.1 + micromark-util-normalize-identifier: 2.0.1 + micromark-util-resolve-all: 2.0.1 + micromark-util-sanitize-uri: 2.0.1 + micromark-util-subtokenize: 2.1.0 + micromark-util-symbol: 2.0.1 + micromark-util-types: 2.0.2 + transitivePeerDependencies: + - supports-color + + micromatch@4.0.8: + 
dependencies: + braces: 3.0.3 + picomatch: 2.3.1 + + mime-db@1.33.0: {} + + mime-db@1.52.0: {} + + mime-db@1.54.0: {} + + mime-types@2.1.18: + dependencies: + mime-db: 1.33.0 + + mime-types@2.1.35: + dependencies: + mime-db: 1.52.0 + + mime-types@3.0.2: + dependencies: + mime-db: 1.54.0 + + mime@1.6.0: {} + + mimic-fn@2.1.0: {} + + mimic-response@3.1.0: {} + + mimic-response@4.0.0: {} + + mini-css-extract-plugin@2.10.0(webpack@5.104.1): + dependencies: + schema-utils: 4.3.3 + tapable: 2.3.0 + webpack: 5.104.1 + + minimalistic-assert@1.0.1: {} + + minimatch@3.1.2: + dependencies: + brace-expansion: 1.1.12 + + minimatch@9.0.5: + dependencies: + brace-expansion: 2.0.2 + + minimist@1.2.8: {} + + minipass@7.1.2: {} + + mlly@1.8.0: + dependencies: + acorn: 8.15.0 + pathe: 2.0.3 + pkg-types: 1.3.1 + ufo: 1.6.3 + + mri@1.2.0: {} + + mrmime@2.0.1: {} + + ms@2.0.0: {} + + ms@2.1.3: {} + + multicast-dns@7.2.5: + dependencies: + dns-packet: 5.6.1 + thunky: 1.1.0 + + mz@2.7.0: + dependencies: + any-promise: 1.3.0 + object-assign: 4.1.1 + thenify-all: 1.6.0 + + nanoid@3.3.11: {} + + natural-compare@1.4.0: {} + + negotiator@0.6.3: {} + + negotiator@0.6.4: {} + + neo-async@2.6.2: {} + + no-case@3.0.4: + dependencies: + lower-case: 2.0.2 + tslib: 2.8.1 + + node-emoji@2.2.0: + dependencies: + '@sindresorhus/is': 4.6.0 + char-regex: 1.0.2 + emojilib: 2.4.0 + skin-tone: 2.0.0 + + node-releases@2.0.27: {} + + normalize-path@3.0.0: {} + + normalize-url@8.1.1: {} + + npm-run-path@4.0.1: + dependencies: + path-key: 3.1.1 + + nprogress@0.2.0: {} + + nth-check@2.1.1: + dependencies: + boolbase: 1.0.0 + + null-loader@4.0.1(webpack@5.104.1): + dependencies: + loader-utils: 2.0.4 + schema-utils: 3.3.0 + webpack: 5.104.1 + + object-assign@4.1.1: {} + + object-inspect@1.13.4: {} + + object-keys@1.1.1: {} + + object.assign@4.1.7: + dependencies: + call-bind: 1.0.8 + call-bound: 1.0.4 + define-properties: 1.2.1 + es-object-atoms: 1.1.1 + has-symbols: 1.1.0 + object-keys: 1.1.1 + + obuf@1.1.2: {} + 
+ on-finished@2.4.1: + dependencies: + ee-first: 1.1.1 + + on-headers@1.1.0: {} + + onetime@5.1.2: + dependencies: + mimic-fn: 2.1.0 + + open@10.2.0: + dependencies: + default-browser: 5.4.0 + define-lazy-prop: 3.0.0 + is-inside-container: 1.0.0 + wsl-utils: 0.1.0 + + open@8.4.2: + dependencies: + define-lazy-prop: 2.0.0 + is-docker: 2.2.1 + is-wsl: 2.2.0 + + openapi-fetch@0.10.6: + dependencies: + openapi-typescript-helpers: 0.0.11 + + openapi-typescript-helpers@0.0.11: {} + + openapi-typescript@6.7.6: + dependencies: + ansi-colors: 4.1.3 + fast-glob: 3.3.3 + js-yaml: 4.1.1 + supports-color: 9.4.0 + undici: 5.29.0 + yargs-parser: 21.1.1 + + opener@1.5.2: {} + + optionator@0.9.4: + dependencies: + deep-is: 0.1.4 + fast-levenshtein: 2.0.6 + levn: 0.4.1 + prelude-ls: 1.2.1 + type-check: 0.4.0 + word-wrap: 1.2.5 + + outdent@0.5.0: {} + + p-cancelable@3.0.0: {} + + p-filter@2.1.0: + dependencies: + p-map: 2.1.0 + + p-finally@1.0.0: {} + + p-limit@2.3.0: + dependencies: + p-try: 2.2.0 + + p-limit@3.1.0: + dependencies: + yocto-queue: 0.1.0 + + p-limit@4.0.0: + dependencies: + yocto-queue: 1.2.2 + + p-locate@4.1.0: + dependencies: + p-limit: 2.3.0 + + p-locate@5.0.0: + dependencies: + p-limit: 3.1.0 + + p-locate@6.0.0: + dependencies: + p-limit: 4.0.0 + + p-map@2.1.0: {} + + p-map@4.0.0: + dependencies: + aggregate-error: 3.1.0 + + p-queue@6.6.2: + dependencies: + eventemitter3: 4.0.7 + p-timeout: 3.2.0 + + p-retry@6.2.1: + dependencies: + '@types/retry': 0.12.2 + is-network-error: 1.3.0 + retry: 0.13.1 + + p-timeout@3.2.0: + dependencies: + p-finally: 1.0.0 + + p-try@2.2.0: {} + + package-json-from-dist@1.0.1: {} + + package-json@8.1.1: + dependencies: + got: 12.6.1 + registry-auth-token: 5.1.1 + registry-url: 6.0.1 + semver: 7.7.3 + + package-manager-detector@0.2.11: + dependencies: + quansync: 0.2.11 + + package-manager-detector@1.6.0: {} + + param-case@3.0.4: + dependencies: + dot-case: 3.0.4 + tslib: 2.8.1 + + parent-module@1.0.1: + dependencies: + callsites: 3.1.0 
+ + parse-entities@4.0.2: + dependencies: + '@types/unist': 2.0.11 + character-entities-legacy: 3.0.0 + character-reference-invalid: 2.0.1 + decode-named-character-reference: 1.3.0 + is-alphanumerical: 2.0.1 + is-decimal: 2.0.1 + is-hexadecimal: 2.0.1 + + parse-json@5.2.0: + dependencies: + '@babel/code-frame': 7.29.0 + error-ex: 1.3.4 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + + parse-numeric-range@1.3.0: {} + + parse5-htmlparser2-tree-adapter@7.1.0: + dependencies: + domhandler: 5.0.3 + parse5: 7.3.0 + + parse5-parser-stream@7.1.2: + dependencies: + parse5: 7.3.0 + + parse5@7.3.0: + dependencies: + entities: 6.0.1 + + parseurl@1.3.3: {} + + pascal-case@3.1.2: + dependencies: + no-case: 3.0.4 + tslib: 2.8.1 + + path-data-parser@0.1.0: {} + + path-exists@4.0.0: {} + + path-exists@5.0.0: {} + + path-is-inside@1.0.2: {} + + path-key@3.1.1: {} + + path-parse@1.0.7: {} + + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.2 + + path-to-regexp@0.1.12: {} + + path-to-regexp@1.9.0: + dependencies: + isarray: 0.0.1 + + path-to-regexp@3.3.0: {} + + path-type@4.0.0: {} + + pathe@1.1.2: {} + + pathe@2.0.3: {} + + pathval@2.0.1: {} + + picocolors@1.1.1: {} + + picomatch@2.3.1: {} + + picomatch@4.0.3: {} + + pify@4.0.1: {} + + pirates@4.0.7: {} + + pkg-dir@7.0.0: + dependencies: + find-up: 6.3.0 + + pkg-types@1.3.1: + dependencies: + confbox: 0.1.8 + mlly: 1.8.0 + pathe: 2.0.3 + + pkijs@3.3.3: + dependencies: + '@noble/hashes': 1.4.0 + asn1js: 3.0.7 + bytestreamjs: 2.0.1 + pvtsutils: 1.3.6 + pvutils: 1.1.5 + tslib: 2.8.1 + + points-on-curve@0.2.0: {} + + points-on-path@0.2.1: + dependencies: + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + + postcss-attribute-case-insensitive@7.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-calc@9.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + postcss-value-parser: 4.2.0 + + 
postcss-clamp@4.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-color-functional-notation@7.0.12(postcss@8.5.6): + dependencies: + '@csstools/css-color-parser': 3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + postcss-color-hex-alpha@10.0.0(postcss@8.5.6): + dependencies: + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-color-rebeccapurple@10.0.0(postcss@8.5.6): + dependencies: + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-colormin@6.1.0(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + caniuse-api: 3.0.0 + colord: 2.9.3 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-convert-values@6.1.0(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-custom-media@11.0.6(postcss@8.5.6): + dependencies: + '@csstools/cascade-layer-name-parser': 2.0.5(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/media-query-list-parser': 4.0.3(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + postcss: 8.5.6 + + postcss-custom-properties@14.0.6(postcss@8.5.6): + dependencies: + '@csstools/cascade-layer-name-parser': 2.0.5(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + 
'@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-custom-selectors@8.0.5(postcss@8.5.6): + dependencies: + '@csstools/cascade-layer-name-parser': 2.0.5(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-dir-pseudo-class@9.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-discard-comments@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-duplicates@6.0.3(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-empty@6.0.3(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-overridden@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-discard-unused@6.0.5(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-double-position-gradients@6.0.4(postcss@8.5.6): + dependencies: + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-focus-visible@10.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-focus-within@9.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-font-variant@5.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-gap-properties@6.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-image-set-function@7.0.0(postcss@8.5.6): + dependencies: + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-lab-function@7.0.12(postcss@8.5.6): + dependencies: + '@csstools/css-color-parser': 
3.1.0(@csstools/css-parser-algorithms@3.0.5(@csstools/css-tokenizer@3.0.4))(@csstools/css-tokenizer@3.0.4) + '@csstools/css-parser-algorithms': 3.0.5(@csstools/css-tokenizer@3.0.4) + '@csstools/css-tokenizer': 3.0.4 + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/utilities': 2.0.0(postcss@8.5.6) + postcss: 8.5.6 + + postcss-load-config@6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.8.2): + dependencies: + lilconfig: 3.1.3 + optionalDependencies: + jiti: 1.21.7 + postcss: 8.5.6 + yaml: 2.8.2 + + postcss-loader@7.3.4(postcss@8.5.6)(typescript@5.9.3)(webpack@5.104.1): + dependencies: + cosmiconfig: 8.3.6(typescript@5.9.3) + jiti: 1.21.7 + postcss: 8.5.6 + semver: 7.7.3 + webpack: 5.104.1 + transitivePeerDependencies: + - typescript + + postcss-logical@8.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-merge-idents@6.0.3(postcss@8.5.6): + dependencies: + cssnano-utils: 4.0.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-merge-longhand@6.0.5(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + stylehacks: 6.1.1(postcss@8.5.6) + + postcss-merge-rules@6.1.1(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + caniuse-api: 3.0.0 + cssnano-utils: 4.0.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-minify-font-values@6.1.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-minify-gradients@6.0.3(postcss@8.5.6): + dependencies: + colord: 2.9.3 + cssnano-utils: 4.0.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-minify-params@6.1.0(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + cssnano-utils: 4.0.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-minify-selectors@6.0.4(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-modules-extract-imports@3.1.0(postcss@8.5.6): + 
dependencies: + postcss: 8.5.6 + + postcss-modules-local-by-default@4.2.0(postcss@8.5.6): + dependencies: + icss-utils: 5.1.0(postcss@8.5.6) + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + postcss-value-parser: 4.2.0 + + postcss-modules-scope@3.2.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-modules-values@4.0.0(postcss@8.5.6): + dependencies: + icss-utils: 5.1.0(postcss@8.5.6) + postcss: 8.5.6 + + postcss-nesting@13.0.2(postcss@8.5.6): + dependencies: + '@csstools/selector-resolve-nested': 3.1.0(postcss-selector-parser@7.1.1) + '@csstools/selector-specificity': 5.0.0(postcss-selector-parser@7.1.1) + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-normalize-charset@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-normalize-display-values@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-positions@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-repeat-style@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-string@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-timing-functions@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-unicode@6.1.0(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-url@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-normalize-whitespace@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-opacity-percentage@3.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-ordered-values@6.0.2(postcss@8.5.6): + dependencies: + cssnano-utils: 4.0.2(postcss@8.5.6) + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + 
postcss-overflow-shorthand@6.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-page-break@3.0.4(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-place@10.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-preset-env@10.6.1(postcss@8.5.6): + dependencies: + '@csstools/postcss-alpha-function': 1.0.1(postcss@8.5.6) + '@csstools/postcss-cascade-layers': 5.0.2(postcss@8.5.6) + '@csstools/postcss-color-function': 4.0.12(postcss@8.5.6) + '@csstools/postcss-color-function-display-p3-linear': 1.0.1(postcss@8.5.6) + '@csstools/postcss-color-mix-function': 3.0.12(postcss@8.5.6) + '@csstools/postcss-color-mix-variadic-function-arguments': 1.0.2(postcss@8.5.6) + '@csstools/postcss-content-alt-text': 2.0.8(postcss@8.5.6) + '@csstools/postcss-contrast-color-function': 2.0.12(postcss@8.5.6) + '@csstools/postcss-exponential-functions': 2.0.9(postcss@8.5.6) + '@csstools/postcss-font-format-keywords': 4.0.0(postcss@8.5.6) + '@csstools/postcss-gamut-mapping': 2.0.11(postcss@8.5.6) + '@csstools/postcss-gradients-interpolation-method': 5.0.12(postcss@8.5.6) + '@csstools/postcss-hwb-function': 4.0.12(postcss@8.5.6) + '@csstools/postcss-ic-unit': 4.0.4(postcss@8.5.6) + '@csstools/postcss-initial': 2.0.1(postcss@8.5.6) + '@csstools/postcss-is-pseudo-class': 5.0.3(postcss@8.5.6) + '@csstools/postcss-light-dark-function': 2.0.11(postcss@8.5.6) + '@csstools/postcss-logical-float-and-clear': 3.0.0(postcss@8.5.6) + '@csstools/postcss-logical-overflow': 2.0.0(postcss@8.5.6) + '@csstools/postcss-logical-overscroll-behavior': 2.0.0(postcss@8.5.6) + '@csstools/postcss-logical-resize': 3.0.0(postcss@8.5.6) + '@csstools/postcss-logical-viewport-units': 3.0.4(postcss@8.5.6) + '@csstools/postcss-media-minmax': 2.0.9(postcss@8.5.6) + '@csstools/postcss-media-queries-aspect-ratio-number-values': 3.0.5(postcss@8.5.6) + '@csstools/postcss-nested-calc': 4.0.0(postcss@8.5.6) + 
'@csstools/postcss-normalize-display-values': 4.0.1(postcss@8.5.6) + '@csstools/postcss-oklab-function': 4.0.12(postcss@8.5.6) + '@csstools/postcss-position-area-property': 1.0.0(postcss@8.5.6) + '@csstools/postcss-progressive-custom-properties': 4.2.1(postcss@8.5.6) + '@csstools/postcss-property-rule-prelude-list': 1.0.0(postcss@8.5.6) + '@csstools/postcss-random-function': 2.0.1(postcss@8.5.6) + '@csstools/postcss-relative-color-syntax': 3.0.12(postcss@8.5.6) + '@csstools/postcss-scope-pseudo-class': 4.0.1(postcss@8.5.6) + '@csstools/postcss-sign-functions': 1.1.4(postcss@8.5.6) + '@csstools/postcss-stepped-value-functions': 4.0.9(postcss@8.5.6) + '@csstools/postcss-syntax-descriptor-syntax-production': 1.0.1(postcss@8.5.6) + '@csstools/postcss-system-ui-font-family': 1.0.0(postcss@8.5.6) + '@csstools/postcss-text-decoration-shorthand': 4.0.3(postcss@8.5.6) + '@csstools/postcss-trigonometric-functions': 4.0.9(postcss@8.5.6) + '@csstools/postcss-unset-value': 4.0.0(postcss@8.5.6) + autoprefixer: 10.4.24(postcss@8.5.6) + browserslist: 4.28.1 + css-blank-pseudo: 7.0.1(postcss@8.5.6) + css-has-pseudo: 7.0.3(postcss@8.5.6) + css-prefers-color-scheme: 10.0.0(postcss@8.5.6) + cssdb: 8.7.1 + postcss: 8.5.6 + postcss-attribute-case-insensitive: 7.0.1(postcss@8.5.6) + postcss-clamp: 4.1.0(postcss@8.5.6) + postcss-color-functional-notation: 7.0.12(postcss@8.5.6) + postcss-color-hex-alpha: 10.0.0(postcss@8.5.6) + postcss-color-rebeccapurple: 10.0.0(postcss@8.5.6) + postcss-custom-media: 11.0.6(postcss@8.5.6) + postcss-custom-properties: 14.0.6(postcss@8.5.6) + postcss-custom-selectors: 8.0.5(postcss@8.5.6) + postcss-dir-pseudo-class: 9.0.1(postcss@8.5.6) + postcss-double-position-gradients: 6.0.4(postcss@8.5.6) + postcss-focus-visible: 10.0.1(postcss@8.5.6) + postcss-focus-within: 9.0.1(postcss@8.5.6) + postcss-font-variant: 5.0.0(postcss@8.5.6) + postcss-gap-properties: 6.0.0(postcss@8.5.6) + postcss-image-set-function: 7.0.0(postcss@8.5.6) + postcss-lab-function: 
7.0.12(postcss@8.5.6) + postcss-logical: 8.1.0(postcss@8.5.6) + postcss-nesting: 13.0.2(postcss@8.5.6) + postcss-opacity-percentage: 3.0.0(postcss@8.5.6) + postcss-overflow-shorthand: 6.0.0(postcss@8.5.6) + postcss-page-break: 3.0.4(postcss@8.5.6) + postcss-place: 10.0.0(postcss@8.5.6) + postcss-pseudo-class-any-link: 10.0.1(postcss@8.5.6) + postcss-replace-overflow-wrap: 4.0.0(postcss@8.5.6) + postcss-selector-not: 8.0.1(postcss@8.5.6) + + postcss-pseudo-class-any-link@10.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-reduce-idents@6.0.3(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-reduce-initial@6.1.0(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + caniuse-api: 3.0.0 + postcss: 8.5.6 + + postcss-reduce-transforms@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + + postcss-replace-overflow-wrap@4.0.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss-selector-not@8.0.1(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 7.1.1 + + postcss-selector-parser@6.1.2: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + postcss-selector-parser@7.1.1: + dependencies: + cssesc: 3.0.0 + util-deprecate: 1.0.2 + + postcss-sort-media-queries@5.2.0(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + sort-css-media-queries: 2.2.0 + + postcss-svgo@6.0.3(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-value-parser: 4.2.0 + svgo: 3.3.2 + + postcss-unique-selectors@6.0.4(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + postcss-value-parser@4.2.0: {} + + postcss-zindex@6.0.2(postcss@8.5.6): + dependencies: + postcss: 8.5.6 + + postcss@8.5.6: + dependencies: + nanoid: 3.3.11 + picocolors: 1.1.1 + source-map-js: 1.2.1 + + prelude-ls@1.2.1: {} + + prettier@2.8.8: {} + + prettier@3.8.1: {} + + pretty-error@4.0.0: + dependencies: + lodash: 4.17.23 + renderkid: 
3.0.0 + + pretty-time@1.1.0: {} + + prism-react-renderer@2.4.1(react@18.3.1): + dependencies: + '@types/prismjs': 1.26.5 + clsx: 2.1.1 + react: 18.3.1 + + prismjs@1.30.0: {} + + process-nextick-args@2.0.1: {} + + prompts@2.4.2: + dependencies: + kleur: 3.0.3 + sisteransi: 1.0.5 + + prop-types@15.8.1: + dependencies: + loose-envify: 1.4.0 + object-assign: 4.1.1 + react-is: 16.13.1 + + property-information@7.1.0: {} + + proto-list@1.2.4: {} + + proxy-addr@2.0.7: + dependencies: + forwarded: 0.2.0 + ipaddr.js: 1.9.1 + + punycode.js@2.3.1: {} + + punycode@2.3.1: {} + + pupa@3.3.0: + dependencies: + escape-goat: 4.0.0 + + pvtsutils@1.3.6: + dependencies: + tslib: 2.8.1 + + pvutils@1.1.5: {} + + qs@6.14.1: + dependencies: + side-channel: 1.1.0 + + quansync@0.2.11: {} + + queue-microtask@1.2.3: {} + + quick-lru@5.1.1: {} + + randombytes@2.1.0: + dependencies: + safe-buffer: 5.2.1 + + range-parser@1.2.0: {} + + range-parser@1.2.1: {} + + raw-body@2.5.3: + dependencies: + bytes: 3.1.2 + http-errors: 2.0.1 + iconv-lite: 0.4.24 + unpipe: 1.0.0 + + rc@1.2.8: + dependencies: + deep-extend: 0.6.0 + ini: 1.3.8 + minimist: 1.2.8 + strip-json-comments: 2.0.1 + + react-dom@18.3.1(react@18.3.1): + dependencies: + loose-envify: 1.4.0 + react: 18.3.1 + scheduler: 0.23.2 + + react-fast-compare@3.2.2: {} + + react-is@16.13.1: {} + + react-json-view-lite@2.5.0(react@18.3.1): + dependencies: + react: 18.3.1 + + react-loadable-ssr-addon-v5-slorber@1.0.1(@docusaurus/react-loadable@6.0.0(react@18.3.1))(webpack@5.104.1): + dependencies: + '@babel/runtime': 7.28.6 + react-loadable: '@docusaurus/react-loadable@6.0.0(react@18.3.1)' + webpack: 5.104.1 + + react-router-config@5.1.1(react-router@5.3.4(react@18.3.1))(react@18.3.1): + dependencies: + '@babel/runtime': 7.28.6 + react: 18.3.1 + react-router: 5.3.4(react@18.3.1) + + react-router-dom@5.3.4(react@18.3.1): + dependencies: + '@babel/runtime': 7.28.6 + history: 4.10.1 + loose-envify: 1.4.0 + prop-types: 15.8.1 + react: 18.3.1 + react-router: 
5.3.4(react@18.3.1) + tiny-invariant: 1.3.3 + tiny-warning: 1.0.3 + + react-router@5.3.4(react@18.3.1): + dependencies: + '@babel/runtime': 7.28.6 + history: 4.10.1 + hoist-non-react-statics: 3.3.2 + loose-envify: 1.4.0 + path-to-regexp: 1.9.0 + prop-types: 15.8.1 + react: 18.3.1 + react-is: 16.13.1 + tiny-invariant: 1.3.3 + tiny-warning: 1.0.3 + + react@18.3.1: + dependencies: + loose-envify: 1.4.0 + + read-yaml-file@1.1.0: + dependencies: + graceful-fs: 4.2.11 + js-yaml: 3.14.2 + pify: 4.0.1 + strip-bom: 3.0.0 + + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readdirp@3.6.0: + dependencies: + picomatch: 2.3.1 + + readdirp@4.1.2: {} + + recma-build-jsx@1.0.0: + dependencies: + '@types/estree': 1.0.8 + estree-util-build-jsx: 3.0.1 + vfile: 6.0.3 + + recma-jsx@1.0.1(acorn@8.15.0): + dependencies: + acorn: 8.15.0 + acorn-jsx: 5.3.2(acorn@8.15.0) + estree-util-to-js: 2.0.0 + recma-parse: 1.0.0 + recma-stringify: 1.0.0 + unified: 11.0.5 + + recma-parse@1.0.0: + dependencies: + '@types/estree': 1.0.8 + esast-util-from-js: 2.0.1 + unified: 11.0.5 + vfile: 6.0.3 + + recma-stringify@1.0.0: + dependencies: + '@types/estree': 1.0.8 + estree-util-to-js: 2.0.0 + unified: 11.0.5 + vfile: 6.0.3 + + reflect-metadata@0.2.2: {} + + regenerate-unicode-properties@10.2.2: + dependencies: + regenerate: 1.4.2 + + regenerate@1.4.2: {} + + regexpu-core@6.4.0: + dependencies: + regenerate: 1.4.2 + regenerate-unicode-properties: 10.2.2 + regjsgen: 0.8.0 + regjsparser: 0.13.0 + unicode-match-property-ecmascript: 2.0.0 + unicode-match-property-value-ecmascript: 2.2.1 + + registry-auth-token@5.1.1: + dependencies: + '@pnpm/npm-conf': 3.0.2 + + registry-url@6.0.1: + dependencies: + rc: 1.2.8 + + regjsgen@0.8.0: {} + + regjsparser@0.13.0: 
+ dependencies: + jsesc: 3.1.0 + + rehype-raw@7.0.0: + dependencies: + '@types/hast': 3.0.4 + hast-util-raw: 9.1.0 + vfile: 6.0.3 + + rehype-recma@1.0.0: + dependencies: + '@types/estree': 1.0.8 + '@types/hast': 3.0.4 + hast-util-to-estree: 3.1.3 + transitivePeerDependencies: + - supports-color + + relateurl@0.2.7: {} + + remark-directive@3.0.1: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-directive: 3.1.0 + micromark-extension-directive: 3.0.2 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-emoji@4.0.1: + dependencies: + '@types/mdast': 4.0.4 + emoticon: 4.1.0 + mdast-util-find-and-replace: 3.0.2 + node-emoji: 2.2.0 + unified: 11.0.5 + + remark-frontmatter@5.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-frontmatter: 2.0.1 + micromark-extension-frontmatter: 2.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-gfm@4.0.1: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-gfm: 3.1.0 + micromark-extension-gfm: 3.0.0 + remark-parse: 11.0.0 + remark-stringify: 11.0.0 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-mdx@3.1.1: + dependencies: + mdast-util-mdx: 3.0.0 + micromark-extension-mdxjs: 3.0.0 + transitivePeerDependencies: + - supports-color + + remark-parse@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-from-markdown: 2.0.2 + micromark-util-types: 2.0.2 + unified: 11.0.5 + transitivePeerDependencies: + - supports-color + + remark-rehype@11.1.2: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + mdast-util-to-hast: 13.2.1 + unified: 11.0.5 + vfile: 6.0.3 + + remark-stringify@11.0.0: + dependencies: + '@types/mdast': 4.0.4 + mdast-util-to-markdown: 2.1.2 + unified: 11.0.5 + + renderkid@3.0.0: + dependencies: + css-select: 4.3.0 + dom-converter: 0.2.0 + htmlparser2: 6.1.0 + lodash: 4.17.23 + strip-ansi: 6.0.1 + + repeat-string@1.6.1: {} + + require-from-string@2.0.2: {} + + require-like@0.1.2: {} + + requires-port@1.0.0: {} + + 
resolve-alpn@1.2.1: {} + + resolve-from@4.0.0: {} + + resolve-from@5.0.0: {} + + resolve-pathname@3.0.0: {} + + resolve@1.22.11: + dependencies: + is-core-module: 2.16.1 + path-parse: 1.0.7 + supports-preserve-symlinks-flag: 1.0.0 + + responselike@3.0.0: + dependencies: + lowercase-keys: 3.0.0 + + retry@0.13.1: {} + + reusify@1.1.0: {} + + robust-predicates@3.0.2: {} + + rollup@4.57.1: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.57.1 + '@rollup/rollup-android-arm64': 4.57.1 + '@rollup/rollup-darwin-arm64': 4.57.1 + '@rollup/rollup-darwin-x64': 4.57.1 + '@rollup/rollup-freebsd-arm64': 4.57.1 + '@rollup/rollup-freebsd-x64': 4.57.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.57.1 + '@rollup/rollup-linux-arm-musleabihf': 4.57.1 + '@rollup/rollup-linux-arm64-gnu': 4.57.1 + '@rollup/rollup-linux-arm64-musl': 4.57.1 + '@rollup/rollup-linux-loong64-gnu': 4.57.1 + '@rollup/rollup-linux-loong64-musl': 4.57.1 + '@rollup/rollup-linux-ppc64-gnu': 4.57.1 + '@rollup/rollup-linux-ppc64-musl': 4.57.1 + '@rollup/rollup-linux-riscv64-gnu': 4.57.1 + '@rollup/rollup-linux-riscv64-musl': 4.57.1 + '@rollup/rollup-linux-s390x-gnu': 4.57.1 + '@rollup/rollup-linux-x64-gnu': 4.57.1 + '@rollup/rollup-linux-x64-musl': 4.57.1 + '@rollup/rollup-openbsd-x64': 4.57.1 + '@rollup/rollup-openharmony-arm64': 4.57.1 + '@rollup/rollup-win32-arm64-msvc': 4.57.1 + '@rollup/rollup-win32-ia32-msvc': 4.57.1 + '@rollup/rollup-win32-x64-gnu': 4.57.1 + '@rollup/rollup-win32-x64-msvc': 4.57.1 + fsevents: 2.3.3 + + roughjs@4.6.6: + dependencies: + hachure-fill: 0.5.2 + path-data-parser: 0.1.0 + points-on-curve: 0.2.0 + points-on-path: 0.2.1 + + rtlcss@4.3.0: + dependencies: + escalade: 3.2.0 + picocolors: 1.1.1 + postcss: 8.5.6 + strip-json-comments: 3.1.1 + + run-applescript@7.1.0: {} + + run-parallel@1.2.0: + dependencies: + queue-microtask: 1.2.3 + + rw@1.3.3: {} + + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + + safer-buffer@2.1.2: {} + + 
sax@1.4.4: {} + + scheduler@0.23.2: + dependencies: + loose-envify: 1.4.0 + + schema-dts@1.1.5: {} + + schema-utils@3.3.0: + dependencies: + '@types/json-schema': 7.0.15 + ajv: 6.12.6 + ajv-keywords: 3.5.2(ajv@6.12.6) + + schema-utils@4.3.3: + dependencies: + '@types/json-schema': 7.0.15 + ajv: 8.17.1 + ajv-formats: 2.1.1(ajv@8.17.1) + ajv-keywords: 5.1.0(ajv@8.17.1) + + section-matter@1.0.0: + dependencies: + extend-shallow: 2.0.1 + kind-of: 6.0.3 + + select-hose@2.0.0: {} + + selfsigned@5.5.0: + dependencies: + '@peculiar/x509': 1.14.3 + pkijs: 3.3.3 + + semver-diff@4.0.0: + dependencies: + semver: 7.7.3 + + semver@6.3.1: {} + + semver@7.7.3: {} + + send@0.19.2: + dependencies: + debug: 2.6.9 + depd: 2.0.0 + destroy: 1.2.0 + encodeurl: 2.0.0 + escape-html: 1.0.3 + etag: 1.8.1 + fresh: 0.5.2 + http-errors: 2.0.1 + mime: 1.6.0 + ms: 2.1.3 + on-finished: 2.4.1 + range-parser: 1.2.1 + statuses: 2.0.2 + transitivePeerDependencies: + - supports-color + + serialize-javascript@6.0.2: + dependencies: + randombytes: 2.1.0 + + serve-handler@6.1.6: + dependencies: + bytes: 3.0.0 + content-disposition: 0.5.2 + mime-types: 2.1.18 + minimatch: 3.1.2 + path-is-inside: 1.0.2 + path-to-regexp: 3.3.0 + range-parser: 1.2.0 + + serve-index@1.9.2: + dependencies: + accepts: 1.3.8 + batch: 0.6.1 + debug: 2.6.9 + escape-html: 1.0.3 + http-errors: 1.8.1 + mime-types: 2.1.35 + parseurl: 1.3.3 + transitivePeerDependencies: + - supports-color + + serve-static@1.16.3: + dependencies: + encodeurl: 2.0.0 + escape-html: 1.0.3 + parseurl: 1.3.3 + send: 0.19.2 + transitivePeerDependencies: + - supports-color + + set-function-length@1.2.2: + dependencies: + define-data-property: 1.1.4 + es-errors: 1.3.0 + function-bind: 1.1.2 + get-intrinsic: 1.3.0 + gopd: 1.2.0 + has-property-descriptors: 1.0.2 + + setprototypeof@1.2.0: {} + + shallow-clone@3.0.1: + dependencies: + kind-of: 6.0.3 + + shallowequal@1.1.0: {} + + shebang-command@2.0.0: + dependencies: + shebang-regex: 3.0.0 + + shebang-regex@3.0.0: 
{} + + shell-quote@1.8.3: {} + + side-channel-list@1.0.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + + side-channel-map@1.0.1: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + + side-channel-weakmap@1.0.2: + dependencies: + call-bound: 1.0.4 + es-errors: 1.3.0 + get-intrinsic: 1.3.0 + object-inspect: 1.13.4 + side-channel-map: 1.0.1 + + side-channel@1.1.0: + dependencies: + es-errors: 1.3.0 + object-inspect: 1.13.4 + side-channel-list: 1.0.0 + side-channel-map: 1.0.1 + side-channel-weakmap: 1.0.2 + + siginfo@2.0.0: {} + + signal-exit@3.0.7: {} + + signal-exit@4.1.0: {} + + sirv@2.0.4: + dependencies: + '@polka/url': 1.0.0-next.29 + mrmime: 2.0.1 + totalist: 3.0.1 + + sisteransi@1.0.5: {} + + sitemap@7.1.2: + dependencies: + '@types/node': 17.0.45 + '@types/sax': 1.2.7 + arg: 5.0.2 + sax: 1.4.4 + + skin-tone@2.0.0: + dependencies: + unicode-emoji-modifier-base: 1.0.0 + + slash@3.0.0: {} + + slash@4.0.0: {} + + snake-case@3.0.4: + dependencies: + dot-case: 3.0.4 + tslib: 2.8.1 + + sockjs@0.3.24: + dependencies: + faye-websocket: 0.11.4 + uuid: 8.3.2 + websocket-driver: 0.7.4 + + sort-css-media-queries@2.2.0: {} + + source-map-js@1.2.1: {} + + source-map-support@0.5.21: + dependencies: + buffer-from: 1.1.2 + source-map: 0.6.1 + + source-map@0.6.1: {} + + source-map@0.7.6: {} + + space-separated-tokens@2.0.2: {} + + spawndamnit@3.0.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + + spdy-transport@3.0.0: + dependencies: + debug: 4.4.3 + detect-node: 2.1.0 + hpack.js: 2.1.6 + obuf: 1.1.2 + readable-stream: 3.6.2 + wbuf: 1.7.3 + transitivePeerDependencies: + - supports-color + + spdy@4.0.2: + dependencies: + debug: 4.4.3 + handle-thing: 2.0.1 + http-deceiver: 1.2.7 + select-hose: 2.0.0 + spdy-transport: 3.0.0 + transitivePeerDependencies: + - supports-color + + sprintf-js@1.0.3: {} + + srcset@4.0.0: {} + + stackback@0.0.2: {} + + statuses@1.5.0: {} + + statuses@2.0.2: {} + + 
std-env@3.10.0: {} + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.1.2 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + + stringify-object@3.3.0: + dependencies: + get-own-enumerable-property-symbols: 3.0.2 + is-obj: 1.0.1 + is-regexp: 1.0.0 + + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.1.2: + dependencies: + ansi-regex: 6.2.2 + + strip-bom-string@1.0.0: {} + + strip-bom@3.0.0: {} + + strip-final-newline@2.0.0: {} + + strip-json-comments@2.0.1: {} + + strip-json-comments@3.1.1: {} + + style-to-js@1.1.21: + dependencies: + style-to-object: 1.0.14 + + style-to-object@1.0.14: + dependencies: + inline-style-parser: 0.2.7 + + stylehacks@6.1.1(postcss@8.5.6): + dependencies: + browserslist: 4.28.1 + postcss: 8.5.6 + postcss-selector-parser: 6.1.2 + + stylis@4.3.6: {} + + sucrase@3.35.1: + dependencies: + '@jridgewell/gen-mapping': 0.3.13 + commander: 4.1.1 + lines-and-columns: 1.2.4 + mz: 2.7.0 + pirates: 4.0.7 + tinyglobby: 0.2.15 + ts-interface-checker: 0.1.13 + + supports-color@7.2.0: + dependencies: + has-flag: 4.0.0 + + supports-color@8.1.1: + dependencies: + has-flag: 4.0.0 + + supports-color@9.4.0: {} + + supports-preserve-symlinks-flag@1.0.0: {} + + svg-parser@2.0.4: {} + + svgo@3.3.2: + dependencies: + '@trysound/sax': 0.2.0 + commander: 7.2.0 + css-select: 5.2.2 + css-tree: 2.3.1 + css-what: 6.2.2 + csso: 5.0.5 + picocolors: 1.1.1 + + tapable@2.3.0: {} + + term-size@2.2.1: {} + + terser-webpack-plugin@5.3.16(webpack@5.104.1): + dependencies: + '@jridgewell/trace-mapping': 0.3.31 + jest-worker: 27.5.1 + schema-utils: 4.3.3 + serialize-javascript: 6.0.2 + terser: 5.46.0 + webpack: 
5.104.1 + + terser@5.46.0: + dependencies: + '@jridgewell/source-map': 0.3.11 + acorn: 8.15.0 + commander: 2.20.3 + source-map-support: 0.5.21 + + test-exclude@7.0.1: + dependencies: + '@istanbuljs/schema': 0.1.3 + glob: 10.5.0 + minimatch: 9.0.5 + + thenify-all@1.6.0: + dependencies: + thenify: 3.3.1 + + thenify@3.3.1: + dependencies: + any-promise: 1.3.0 + + thingies@2.5.0(tslib@2.8.1): + dependencies: + tslib: 2.8.1 + + thunky@1.1.0: {} + + tiny-invariant@1.3.3: {} + + tiny-warning@1.0.3: {} + + tinybench@2.9.0: {} + + tinyexec@0.3.2: {} + + tinyexec@1.0.2: {} + + tinyglobby@0.2.15: + dependencies: + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + + tinypool@1.1.1: {} + + tinyrainbow@1.2.0: {} + + tinyspy@3.0.2: {} + + to-regex-range@5.0.1: + dependencies: + is-number: 7.0.0 + + toidentifier@1.0.1: {} + + totalist@3.0.1: {} + + tree-dump@1.1.0(tslib@2.8.1): + dependencies: + tslib: 2.8.1 + + tree-kill@1.2.2: {} + + trim-lines@3.0.1: {} + + trough@2.2.0: {} + + ts-api-utils@2.4.0(typescript@5.9.3): + dependencies: + typescript: 5.9.3 + + ts-dedent@2.2.0: {} + + ts-interface-checker@0.1.13: {} + + tslib@1.14.1: {} + + tslib@2.8.1: {} + + tsup@8.5.1(jiti@1.21.7)(postcss@8.5.6)(typescript@5.9.3)(yaml@2.8.2): + dependencies: + bundle-require: 5.1.0(esbuild@0.27.2) + cac: 6.7.14 + chokidar: 4.0.3 + consola: 3.4.2 + debug: 4.4.3 + esbuild: 0.27.2 + fix-dts-default-cjs-exports: 1.0.1 + joycon: 3.1.1 + picocolors: 1.1.1 + postcss-load-config: 6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.8.2) + resolve-from: 5.0.0 + rollup: 4.57.1 + source-map: 0.7.6 + sucrase: 3.35.1 + tinyexec: 0.3.2 + tinyglobby: 0.2.15 + tree-kill: 1.2.2 + optionalDependencies: + postcss: 8.5.6 + typescript: 5.9.3 + transitivePeerDependencies: + - jiti + - supports-color + - tsx + - yaml + + tsyringe@4.10.0: + dependencies: + tslib: 1.14.1 + + turbo-darwin-64@2.8.1: + optional: true + + turbo-darwin-arm64@2.8.1: + optional: true + + turbo-linux-64@2.8.1: + optional: true + + turbo-linux-arm64@2.8.1: + 
optional: true + + turbo-windows-64@2.8.1: + optional: true + + turbo-windows-arm64@2.8.1: + optional: true + + turbo@2.8.1: + optionalDependencies: + turbo-darwin-64: 2.8.1 + turbo-darwin-arm64: 2.8.1 + turbo-linux-64: 2.8.1 + turbo-linux-arm64: 2.8.1 + turbo-windows-64: 2.8.1 + turbo-windows-arm64: 2.8.1 + + type-check@0.4.0: + dependencies: + prelude-ls: 1.2.1 + + type-fest@0.21.3: {} + + type-fest@1.4.0: {} + + type-fest@2.19.0: {} + + type-is@1.6.18: + dependencies: + media-typer: 0.3.0 + mime-types: 2.1.35 + + typedarray-to-buffer@3.1.5: + dependencies: + is-typedarray: 1.0.0 + + typescript@5.9.3: {} + + uc.micro@2.1.0: {} + + ufo@1.6.3: {} + + undici-types@6.21.0: {} + + undici@5.29.0: + dependencies: + '@fastify/busboy': 2.1.1 + + undici@7.19.2: {} + + unicode-canonical-property-names-ecmascript@2.0.1: {} + + unicode-emoji-modifier-base@1.0.0: {} + + unicode-match-property-ecmascript@2.0.0: + dependencies: + unicode-canonical-property-names-ecmascript: 2.0.1 + unicode-property-aliases-ecmascript: 2.2.0 + + unicode-match-property-value-ecmascript@2.2.1: {} + + unicode-property-aliases-ecmascript@2.2.0: {} + + unified@11.0.5: + dependencies: + '@types/unist': 3.0.3 + bail: 2.0.2 + devlop: 1.1.0 + extend: 3.0.2 + is-plain-obj: 4.1.0 + trough: 2.2.0 + vfile: 6.0.3 + + unique-string@3.0.0: + dependencies: + crypto-random-string: 4.0.0 + + unist-util-is@6.0.1: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position-from-estree@2.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-stringify-position@4.0.0: + dependencies: + '@types/unist': 3.0.3 + + unist-util-visit-parents@6.0.2: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + + unist-util-visit@5.1.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-is: 6.0.1 + unist-util-visit-parents: 6.0.2 + + universalify@0.1.2: {} + + universalify@2.0.1: {} + + unpipe@1.0.0: {} + + 
update-browserslist-db@1.2.3(browserslist@4.28.1): + dependencies: + browserslist: 4.28.1 + escalade: 3.2.0 + picocolors: 1.1.1 + + update-notifier@6.0.2: + dependencies: + boxen: 7.1.1 + chalk: 5.6.2 + configstore: 6.0.0 + has-yarn: 3.0.0 + import-lazy: 4.0.0 + is-ci: 3.0.1 + is-installed-globally: 0.4.0 + is-npm: 6.1.0 + is-yarn-global: 0.4.1 + latest-version: 7.0.0 + pupa: 3.3.0 + semver: 7.7.3 + semver-diff: 4.0.0 + xdg-basedir: 5.1.0 + + uri-js@4.4.1: + dependencies: + punycode: 2.3.1 + + url-loader@4.1.1(file-loader@6.2.0(webpack@5.104.1))(webpack@5.104.1): + dependencies: + loader-utils: 2.0.4 + mime-types: 2.1.35 + schema-utils: 3.3.0 + webpack: 5.104.1 + optionalDependencies: + file-loader: 6.2.0(webpack@5.104.1) + + util-deprecate@1.0.2: {} + + utila@0.4.0: {} + + utility-types@3.11.0: {} + + utils-merge@1.0.1: {} + + uuid@11.1.0: {} + + uuid@8.3.2: {} + + value-equal@1.0.1: {} + + vary@1.1.2: {} + + vfile-location@5.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile: 6.0.3 + + vfile-message@4.0.3: + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position: 4.0.0 + + vfile@6.0.3: + dependencies: + '@types/unist': 3.0.3 + vfile-message: 4.0.3 + + vite-node@2.1.9(@types/node@20.19.30)(terser@5.46.0): + dependencies: + cac: 6.7.14 + debug: 4.4.3 + es-module-lexer: 1.7.0 + pathe: 1.1.2 + vite: 5.4.21(@types/node@20.19.30)(terser@5.46.0) + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + + vite@5.4.21(@types/node@20.19.30)(terser@5.46.0): + dependencies: + esbuild: 0.21.5 + postcss: 8.5.6 + rollup: 4.57.1 + optionalDependencies: + '@types/node': 20.19.30 + fsevents: 2.3.3 + terser: 5.46.0 + + vitest@2.1.9(@types/node@20.19.30)(terser@5.46.0): + dependencies: + '@vitest/expect': 2.1.9 + '@vitest/mocker': 2.1.9(vite@5.4.21(@types/node@20.19.30)(terser@5.46.0)) + '@vitest/pretty-format': 2.1.9 + '@vitest/runner': 2.1.9 + '@vitest/snapshot': 
2.1.9 + '@vitest/spy': 2.1.9 + '@vitest/utils': 2.1.9 + chai: 5.3.3 + debug: 4.4.3 + expect-type: 1.3.0 + magic-string: 0.30.21 + pathe: 1.1.2 + std-env: 3.10.0 + tinybench: 2.9.0 + tinyexec: 0.3.2 + tinypool: 1.1.1 + tinyrainbow: 1.2.0 + vite: 5.4.21(@types/node@20.19.30)(terser@5.46.0) + vite-node: 2.1.9(@types/node@20.19.30)(terser@5.46.0) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 20.19.30 + transitivePeerDependencies: + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + + vscode-jsonrpc@8.2.0: {} + + vscode-languageserver-protocol@3.17.5: + dependencies: + vscode-jsonrpc: 8.2.0 + vscode-languageserver-types: 3.17.5 + + vscode-languageserver-textdocument@1.0.12: {} + + vscode-languageserver-types@3.17.5: {} + + vscode-languageserver@9.0.1: + dependencies: + vscode-languageserver-protocol: 3.17.5 + + vscode-uri@3.0.8: {} + + watchpack@2.5.1: + dependencies: + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.11 + + wbuf@1.7.3: + dependencies: + minimalistic-assert: 1.0.1 + + web-namespaces@2.0.1: {} + + webpack-bundle-analyzer@4.10.2: + dependencies: + '@discoveryjs/json-ext': 0.5.7 + acorn: 8.15.0 + acorn-walk: 8.3.4 + commander: 7.2.0 + debounce: 1.2.1 + escape-string-regexp: 4.0.0 + gzip-size: 6.0.0 + html-escaper: 2.0.2 + opener: 1.5.2 + picocolors: 1.1.1 + sirv: 2.0.4 + ws: 7.5.10 + transitivePeerDependencies: + - bufferutil + - utf-8-validate + + webpack-dev-middleware@7.4.5(tslib@2.8.1)(webpack@5.104.1): + dependencies: + colorette: 2.0.20 + memfs: 4.56.10(tslib@2.8.1) + mime-types: 3.0.2 + on-finished: 2.4.1 + range-parser: 1.2.1 + schema-utils: 4.3.3 + optionalDependencies: + webpack: 5.104.1 + transitivePeerDependencies: + - tslib + + webpack-dev-server@5.2.3(debug@4.4.3)(tslib@2.8.1)(webpack@5.104.1): + dependencies: + '@types/bonjour': 3.5.13 + '@types/connect-history-api-fallback': 1.5.4 + '@types/express': 4.17.25 + '@types/express-serve-static-core': 4.19.8 + 
'@types/serve-index': 1.9.4 + '@types/serve-static': 1.15.10 + '@types/sockjs': 0.3.36 + '@types/ws': 8.18.1 + ansi-html-community: 0.0.8 + bonjour-service: 1.3.0 + chokidar: 3.6.0 + colorette: 2.0.20 + compression: 1.8.1 + connect-history-api-fallback: 2.0.0 + express: 4.22.1 + graceful-fs: 4.2.11 + http-proxy-middleware: 2.0.9(@types/express@4.17.25)(debug@4.4.3) + ipaddr.js: 2.3.0 + launch-editor: 2.12.0 + open: 10.2.0 + p-retry: 6.2.1 + schema-utils: 4.3.3 + selfsigned: 5.5.0 + serve-index: 1.9.2 + sockjs: 0.3.24 + spdy: 4.0.2 + webpack-dev-middleware: 7.4.5(tslib@2.8.1)(webpack@5.104.1) + ws: 8.19.0 + optionalDependencies: + webpack: 5.104.1 + transitivePeerDependencies: + - bufferutil + - debug + - supports-color + - tslib + - utf-8-validate + + webpack-merge@5.10.0: + dependencies: + clone-deep: 4.0.1 + flat: 5.0.2 + wildcard: 2.0.1 + + webpack-merge@6.0.1: + dependencies: + clone-deep: 4.0.1 + flat: 5.0.2 + wildcard: 2.0.1 + + webpack-sources@3.3.3: {} + + webpack@5.104.1: + dependencies: + '@types/eslint-scope': 3.7.7 + '@types/estree': 1.0.8 + '@types/json-schema': 7.0.15 + '@webassemblyjs/ast': 1.14.1 + '@webassemblyjs/wasm-edit': 1.14.1 + '@webassemblyjs/wasm-parser': 1.14.1 + acorn: 8.15.0 + acorn-import-phases: 1.0.4(acorn@8.15.0) + browserslist: 4.28.1 + chrome-trace-event: 1.0.4 + enhanced-resolve: 5.18.4 + es-module-lexer: 2.0.0 + eslint-scope: 5.1.1 + events: 3.3.0 + glob-to-regexp: 0.4.1 + graceful-fs: 4.2.11 + json-parse-even-better-errors: 2.3.1 + loader-runner: 4.3.1 + mime-types: 2.1.35 + neo-async: 2.6.2 + schema-utils: 4.3.3 + tapable: 2.3.0 + terser-webpack-plugin: 5.3.16(webpack@5.104.1) + watchpack: 2.5.1 + webpack-sources: 3.3.3 + transitivePeerDependencies: + - '@swc/core' + - esbuild + - uglify-js + + webpackbar@6.0.1(webpack@5.104.1): + dependencies: + ansi-escapes: 4.3.2 + chalk: 4.1.2 + consola: 3.4.2 + figures: 3.2.0 + markdown-table: 2.0.0 + pretty-time: 1.1.0 + std-env: 3.10.0 + webpack: 5.104.1 + wrap-ansi: 7.0.0 + + 
websocket-driver@0.7.4: + dependencies: + http-parser-js: 0.5.10 + safe-buffer: 5.2.1 + websocket-extensions: 0.1.4 + + websocket-extensions@0.1.4: {} + + whatwg-encoding@3.1.1: + dependencies: + iconv-lite: 0.6.3 + + whatwg-mimetype@4.0.0: {} + + which@2.0.2: + dependencies: + isexe: 2.0.0 + + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + + widest-line@4.0.1: + dependencies: + string-width: 5.1.2 + + wildcard@2.0.1: {} + + word-wrap@1.2.5: {} + + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.3 + string-width: 5.1.2 + strip-ansi: 7.1.2 + + write-file-atomic@3.0.3: + dependencies: + imurmurhash: 0.1.4 + is-typedarray: 1.0.0 + signal-exit: 3.0.7 + typedarray-to-buffer: 3.1.5 + + ws@7.5.10: {} + + ws@8.19.0: {} + + wsl-utils@0.1.0: + dependencies: + is-wsl: 3.1.0 + + xdg-basedir@5.1.0: {} + + xml-js@1.6.11: + dependencies: + sax: 1.4.4 + + yallist@3.1.1: {} + + yaml@2.8.2: {} + + yargs-parser@21.1.1: {} + + yocto-queue@0.1.0: {} + + yocto-queue@1.2.2: {} + + zwitch@2.0.4: {} diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml new file mode 100644 index 0000000..d8cc1d4 --- /dev/null +++ b/pnpm-workspace.yaml @@ -0,0 +1,3 @@ +packages: + - "packages/*" + - "docs-site" diff --git a/pyproject.toml b/pyproject.toml index 208c835..de35bcc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,7 @@ dev = [ "pytest-asyncio>=0.21.0", "hypothesis>=6.0.0", "faker>=18.0.0", + "PyYAML>=6.0.0", "responses>=0.23.0", "freezegun>=1.2.0", "factory-boy>=3.2.0", @@ -62,6 +63,9 @@ local_scheme = "no-local-version" [tool.setuptools.packages.find] include = ["nexla_sdk*"] +[tool.setuptools.package-data] +nexla_sdk = ["py.typed"] + [tool.pytest.ini_options] markers = [ "integration: marks tests as integration tests (require API credentials)", diff --git a/scripts/parity/README.md b/scripts/parity/README.md new file mode 100644 index 
0000000..182604f --- /dev/null +++ b/scripts/parity/README.md @@ -0,0 +1,24 @@ +# Parity Tooling + +This folder contains local tooling for validating Python SDK parity against: + +- `plugin-redoc-0.yaml` (OpenAPI spec in this repo) +- `config/routes.rb` from the admin API codebase +- `nexla_sdk/resources/*.py` request surfaces + +## Commands + +Generate operation map used by `NexlaClient.raw`: + +```bash +python scripts/parity/generate_operation_map.py +``` + +Build parity matrices and diffs: + +```bash +python scripts/parity/build_matrices.py \ + --admin-routes /Users/sakshammittal/Documents/GitHub/admin-api/config/routes.rb +``` + +Outputs are written under `artifacts/parity/`. diff --git a/scripts/parity/build_matrices.py b/scripts/parity/build_matrices.py new file mode 100755 index 0000000..c1dd3e6 --- /dev/null +++ b/scripts/parity/build_matrices.py @@ -0,0 +1,267 @@ +#!/usr/bin/env python3 +"""Build route/spec/SDK parity matrices for Nexla SDK verification.""" + +from __future__ import annotations + +import argparse +import json +import os +import re +from dataclasses import dataclass +from pathlib import Path +from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple + +import yaml + +HTTP_METHODS = {"GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"} +SDK_REQUEST_RE = re.compile(r'_make_request\(\s*"([A-Z]+)"\s*,\s*([^)]+?)\s*(?:,|\))') +ROUTE_RE = re.compile(r"^\s*(get|post|put|delete|patch|match)\s+['\"]([^'\"]+)['\"]") +ROUTE_VIA_ARRAY_RE = re.compile(r":via\s*=>\s*\[([^\]]+)\]") +ROUTE_VIA_SINGLE_RE = re.compile(r"(?:via:|:via\s*=>)\s*:(\w+)") +PATH_VAR_RE = re.compile(r":([a-zA-Z_][a-zA-Z0-9_]*)") +ASSIGNMENT_RE = re.compile(r'^\s*(\w+)\s*=\s*(f?)"([^"]+)"') +DEF_RE = re.compile(r"^\s*def\s+\w+\(") +PATH_EQ_RE = re.compile(r'^\s*self\._path\s*=\s*"([^"]+)"') + + +@dataclass(frozen=True) +class Endpoint: + method: str + path: str + source: str + operation_id: Optional[str] = None + + def key(self) -> Tuple[str, str]: + return 
self.method, self.path + + +def normalize_path(path: str) -> str: + normalized = path.strip() + normalized = normalized.replace("(.:format)", "") + normalized = re.sub(r"\((/:([a-zA-Z_][a-zA-Z0-9_]*))\)", r"/:\2", normalized) + normalized = re.sub(r"\(/([a-zA-Z_][a-zA-Z0-9_]*)\)", r"/\1", normalized) + normalized = PATH_VAR_RE.sub(r"{\1}", normalized) + normalized = re.sub(r"//+", "/", normalized) + if not normalized.startswith("/"): + normalized = f"/{normalized}" + if len(normalized) > 1 and normalized.endswith("/"): + normalized = normalized[:-1] + return normalized + + +def load_openapi_endpoints(spec_path: Path) -> List[Endpoint]: + spec = yaml.safe_load(spec_path.read_text()) + endpoints: List[Endpoint] = [] + for path, path_item in (spec.get("paths") or {}).items(): + if not isinstance(path_item, dict): + continue + for method, operation in path_item.items(): + upper_method = method.upper() + if upper_method not in HTTP_METHODS: + continue + if not isinstance(operation, dict): + continue + endpoints.append( + Endpoint( + method=upper_method, + path=normalize_path(path), + source="openapi", + operation_id=operation.get("operationId"), + ) + ) + return endpoints + + +def _extract_match_methods(line: str) -> List[str]: + via_array = ROUTE_VIA_ARRAY_RE.search(line) + if via_array: + tokens = [token.strip() for token in via_array.group(1).split(",")] + return [token.lstrip(":").upper() for token in tokens if token.strip()] + via_single = ROUTE_VIA_SINGLE_RE.search(line) + if via_single: + return [via_single.group(1).upper()] + return [] + + +def load_admin_routes(routes_path: Path) -> List[Endpoint]: + endpoints: List[Endpoint] = [] + for line in routes_path.read_text().splitlines(): + route_match = ROUTE_RE.search(line) + if not route_match: + continue + method = route_match.group(1).upper() + path = normalize_path(route_match.group(2)) + methods: List[str] + if method == "MATCH": + methods = _extract_match_methods(line) + else: + methods = [method] + + for verb 
in methods: + if verb in HTTP_METHODS: + endpoints.append( + Endpoint(method=verb, path=path, source="admin_routes") + ) + return endpoints + + +def _resolve_sdk_path_expr( + expr: str, base_path: str, path_vars: Mapping[str, str] +) -> str: + candidate = expr.strip() + if candidate in path_vars: + candidate = path_vars[candidate] + + if candidate.startswith('f"') and candidate.endswith('"'): + candidate = candidate[2:-1] + elif candidate.startswith('"') and candidate.endswith('"'): + candidate = candidate[1:-1] + elif candidate.startswith("'") and candidate.endswith("'"): + candidate = candidate[1:-1] + + candidate = candidate.replace("{self._path}", base_path) + if candidate == "self._path": + candidate = base_path + + if "{self._path}" in candidate: + candidate = candidate.replace("{self._path}", base_path) + if "self._path" in candidate and candidate.startswith("self._path"): + candidate = candidate.replace("self._path", base_path, 1) + if candidate.startswith(base_path) or candidate.startswith("/"): + return normalize_path(candidate) + return normalize_path(f"{base_path}/{candidate}") + + +def load_sdk_endpoints(resources_dir: Path) -> List[Endpoint]: + endpoints: List[Endpoint] = [] + for resource_file in sorted(resources_dir.glob("*.py")): + text = resource_file.read_text() + if "_make_request(" not in text: + continue + + base_path = "" + path_assign = PATH_EQ_RE.search(text) + if path_assign: + base_path = path_assign.group(1) + + path_vars: Dict[str, str] = {} + for raw_line in text.splitlines(): + if DEF_RE.search(raw_line): + path_vars = {} + assign_match = ASSIGNMENT_RE.match(raw_line) + if assign_match: + var_name, is_f, value = assign_match.groups() + path_vars[var_name] = f'f"{value}"' if is_f else f'"{value}"' + + request_match = SDK_REQUEST_RE.search(raw_line) + if not request_match: + continue + + method, path_expr = request_match.groups() + if method not in HTTP_METHODS: + continue + if not base_path and "self._path" in path_expr: + continue + + 
try: + resolved_path = _resolve_sdk_path_expr(path_expr, base_path, path_vars) + except Exception: + continue + + endpoints.append( + Endpoint( + method=method, + path=resolved_path, + source=str(resource_file), + ) + ) + return endpoints + + +def dedupe(endpoints: Iterable[Endpoint]) -> List[Endpoint]: + seen: Dict[Tuple[str, str], Endpoint] = {} + for endpoint in endpoints: + seen.setdefault(endpoint.key(), endpoint) + return sorted(seen.values(), key=lambda endpoint: endpoint.key()) + + +def build_diff( + canonical: Iterable[Endpoint], sdk: Iterable[Endpoint] +) -> Dict[str, List[Dict[str, str]]]: + canonical_set = {(endpoint.method, endpoint.path) for endpoint in canonical} + sdk_set = {(endpoint.method, endpoint.path) for endpoint in sdk} + + missing_in_sdk = sorted(canonical_set - sdk_set) + extra_in_sdk = sorted(sdk_set - canonical_set) + + return { + "missing_in_sdk": [ + {"method": method, "path": path} for method, path in missing_in_sdk + ], + "extra_in_sdk": [ + {"method": method, "path": path} for method, path in extra_in_sdk + ], + } + + +def serialize_endpoints(endpoints: Iterable[Endpoint]) -> List[Dict[str, Any]]: + return [ + { + "method": endpoint.method, + "path": endpoint.path, + "source": endpoint.source, + "operation_id": endpoint.operation_id, + } + for endpoint in endpoints + ] + + +def write_json(path: Path, payload: Any) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.write_text(json.dumps(payload, indent=2, sort_keys=True)) + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument("--spec", default="plugin-redoc-0.yaml") + parser.add_argument( + "--admin-routes", + default=os.path.join( + os.getenv( + "NEXLA_ADMIN_API_PATH", + "/Users/sakshammittal/Documents/GitHub/admin-api", + ), + "config/routes.rb", + ), + ) + parser.add_argument("--resources-dir", default="nexla_sdk/resources") + parser.add_argument("--out-dir", default="artifacts/parity") + args = parser.parse_args() + + out_dir = 
Path(args.out_dir) + spec_endpoints = dedupe(load_openapi_endpoints(Path(args.spec))) + admin_endpoints = dedupe(load_admin_routes(Path(args.admin_routes))) + sdk_endpoints = dedupe(load_sdk_endpoints(Path(args.resources_dir))) + + write_json(out_dir / "openapi_matrix.json", serialize_endpoints(spec_endpoints)) + write_json( + out_dir / "admin_routes_matrix.json", serialize_endpoints(admin_endpoints) + ) + write_json(out_dir / "sdk_matrix.json", serialize_endpoints(sdk_endpoints)) + write_json( + out_dir / "diff_openapi_vs_sdk.json", + build_diff(spec_endpoints, sdk_endpoints), + ) + write_json( + out_dir / "diff_admin_routes_vs_sdk.json", + build_diff(admin_endpoints, sdk_endpoints), + ) + + print(f"Wrote parity matrices to {out_dir}") + print(f"OpenAPI endpoints: {len(spec_endpoints)}") + print(f"Admin route endpoints: {len(admin_endpoints)}") + print(f"SDK endpoints: {len(sdk_endpoints)}") + + +if __name__ == "__main__": + main() diff --git a/scripts/parity/check_operation_map_sync.py b/scripts/parity/check_operation_map_sync.py new file mode 100755 index 0000000..c5de62d --- /dev/null +++ b/scripts/parity/check_operation_map_sync.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python3 +"""Fail if generated operation map is out of sync with OpenAPI spec.""" + +from __future__ import annotations + +import argparse +import subprocess +import sys +import tempfile +from pathlib import Path + + +def main() -> int: + parser = argparse.ArgumentParser() + parser.add_argument("--spec", default="plugin-redoc-0.yaml") + parser.add_argument("--target", default="nexla_sdk/generated/operation_map.py") + args = parser.parse_args() + + target = Path(args.target) + if not target.exists(): + print(f"Target file does not exist: {target}", file=sys.stderr) + return 2 + + with tempfile.TemporaryDirectory() as tmpdir: + tmp_target = Path(tmpdir) / "operation_map.py" + cmd = [ + sys.executable, + "scripts/parity/generate_operation_map.py", + "--spec", + args.spec, + "--output", + str(tmp_target), 
+ ] + result = subprocess.run(cmd, check=False, capture_output=True, text=True) + if result.returncode != 0: + sys.stderr.write(result.stderr) + return result.returncode + + current = target.read_text() + generated = tmp_target.read_text() + if current != generated: + print( + "operation_map.py is out of sync with plugin-redoc-0.yaml. " + "Run: python scripts/parity/generate_operation_map.py", + file=sys.stderr, + ) + return 1 + + print("operation_map.py is in sync.") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/scripts/parity/generate_operation_map.py b/scripts/parity/generate_operation_map.py new file mode 100755 index 0000000..2f4eabc --- /dev/null +++ b/scripts/parity/generate_operation_map.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python3 +"""Generate a Python operation map from the OpenAPI spec.""" + +from __future__ import annotations + +import argparse +import re +from pathlib import Path +from typing import Any, Dict, Iterable, List, Tuple + +import yaml + +HTTP_METHODS = {"get", "post", "put", "delete", "patch", "head", "options"} +SAFE_IDENTIFIER = re.compile(r"[^a-zA-Z0-9_]") + + +def normalize_operation_id(operation_id: str, fallback: str) -> str: + cleaned = SAFE_IDENTIFIER.sub("_", operation_id).strip("_") + if not cleaned: + cleaned = fallback + if cleaned[0].isdigit(): + cleaned = f"op_{cleaned}" + return cleaned + + +def iter_operations(spec: Dict[str, Any]) -> Iterable[Tuple[str, Dict[str, Any]]]: + paths = spec.get("paths", {}) + for path, path_item in paths.items(): + if not isinstance(path_item, dict): + continue + for method, operation in path_item.items(): + if method.lower() not in HTTP_METHODS or not isinstance(operation, dict): + continue + fallback = f"{method.lower()}_{path.strip('/').replace('/', '_')}" + raw_operation_id = operation.get("operationId") or fallback + operation_id = normalize_operation_id(raw_operation_id, fallback) + yield ( + operation_id, + { + "method": method.upper(), + "path": path, 
+ "tags": operation.get("tags", []), + "summary": operation.get("summary", ""), + "path_params": sorted( + {match.group(1) for match in re.finditer(r"\{([^}]+)\}", path)} + ), + }, + ) + + +def render_output(operations: Dict[str, Dict[str, Any]]) -> str: + operation_ids = sorted(operations.keys()) + literal_values = ",\n ".join( + repr(operation_id) for operation_id in operation_ids + ) + lines: List[str] = [] + lines.append( + '"""Auto-generated operation map from OpenAPI. Do not edit manually."""' + ) + lines.append("") + lines.append("from typing import Dict, List, Literal, TypedDict") + lines.append("") + lines.append("") + lines.append("class OperationSpec(TypedDict):") + lines.append(" method: str") + lines.append(" path: str") + lines.append(" tags: List[str]") + lines.append(" summary: str") + lines.append(" path_params: List[str]") + lines.append("") + lines.append("") + lines.append("OperationId = Literal[") + if literal_values: + lines.append(f" {literal_values}") + lines.append("]") + lines.append("") + lines.append("") + lines.append("OPERATION_MAP: Dict[str, OperationSpec] = {") + for operation_id in operation_ids: + spec = operations[operation_id] + lines.append(f" {operation_id!r}: {{") + lines.append(f" 'method': {spec['method']!r},") + lines.append(f" 'path': {spec['path']!r},") + lines.append(f" 'tags': {spec['tags']!r},") + lines.append(f" 'summary': {spec['summary']!r},") + lines.append(f" 'path_params': {spec['path_params']!r},") + lines.append(" },") + lines.append("}") + lines.append("") + return "\n".join(lines) + + +def main() -> None: + parser = argparse.ArgumentParser() + parser.add_argument( + "--spec", + default="plugin-redoc-0.yaml", + help="Path to OpenAPI spec file", + ) + parser.add_argument( + "--output", + default="nexla_sdk/generated/operation_map.py", + help="Output Python module path", + ) + args = parser.parse_args() + + spec_path = Path(args.spec) + out_path = Path(args.output) + out_path.parent.mkdir(parents=True, 
exist_ok=True) + + spec = yaml.safe_load(spec_path.read_text()) + operations: Dict[str, Dict[str, Any]] = {} + + duplicates: Dict[str, int] = {} + for operation_id, operation_spec in iter_operations(spec): + if operation_id in operations: + duplicates[operation_id] = duplicates.get(operation_id, 1) + 1 + operation_id = f"{operation_id}_{duplicates[operation_id]}" + operations[operation_id] = operation_spec + + output = render_output(operations) + out_path.write_text(output) + + print(f"Generated {out_path} with {len(operations)} operations from {spec_path}.") + + +if __name__ == "__main__": + main() diff --git a/test_auth_param_import.py b/test_auth_param_import.py new file mode 100644 index 0000000..d4bc7d5 --- /dev/null +++ b/test_auth_param_import.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +"""Test script to verify auth_parameters imports work without circular dependency.""" + +import sys +import importlib.util + +# Load the module directly without triggering nexla_sdk.__init__ +spec = importlib.util.spec_from_file_location( + "auth_parameters_responses", + "nexla_sdk/models/auth_parameters/responses.py" +) +module = importlib.util.module_from_spec(spec) + +# Manually set up dependencies before loading +sys.modules['nexla_sdk'] = type(sys)('nexla_sdk') +sys.modules['nexla_sdk.models'] = type(sys)('nexla_sdk.models') +sys.modules['nexla_sdk.models.auth_parameters'] = type(sys)('nexla_sdk.models.auth_parameters') + +# Load base model +from nexla_sdk.models.base import BaseModel +sys.modules['nexla_sdk.models.base'] = type(sys)('nexla_sdk.models.base') +sys.modules['nexla_sdk.models.base'].BaseModel = BaseModel + +# Load vendor responses +from nexla_sdk.models.vendors.responses import Vendor +sys.modules['nexla_sdk.models.vendors'] = type(sys)('nexla_sdk.models.vendors') +sys.modules['nexla_sdk.models.vendors.responses'] = type(sys)('nexla_sdk.models.vendors.responses') +sys.modules['nexla_sdk.models.vendors.responses'].Vendor = Vendor + +try: + 
def test_client_does_not_expose_removed_mcp_resources():
    """The removed MCP resources must no longer be attributes of the client."""
    client = NexlaClient(service_key="test_service_key")
    for removed_attr in ("tools", "tool_sets", "mcp_sessions"):
        assert not hasattr(client, removed_attr)
+""" + +import pytest + +from nexla_sdk.exceptions import AuthorizationError, NotFoundError +from nexla_sdk.models.access import UserAccessorRequest, TeamAccessorRequest +from tests.utils import MockResponseBuilder, create_http_error + + +@pytest.mark.unit +class TestAccessorCRUDOperations: + """Tests for accessor CRUD operations.""" + + def test_get_accessors_makes_correct_request(self, mock_client, mock_http_client): + """Test that get_accessors makes the correct HTTP request.""" + # Arrange + resource_id = 123 + # Return empty list to avoid model parsing issues + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + result = mock_client.sources.get_accessors(resource_id) + + # Assert + assert result == [] + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/accessors" + ) + + def test_get_accessors_empty_list(self, mock_client, mock_http_client): + """Test getting accessors returns empty list when none exist.""" + # Arrange + resource_id = 123 + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + accessors = mock_client.sources.get_accessors(resource_id) + + # Assert + assert accessors == [] + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/accessors" + ) + + def test_add_accessors_makes_correct_request(self, mock_client, mock_http_client): + """Test that add_accessors makes PUT request with correct body.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=["collaborator"] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.add_accessors(resource_id, [accessor]) + + # Assert + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/accessors" + ) + # Verify the request body contains the accessor data + last_request = mock_http_client.get_last_request() + assert last_request is not None + assert "accessors" in 
str(last_request.get("json", {})) + + def test_replace_accessors_uses_post(self, mock_client, mock_http_client): + """Test that replace_accessors uses POST method.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=["owner"] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.replace_accessors(resource_id, [accessor]) + + # Assert + mock_http_client.assert_request_made( + "POST", f"/data_sources/{resource_id}/accessors" + ) + + def test_delete_accessors_specific(self, mock_client, mock_http_client): + """Test deleting specific accessors.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=["collaborator"] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.delete_accessors(resource_id, [accessor]) + + # Assert + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/accessors" + ) + + def test_delete_accessors_all(self, mock_client, mock_http_client): + """Test deleting all accessors (passing None).""" + # Arrange + resource_id = 123 + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + result = mock_client.sources.delete_accessors(resource_id, None) + + # Assert + assert result == [] + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/accessors" + ) + + +@pytest.mark.unit +class TestAccessControlErrorHandling: + """Tests for access control error scenarios.""" + + def test_accessor_not_found_returns_404(self, mock_client, mock_http_client): + """Test that accessing non-existent resource returns 404.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/accessors", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.get_accessors(resource_id) + + def 
test_insufficient_permissions_returns_403(self, mock_client, mock_http_client): + """Test that unauthorized access returns 403.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/data_sources/{resource_id}/accessors", + create_http_error(403, "Forbidden"), + ) + + # Act & Assert + with pytest.raises(AuthorizationError): + mock_client.sources.get_accessors(resource_id) + + +@pytest.mark.unit +class TestAccessControlAcrossResources: + """Tests verifying accessor operations work across resource types.""" + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ("lookups", "/data_maps"), + ("projects", "/projects"), + ("teams", "/teams"), + ], + ) + def test_get_accessors_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test get_accessors works for different resource types.""" + # Arrange + resource_id = 123 + mock_http_client.add_response(f"{endpoint}/{resource_id}/accessors", []) + + # Act + resource = getattr(mock_client, resource_name) + accessors = resource.get_accessors(resource_id) + + # Assert + assert accessors == [] + mock_http_client.assert_request_made( + "GET", f"{endpoint}/{resource_id}/accessors" + ) + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_add_accessors_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test add_accessors works for different resource types.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=["collaborator"] + ) + mock_http_client.add_response(f"{endpoint}/{resource_id}/accessors", []) + + # Act + resource = getattr(mock_client, resource_name) + resource.add_accessors(resource_id, 
[accessor]) + + # Assert + mock_http_client.assert_request_made( + "PUT", f"{endpoint}/{resource_id}/accessors" + ) + + +@pytest.mark.unit +class TestAccessorRequestTypes: + """Tests for different accessor request types (USER, TEAM).""" + + def test_user_accessor_request(self, mock_client, mock_http_client): + """Test creating a USER type accessor request.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=["collaborator"] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.add_accessors(resource_id, [accessor]) + + # Assert + last_request = mock_http_client.get_last_request() + request_data = last_request.get("json", {}) + assert "accessors" in request_data + accessor_data = request_data["accessors"][0] + assert accessor_data["type"] == "USER" + + def test_team_accessor_request(self, mock_client, mock_http_client): + """Test creating a TEAM type accessor request.""" + # Arrange + resource_id = 123 + accessor = TeamAccessorRequest( + id=789, access_roles=["collaborator"] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.add_accessors(resource_id, [accessor]) + + # Assert + last_request = mock_http_client.get_last_request() + request_data = last_request.get("json", {}) + assert "accessors" in request_data + accessor_data = request_data["accessors"][0] + assert accessor_data["type"] == "TEAM" + + def test_multiple_access_roles(self, mock_client, mock_http_client): + """Test accessor with multiple access roles.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=["collaborator", "admin"] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.add_accessors(resource_id, [accessor]) + + # Assert + last_request = mock_http_client.get_last_request() + request_data = last_request.get("json", {}) + accessor_data = 
request_data["accessors"][0] + assert "collaborator" in accessor_data["access_roles"] + assert "admin" in accessor_data["access_roles"] + + +@pytest.mark.unit +class TestAccessRoleTypes: + """Tests for different access role values.""" + + @pytest.mark.parametrize( + "access_role", + ["owner", "collaborator", "admin", "operator"], + ) + def test_supported_access_roles( + self, mock_client, mock_http_client, access_role + ): + """Test that common access roles are accepted.""" + # Arrange + resource_id = 123 + accessor = UserAccessorRequest( + id=456, access_roles=[access_role] + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/accessors", []) + + # Act + mock_client.sources.add_accessors(resource_id, [accessor]) + + # Assert + last_request = mock_http_client.get_last_request() + request_data = last_request.get("json", {}) + accessor_data = request_data["accessors"][0] + assert access_role in accessor_data["access_roles"] diff --git a/tests/unit/test_access_insights.py b/tests/unit/test_access_insights.py new file mode 100644 index 0000000..08187dd --- /dev/null +++ b/tests/unit/test_access_insights.py @@ -0,0 +1,434 @@ +"""Unit tests for access insights operations across resources. 
"""Unit tests for access insights operations across resources.

Covers:
- get_access_insights: explain why the current user can access a resource
- get_users_access_insights: access insights for every user with access
- list_accessible: resources visible to the current user
"""

import pytest

from nexla_sdk.exceptions import AuthorizationError, NotFoundError
from tests.utils import MockResponseBuilder, create_http_error


@pytest.mark.unit
class TestGetAccessInsights:
    """Behaviour of get_access_insights."""

    def test_get_access_insights_returns_rules_list(self, mock_client, mock_http_client):
        """The insights payload comes back from GET /{endpoint}/{id}/access."""
        rid = 123
        payload = MockResponseBuilder.access_insights_response(
            resource_id=rid,
            access_granted=True,
            access_reason="owner",
            access_path=[{"type": "direct", "role": "owner"}],
        )
        mock_http_client.add_response(f"/data_sources/{rid}/access", payload)

        insights = mock_client.sources.get_access_insights(rid)

        assert insights["access_granted"] is True
        assert insights["access_reason"] == "owner"
        assert insights["resource_id"] == rid
        mock_http_client.assert_request_made("GET", f"/data_sources/{rid}/access")

    def test_get_access_insights_with_accessor_user_id_parameter(
        self, mock_client, mock_http_client
    ):
        """accessor_user_id is forwarded as a query parameter."""
        rid = 123
        target_user = 456
        payload = MockResponseBuilder.access_insights_response(
            resource_id=rid,
            access_granted=True,
            access_reason="collaborator",
        )
        mock_http_client.add_response(f"/data_sources/{rid}/access", payload)

        insights = mock_client.sources.get_access_insights(
            rid, accessor_user_id=target_user
        )

        assert insights["access_granted"] is True
        mock_http_client.assert_request_made("GET", f"/data_sources/{rid}/access")
        sent = mock_http_client.get_last_request()
        assert sent is not None
        assert sent.get("params", {}).get("accessor_user_id") == target_user

    def test_get_access_insights_owner_rule_present(
        self, mock_client, mock_http_client
    ):
        """Direct ownership shows up as the first hop of the access path."""
        rid = 123
        payload = MockResponseBuilder.access_insights_response(
            resource_id=rid,
            access_granted=True,
            access_reason="owner",
            access_path=[
                {"type": "direct", "role": "owner"},
            ],
        )
        mock_http_client.add_response(f"/data_sources/{rid}/access", payload)

        insights = mock_client.sources.get_access_insights(rid)

        assert insights["access_granted"] is True
        assert insights["access_reason"] == "owner"
        assert len(insights["access_path"]) > 0
        first_hop = insights["access_path"][0]
        assert first_hop["role"] == "owner"
        assert first_hop["type"] == "direct"

    def test_get_access_insights_team_member_access(
        self, mock_client, mock_http_client
    ):
        """Team-mediated access is reported with the granting team's details."""
        rid = 123
        payload = {
            "access_granted": True,
            "access_reason": "team_member",
            "access_path": [
                {
                    "type": "team",
                    "role": "collaborator",
                    "team_id": 789,
                    "team_name": "Engineering Team",
                }
            ],
            "resource_id": rid,
            "resource_type": "data_source",
        }
        mock_http_client.add_response(f"/data_sources/{rid}/access", payload)

        insights = mock_client.sources.get_access_insights(rid)

        assert insights["access_granted"] is True
        assert insights["access_reason"] == "team_member"
        assert insights["access_path"][0]["type"] == "team"
        assert insights["access_path"][0]["team_id"] == 789


@pytest.mark.unit
class TestGetUsersAccessInsights:
    """Behaviour of get_users_access_insights."""

    def test_get_users_access_insights_returns_user_access_list(
        self, mock_client, mock_http_client
    ):
        """Every user with access is listed in the response."""
        rid = 123
        payload = {
            "users": [
                {
                    "user_id": 1,
                    "email": "owner@example.com",
                    "full_name": "Owner User",
                    "access_roles": ["owner"],
                    "access_type": "direct",
                },
                {
                    "user_id": 2,
                    "email": "collab@example.com",
                    "full_name": "Collaborator User",
                    "access_roles": ["collaborator"],
                    "access_type": "direct",
                },
                {
                    "user_id": 3,
                    "email": "team@example.com",
                    "full_name": "Team Member",
                    "access_roles": ["collaborator"],
                    "access_type": "team",
                    "team_id": 789,
                },
            ],
            "resource_id": rid,
        }
        mock_http_client.add_response(
            f"/data_sources/{rid}/users_access_insights", payload
        )

        insights = mock_client.sources.get_users_access_insights(rid)

        assert "users" in insights
        assert len(insights["users"]) == 3
        assert insights["resource_id"] == rid
        mock_http_client.assert_request_made(
            "GET", f"/data_sources/{rid}/users_access_insights"
        )

    def test_get_users_access_insights_empty_list(
        self, mock_client, mock_http_client
    ):
        """A resource with no accessors yields an empty users list."""
        rid = 123
        payload = {"users": [], "resource_id": rid}
        mock_http_client.add_response(
            f"/data_sources/{rid}/users_access_insights", payload
        )

        insights = mock_client.sources.get_users_access_insights(rid)

        assert insights["users"] == []
        mock_http_client.assert_request_made(
            "GET", f"/data_sources/{rid}/users_access_insights"
        )


@pytest.mark.unit
class TestListAccessible:
    """Behaviour of list_accessible."""

    def test_list_accessible_returns_accessible_resources(
        self, mock_client, mock_http_client
    ):
        """Every accessible resource is returned."""
        fixtures = [
            MockResponseBuilder.source(source_id=1, name="Source 1"),
            MockResponseBuilder.source(source_id=2, name="Source 2"),
            MockResponseBuilder.source(source_id=3, name="Source 3"),
        ]
        mock_http_client.add_response("/data_sources/accessible", fixtures)

        accessible = mock_client.sources.list_accessible()

        assert len(accessible) == 3
        mock_http_client.assert_request_made("GET", "/data_sources/accessible")

    def test_list_accessible_returns_empty_when_no_access(
        self, mock_client, mock_http_client
    ):
        """A user with no access gets an empty list back."""
        mock_http_client.add_response("/data_sources/accessible", [])

        assert mock_client.sources.list_accessible() == []
        mock_http_client.assert_request_made("GET", "/data_sources/accessible")

    def test_list_accessible_with_query_params(
        self, mock_client, mock_http_client
    ):
        """Extra keyword arguments are forwarded as query parameters."""
        mock_http_client.add_response(
            "/data_sources/accessible", [MockResponseBuilder.source(source_id=1)]
        )

        accessible = mock_client.sources.list_accessible(include_metrics=True)

        assert len(accessible) == 1
        mock_http_client.assert_request_made("GET", "/data_sources/accessible")
        sent = mock_http_client.get_last_request()
        assert sent.get("params", {}).get("include_metrics") is True


@pytest.mark.unit
class TestAccessInsightsErrorHandling:
    """Error translation for the insights endpoints."""

    def test_access_insights_not_found_returns_404(
        self, mock_client, mock_http_client
    ):
        """A 404 surfaces as NotFoundError."""
        rid = 99999
        mock_http_client.add_error(
            f"/data_sources/{rid}/access",
            create_http_error(404, "Resource not found"),
        )

        with pytest.raises(NotFoundError):
            mock_client.sources.get_access_insights(rid)

    def test_access_insights_permission_denied_returns_403(
        self, mock_client, mock_http_client
    ):
        """A 403 surfaces as AuthorizationError."""
        rid = 123
        mock_http_client.add_error(
            f"/data_sources/{rid}/access",
            create_http_error(403, "Forbidden - insufficient permissions"),
        )

        with pytest.raises(AuthorizationError):
            mock_client.sources.get_access_insights(rid)

    def test_users_access_insights_not_found(
        self, mock_client, mock_http_client
    ):
        """users_access_insights propagates 404 as NotFoundError."""
        rid = 99999
        mock_http_client.add_error(
            f"/data_sources/{rid}/users_access_insights",
            create_http_error(404, "Resource not found"),
        )

        with pytest.raises(NotFoundError):
            mock_client.sources.get_users_access_insights(rid)

    def test_users_access_insights_permission_denied(
        self, mock_client, mock_http_client
    ):
        """users_access_insights propagates 403 as AuthorizationError."""
        rid = 123
        mock_http_client.add_error(
            f"/data_sources/{rid}/users_access_insights",
            create_http_error(403, "Forbidden - owner access required"),
        )

        with pytest.raises(AuthorizationError):
            mock_client.sources.get_users_access_insights(rid)


@pytest.mark.unit
class TestAccessInsightsAcrossResources:
    """Insights operations behave uniformly across resource types."""

    @pytest.mark.parametrize(
        "resource_name,endpoint",
        [
            ("sources", "/data_sources"),
            ("destinations", "/data_sinks"),
            ("nexsets", "/data_sets"),
            ("credentials", "/data_credentials"),
            ("lookups", "/data_maps"),
            ("projects", "/projects"),
        ],
    )
    def test_get_access_insights_for_resource_type(
        self, mock_client, mock_http_client, resource_name, endpoint
    ):
        """get_access_insights hits the right endpoint per resource type."""
        rid = 123
        payload = MockResponseBuilder.access_insights_response(resource_id=rid)
        mock_http_client.add_response(f"{endpoint}/{rid}/access", payload)

        insights = getattr(mock_client, resource_name).get_access_insights(rid)

        assert insights["access_granted"] is True
        mock_http_client.assert_request_made("GET", f"{endpoint}/{rid}/access")

    @pytest.mark.parametrize(
        "resource_name,endpoint",
        [
            ("sources", "/data_sources"),
            ("destinations", "/data_sinks"),
            ("nexsets", "/data_sets"),
        ],
    )
    def test_get_users_access_insights_for_resource_type(
        self, mock_client, mock_http_client, resource_name, endpoint
    ):
        """get_users_access_insights hits the right endpoint per type."""
        rid = 123
        payload = {
            "users": [
                {
                    "user_id": 1,
                    "email": "user@example.com",
                    "access_roles": ["owner"],
                }
            ],
            "resource_id": rid,
        }
        mock_http_client.add_response(
            f"{endpoint}/{rid}/users_access_insights", payload
        )

        insights = getattr(mock_client, resource_name).get_users_access_insights(rid)

        assert "users" in insights
        mock_http_client.assert_request_made(
            "GET", f"{endpoint}/{rid}/users_access_insights"
        )

    @pytest.mark.parametrize(
        "resource_name,endpoint,builder_method",
        [
            ("sources", "/data_sources", "source"),
            ("destinations", "/data_sinks", "destination"),
            ("nexsets", "/data_sets", "nexset"),
        ],
    )
    def test_list_accessible_for_resource_type(
        self, mock_client, mock_http_client, resource_name, endpoint, builder_method
    ):
        """list_accessible hits the right endpoint per resource type."""
        build = getattr(MockResponseBuilder, builder_method)
        mock_http_client.add_response(
            f"{endpoint}/accessible", [build() for _ in range(2)]
        )

        accessible = getattr(mock_client, resource_name).list_accessible()

        assert len(accessible) == 2
        mock_http_client.assert_request_made("GET", f"{endpoint}/accessible")
@pytest.mark.unit
class TestApiKeysResource:
    """HTTP behaviour of the api_keys resource against a stubbed client."""

    def test_list_api_keys_success(
        self, mock_client, mock_http_client, sample_api_keys_list
    ):
        """list() GETs /api_keys and yields one model per entry."""
        mock_http_client.add_response("/api_keys", sample_api_keys_list)

        keys = mock_client.api_keys.list()

        assert len(keys) == 3
        assert_model_list_valid(keys, ApiKey)
        mock_http_client.assert_request_made("GET", "/api_keys")

    def test_list_grouped_api_keys_success(
        self, mock_client, mock_http_client, sample_api_keys_grouped
    ):
        """list_grouped() parses the grouped index response."""
        mock_http_client.add_response("/api_keys", sample_api_keys_grouped)

        grouped = mock_client.api_keys.list_grouped()

        assert isinstance(grouped, ApiKeysIndex)
        assert len(grouped.data_sets) == 1
        mock_http_client.assert_request_made("GET", "/api_keys")

    def test_get_api_key_success(
        self, mock_client, mock_http_client, sample_api_key_response
    ):
        """get() by numeric id GETs /api_keys/{id}."""
        key_id = 123
        mock_http_client.add_response(f"/api_keys/{key_id}", sample_api_key_response)

        key = mock_client.api_keys.get(key_id)

        assert_model_valid(key, {"id": key_id})
        mock_http_client.assert_request_made("GET", f"/api_keys/{key_id}")

    def test_get_api_key_by_value(
        self, mock_client, mock_http_client, sample_api_key_response
    ):
        """get() also accepts the raw key value as the identifier."""
        key_value = "ak_test_abc123"
        mock_http_client.add_response(f"/api_keys/{key_value}", sample_api_key_response)

        key = mock_client.api_keys.get(key_value)

        assert key.api_key == key_value
        mock_http_client.assert_request_made("GET", f"/api_keys/{key_value}")

    def test_search_api_keys_success(
        self, mock_client, mock_http_client, sample_api_keys_list
    ):
        """search() POSTs the filters to /api_keys/search."""
        mock_http_client.add_response("/api_keys/search", sample_api_keys_list)

        filters = {"scope": "read"}
        keys = mock_client.api_keys.search(filters)

        assert len(keys) == 3
        mock_http_client.assert_request_made("POST", "/api_keys/search")


@pytest.mark.unit
class TestApiKeyModels:
    """Model validation for API key payloads."""

    def test_api_key_model_validation(self, sample_api_key_response):
        """A full payload populates the ApiKey model."""
        key = ApiKey.model_validate(sample_api_key_response)

        assert key.id == 123
        assert key.name == "Dataset API Key"
        assert key.scope == "read"
        assert key.api_key == "ak_test_abc123"

    def test_api_keys_index_model_validation(self, sample_api_keys_grouped):
        """A grouped payload populates the ApiKeysIndex model."""
        index = ApiKeysIndex.model_validate(sample_api_keys_grouped)

        assert len(index.data_sets) == 1
        assert len(index.data_sinks) == 0

    def test_api_key_model_with_minimal_data(self):
        """Only ``id`` is required; optional fields default to None."""
        key = ApiKey.model_validate({"id": 1})

        assert key.id == 1
        assert key.name is None
        assert key.api_key is None


@pytest.mark.unit
class TestApiKeysUnsupportedOperations:
    """Write operations on the read-only api_keys resource must fail."""

    def test_create_raises_not_implemented(self, mock_client):
        with pytest.raises(NotImplementedError, match="read-only"):
            mock_client.api_keys.create({"name": "test"})

    def test_update_raises_not_implemented(self, mock_client):
        with pytest.raises(NotImplementedError, match="read-only"):
            mock_client.api_keys.update(123, {"name": "test"})

    def test_delete_raises_not_implemented(self, mock_client):
        with pytest.raises(NotImplementedError, match="read-only"):
            mock_client.api_keys.delete(123)

    def test_copy_raises_not_implemented(self, mock_client):
        with pytest.raises(NotImplementedError, match="read-only"):
            mock_client.api_keys.copy(123)

    def test_activate_raises_not_implemented(self, mock_client):
        with pytest.raises(NotImplementedError, match="read-only"):
            mock_client.api_keys.activate(123)

    def test_pause_raises_not_implemented(self, mock_client):
        with pytest.raises(NotImplementedError, match="read-only"):
            mock_client.api_keys.pause(123)
+""" + +import pytest + +from nexla_sdk.exceptions import AuthorizationError, NotFoundError +from tests.utils import MockResponseBuilder, create_http_error + + +@pytest.mark.unit +class TestGetAuditLog: + """Tests for get_audit_log operations.""" + + def test_get_audit_log_returns_list(self, mock_client, mock_http_client): + """Test that get_audit_log returns a list of audit log entries.""" + # Arrange + resource_id = 123 + audit_entries = [ + MockResponseBuilder.audit_log_entry(event="create"), + MockResponseBuilder.audit_log_entry(event="update"), + ] + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", audit_entries + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert isinstance(result, list) + assert len(result) == 2 + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/audit_log" + ) + + def test_get_audit_log_empty_list(self, mock_client, mock_http_client): + """Test getting audit log returns empty list when no entries exist.""" + # Arrange + resource_id = 123 + mock_http_client.add_response(f"/data_sources/{resource_id}/audit_log", []) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert result == [] + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/audit_log" + ) + + def test_get_audit_log_single_entry(self, mock_client, mock_http_client): + """Test getting audit log with a single entry.""" + # Arrange + resource_id = 456 + audit_entry = MockResponseBuilder.audit_log_entry( + item_type="DataSource", + item_id=resource_id, + event="create", + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert len(result) == 1 + assert result[0]["event"] == "create" + assert result[0]["item_type"] == "DataSource" + + +@pytest.mark.unit +class TestAuditLogEntryFields: + """Tests for 
verifying audit log entry structure and fields.""" + + def test_entry_has_required_fields(self, mock_client, mock_http_client): + """Test that audit log entry has required fields.""" + # Arrange + resource_id = 123 + audit_entry = MockResponseBuilder.audit_log_entry( + item_type="DataSource", + item_id=resource_id, + event="update", + object_changes={"name": ["Old Name", "New Name"]}, + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert len(result) == 1 + entry = result[0] + # Verify required fields are present + assert "item_type" in entry + assert "item_id" in entry + assert "event" in entry + assert "object_changes" in entry + assert "created_at" in entry + + def test_entry_has_user_information(self, mock_client, mock_http_client): + """Test that audit log entry contains user information.""" + # Arrange + resource_id = 123 + audit_entry = MockResponseBuilder.audit_log_entry( + owner_id=456, + owner_email="user@example.com", + user={"id": 456, "email": "user@example.com"}, + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + entry = result[0] + assert "owner_id" in entry + assert "owner_email" in entry + assert "user" in entry + assert entry["user"]["email"] == "user@example.com" + + def test_entry_has_request_metadata(self, mock_client, mock_http_client): + """Test that audit log entry contains request metadata.""" + # Arrange + resource_id = 123 + audit_entry = MockResponseBuilder.audit_log_entry( + request_ip="192.168.1.1", + request_user_agent="Mozilla/5.0 Test", + request_url="/api/v1/data_sources/123", + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + entry = 
result[0] + assert "request_ip" in entry + assert "request_user_agent" in entry + assert "request_url" in entry + + +@pytest.mark.unit +class TestAuditLogEventTypes: + """Tests for different audit log event types.""" + + def test_create_event(self, mock_client, mock_http_client): + """Test audit log entry with create event.""" + # Arrange + resource_id = 123 + audit_entry = MockResponseBuilder.audit_log_entry( + item_type="DataSource", + item_id=resource_id, + event="create", + object_changes={}, + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert len(result) == 1 + assert result[0]["event"] == "create" + + def test_update_event_includes_object_changes(self, mock_client, mock_http_client): + """Test audit log entry with update event includes object_changes.""" + # Arrange + resource_id = 123 + object_changes = { + "name": ["Original Name", "Updated Name"], + "status": ["DRAFT", "ACTIVE"], + } + audit_entry = MockResponseBuilder.audit_log_entry( + item_type="DataSource", + item_id=resource_id, + event="update", + object_changes=object_changes, + change_summary=["name", "status"], + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert len(result) == 1 + entry = result[0] + assert entry["event"] == "update" + assert "object_changes" in entry + assert entry["object_changes"]["name"] == ["Original Name", "Updated Name"] + assert entry["object_changes"]["status"] == ["DRAFT", "ACTIVE"] + + def test_destroy_event(self, mock_client, mock_http_client): + """Test audit log entry with destroy event.""" + # Arrange + resource_id = 123 + audit_entry = MockResponseBuilder.audit_log_entry( + item_type="DataSource", + item_id=resource_id, + event="destroy", + object_changes={}, + ) + mock_http_client.add_response( 
+ f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert len(result) == 1 + assert result[0]["event"] == "destroy" + + def test_association_event(self, mock_client, mock_http_client): + """Test audit log entry with association events (e.g., add_accessor).""" + # Arrange + resource_id = 123 + audit_entry = MockResponseBuilder.audit_log_entry( + item_type="DataSource", + item_id=resource_id, + event="add_accessor", + object_changes={ + "accessor_type": [None, "USER"], + "accessor_id": [None, 456], + }, + ) + mock_http_client.add_response( + f"/data_sources/{resource_id}/audit_log", [audit_entry] + ) + + # Act + result = mock_client.sources.get_audit_log(resource_id) + + # Assert + assert len(result) == 1 + assert result[0]["event"] == "add_accessor" + assert result[0]["object_changes"]["accessor_type"] == [None, "USER"] + + +@pytest.mark.unit +class TestAuditLogErrorHandling: + """Tests for audit log error scenarios.""" + + def test_audit_log_not_found_returns_404(self, mock_client, mock_http_client): + """Test that accessing audit log of non-existent resource returns 404.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/audit_log", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.get_audit_log(resource_id) + + def test_audit_log_permission_denied_returns_403( + self, mock_client, mock_http_client + ): + """Test that unauthorized access to audit log returns 403.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/data_sources/{resource_id}/audit_log", + create_http_error(403, "Forbidden"), + ) + + # Act & Assert + with pytest.raises(AuthorizationError): + mock_client.sources.get_audit_log(resource_id) + + +@pytest.mark.unit +class TestAuditLogAcrossResources: + """Tests verifying audit log operations work across resource types. 
+ + Note: TeamsResource has its own get_audit_log implementation that returns + LogEntry model objects instead of raw dicts. It is tested separately. + """ + + @pytest.mark.parametrize( + "resource_name,endpoint,item_type", + [ + ("sources", "/data_sources", "DataSource"), + ("destinations", "/data_sinks", "DataSink"), + ("nexsets", "/data_sets", "DataSet"), + ("credentials", "/data_credentials", "DataCredential"), + ("lookups", "/data_maps", "DataMap"), + ("projects", "/projects", "Project"), + ], + ) + def test_get_audit_log_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint, item_type + ): + """Test get_audit_log works for different resource types.""" + # Arrange + resource_id = 123 + audit_entries = [ + MockResponseBuilder.audit_log_entry( + item_type=item_type, + item_id=resource_id, + event="create", + ), + MockResponseBuilder.audit_log_entry( + item_type=item_type, + item_id=resource_id, + event="update", + ), + ] + mock_http_client.add_response( + f"{endpoint}/{resource_id}/audit_log", audit_entries + ) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.get_audit_log(resource_id) + + # Assert + assert len(result) == 2 + assert result[0]["item_type"] == item_type + mock_http_client.assert_request_made( + "GET", f"{endpoint}/{resource_id}/audit_log" + ) + + def test_teams_get_audit_log_returns_model_objects( + self, mock_client, mock_http_client + ): + """Test teams.get_audit_log returns LogEntry model objects. + + Note: TeamsResource has its own get_audit_log that parses responses + into LogEntry model objects rather than returning raw dicts. 
+ """ + # Arrange + team_id = 123 + audit_entries = [ + MockResponseBuilder.audit_log_entry( + item_type="Team", + item_id=team_id, + event="create", + ), + MockResponseBuilder.audit_log_entry( + item_type="Team", + item_id=team_id, + event="update", + ), + ] + mock_http_client.add_response(f"/teams/{team_id}/audit_log", audit_entries) + + # Act + result = mock_client.teams.get_audit_log(team_id) + + # Assert + assert len(result) == 2 + # LogEntry is a model object, so access via attributes + assert result[0].item_type == "Team" + assert result[0].event == "create" + assert result[1].event == "update" + mock_http_client.assert_request_made("GET", f"/teams/{team_id}/audit_log") + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_audit_log_empty_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test get_audit_log returns empty list for resource types with no history.""" + # Arrange + resource_id = 789 + mock_http_client.add_response(f"{endpoint}/{resource_id}/audit_log", []) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.get_audit_log(resource_id) + + # Assert + assert result == [] + mock_http_client.assert_request_made( + "GET", f"{endpoint}/{resource_id}/audit_log" + ) diff --git a/tests/unit/test_auth_templates.py b/tests/unit/test_auth_templates.py new file mode 100644 index 0000000..988d4f7 --- /dev/null +++ b/tests/unit/test_auth_templates.py @@ -0,0 +1,200 @@ +"""Unit tests for auth templates resource.""" + +import pytest + +from nexla_sdk.models.auth_templates.requests import ( + AuthTemplateCreate, + AuthTemplateUpdate, +) +from nexla_sdk.models.auth_templates.responses import AuthTemplate +from tests.utils import assert_model_list_valid, assert_model_valid + + +# Sample response data +SAMPLE_AUTH_TEMPLATE = { + "id": 123, 
+ "name": "oauth2_standard", + "display_name": "OAuth 2.0 Standard", + "description": "Standard OAuth 2.0 authentication template", + "config": {}, + "credentials_type": "oauth2", + "vendor_id": 456, + "vendor": {"id": 456, "name": "salesforce", "display_name": "Salesforce"}, + "auth_parameters": [ + { + "id": 1, + "name": "client_id", + "display_name": "Client ID", + "param_type": "string", + "required": True, + }, + { + "id": 2, + "name": "client_secret", + "display_name": "Client Secret", + "param_type": "password", + "required": True, + }, + ], + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", +} + +SAMPLE_AUTH_TEMPLATES_LIST = [ + SAMPLE_AUTH_TEMPLATE, + {**SAMPLE_AUTH_TEMPLATE, "id": 124, "name": "api_key", "display_name": "API Key"}, + {**SAMPLE_AUTH_TEMPLATE, "id": 125, "name": "basic_auth", "display_name": "Basic Auth"}, +] + + +@pytest.fixture +def sample_auth_template_response(): + """Sample auth template response.""" + return SAMPLE_AUTH_TEMPLATE.copy() + + +@pytest.fixture +def sample_auth_templates_list(): + """Sample auth templates list response.""" + return [t.copy() for t in SAMPLE_AUTH_TEMPLATES_LIST] + + +@pytest.mark.unit +class TestAuthTemplatesResource: + """Unit tests for AuthTemplatesResource using mocks.""" + + def test_list_auth_templates_success( + self, mock_client, mock_http_client, sample_auth_templates_list + ): + """Test listing auth templates with successful response.""" + mock_http_client.add_response("/auth_templates", sample_auth_templates_list) + + templates = mock_client.auth_templates.list() + + assert len(templates) == 3 + assert_model_list_valid(templates, AuthTemplate) + mock_http_client.assert_request_made("GET", "/auth_templates") + + def test_get_auth_template_by_id( + self, mock_client, mock_http_client, sample_auth_template_response + ): + """Test getting an auth template by ID.""" + template_id = 123 + mock_http_client.add_response( + f"/auth_templates/{template_id}", 
sample_auth_template_response + ) + + template = mock_client.auth_templates.get(template_id) + + assert_model_valid(template, {"id": template_id}) + mock_http_client.assert_request_made("GET", f"/auth_templates/{template_id}") + + def test_get_auth_template_by_name( + self, mock_client, mock_http_client, sample_auth_template_response + ): + """Test getting an auth template by name.""" + mock_http_client.add_response("/auth_templates", sample_auth_template_response) + + template = mock_client.auth_templates.get_by_name("oauth2_standard") + + assert template.name == "oauth2_standard" + mock_http_client.assert_request_made("GET", "/auth_templates") + + def test_create_auth_template_success( + self, mock_client, mock_http_client, sample_auth_template_response + ): + """Test creating an auth template.""" + mock_http_client.add_response("/auth_templates", sample_auth_template_response) + + create_data = AuthTemplateCreate( + name="new_template", + vendor_id=456, + display_name="New Template", + ) + template = mock_client.auth_templates.create(create_data) + + assert_model_valid(template, {"name": "oauth2_standard"}) + mock_http_client.assert_request_made("POST", "/auth_templates") + + def test_update_auth_template_success( + self, mock_client, mock_http_client, sample_auth_template_response + ): + """Test updating an auth template.""" + template_id = 123 + updated_response = {**sample_auth_template_response, "description": "Updated"} + mock_http_client.add_response( + f"/auth_templates/{template_id}", updated_response + ) + + update_data = AuthTemplateUpdate(description="Updated") + template = mock_client.auth_templates.update(template_id, update_data) + + assert template.description == "Updated" + mock_http_client.assert_request_made("PUT", f"/auth_templates/{template_id}") + + def test_delete_auth_template_success(self, mock_client, mock_http_client): + """Test deleting an auth template.""" + template_id = 123 + mock_http_client.add_response( + 
f"/auth_templates/{template_id}", {"success": True} + ) + + result = mock_client.auth_templates.delete(template_id) + + assert result["success"] is True + mock_http_client.assert_request_made("DELETE", f"/auth_templates/{template_id}") + + +@pytest.mark.unit +class TestAuthTemplateModels: + """Unit tests for auth template models.""" + + def test_auth_template_model_validation(self, sample_auth_template_response): + """Test AuthTemplate model parses valid data correctly.""" + template = AuthTemplate.model_validate(sample_auth_template_response) + + assert template.id == 123 + assert template.name == "oauth2_standard" + assert template.display_name == "OAuth 2.0 Standard" + assert template.vendor.id == 456 + assert len(template.auth_parameters) == 2 + + def test_auth_template_model_with_minimal_data(self): + """Test AuthTemplate model with minimal required fields.""" + minimal_data = { + "id": 1, + } + template = AuthTemplate.model_validate(minimal_data) + + assert template.id == 1 + assert template.name is None + assert template.auth_parameters == [] + + def test_auth_template_create_model_serialization(self): + """Test AuthTemplateCreate model serialization.""" + create_data = AuthTemplateCreate( + name="new_template", + vendor_id=456, + display_name="New Template", + description="A new auth template", + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["name"] == "new_template" + assert data["vendor_id"] == 456 + assert data["display_name"] == "New Template" + + def test_auth_template_update_model_serialization(self): + """Test AuthTemplateUpdate model serialization.""" + update_data = AuthTemplateUpdate( + display_name="Updated Name", + description="Updated description", + ) + + data = update_data.model_dump(exclude_none=True) + + assert data["display_name"] == "Updated Name" + assert data["description"] == "Updated description" + assert "name" not in data + assert "vendor_id" not in data diff --git a/tests/unit/test_cluster_endpoints.py 
b/tests/unit/test_cluster_endpoints.py new file mode 100644 index 0000000..fedf2c3 --- /dev/null +++ b/tests/unit/test_cluster_endpoints.py @@ -0,0 +1,218 @@ +"""Unit tests for cluster endpoints resource.""" + +import pytest + +from nexla_sdk.models.clusters.requests import ( + ClusterEndpointCreate, + ClusterEndpointUpdate, +) +from nexla_sdk.models.clusters.responses import ClusterEndpoint +from tests.utils import assert_model_list_valid, assert_model_valid + + +# Sample response data +SAMPLE_CLUSTER_ENDPOINT = { + "id": 456, + "cluster_id": 123, + "org_id": 1, + "service": "data_ingestion", + "protocol": "https", + "host": "ingestion.example.com", + "port": 443, + "context": "/api/v1", + "org": {"id": 1, "name": "Test Org", "email_domain": "example.com"}, + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", +} + +SAMPLE_ENDPOINTS_LIST = [ + SAMPLE_CLUSTER_ENDPOINT, + {**SAMPLE_CLUSTER_ENDPOINT, "id": 457, "service": "metrics"}, + {**SAMPLE_CLUSTER_ENDPOINT, "id": 458, "service": "transform"}, +] + + +@pytest.fixture +def sample_endpoint_response(): + """Sample cluster endpoint response.""" + return SAMPLE_CLUSTER_ENDPOINT.copy() + + +@pytest.fixture +def sample_endpoints_list(): + """Sample cluster endpoints list response.""" + return [e.copy() for e in SAMPLE_ENDPOINTS_LIST] + + +@pytest.mark.unit +class TestClusterEndpointsResource: + """Unit tests for ClusterEndpointsResource using mocks.""" + + def test_list_endpoints_success( + self, mock_client, mock_http_client, sample_endpoints_list + ): + """Test listing cluster endpoints with successful response.""" + mock_http_client.add_response("/cluster_endpoints", sample_endpoints_list) + + endpoints = mock_client.cluster_endpoints.list() + + assert len(endpoints) == 3 + assert_model_list_valid(endpoints, ClusterEndpoint) + mock_http_client.assert_request_made("GET", "/cluster_endpoints") + + def test_list_endpoints_with_pagination( + self, mock_client, mock_http_client, 
sample_endpoints_list + ): + """Test listing endpoints with pagination.""" + mock_http_client.add_response("/cluster_endpoints", sample_endpoints_list) + + endpoints = mock_client.cluster_endpoints.list(page=1, per_page=10) + + assert len(endpoints) == 3 + request = mock_http_client.get_request() + assert "page" in str(request) or "per_page" in str(request) + + def test_get_endpoint_success( + self, mock_client, mock_http_client, sample_endpoint_response + ): + """Test getting a single cluster endpoint.""" + endpoint_id = 456 + mock_http_client.add_response( + f"/cluster_endpoints/{endpoint_id}", sample_endpoint_response + ) + + endpoint = mock_client.cluster_endpoints.get(endpoint_id) + + assert_model_valid(endpoint, {"id": endpoint_id}) + mock_http_client.assert_request_made( + "GET", f"/cluster_endpoints/{endpoint_id}" + ) + + def test_create_endpoint_success( + self, mock_client, mock_http_client, sample_endpoint_response + ): + """Test creating a cluster endpoint.""" + mock_http_client.add_response("/cluster_endpoints", sample_endpoint_response) + + create_data = ClusterEndpointCreate( + cluster_id=123, + service="data_ingestion", + protocol="https", + host="ingestion.example.com", + port=443, + ) + endpoint = mock_client.cluster_endpoints.create(create_data) + + assert_model_valid(endpoint, {"service": "data_ingestion"}) + mock_http_client.assert_request_made("POST", "/cluster_endpoints") + + def test_update_endpoint_success( + self, mock_client, mock_http_client, sample_endpoint_response + ): + """Test updating a cluster endpoint.""" + endpoint_id = 456 + updated_response = {**sample_endpoint_response, "host": "new-host.example.com"} + mock_http_client.add_response( + f"/cluster_endpoints/{endpoint_id}", updated_response + ) + + update_data = ClusterEndpointUpdate(host="new-host.example.com") + endpoint = mock_client.cluster_endpoints.update(endpoint_id, update_data) + + assert endpoint.host == "new-host.example.com" + mock_http_client.assert_request_made( + 
"PUT", f"/cluster_endpoints/{endpoint_id}" + ) + + def test_get_audit_log_success(self, mock_client, mock_http_client): + """Test getting audit log for an endpoint.""" + endpoint_id = 456 + audit_log = [ + {"action": "create", "timestamp": "2025-01-01T00:00:00Z"}, + {"action": "update", "timestamp": "2025-01-02T00:00:00Z"}, + ] + mock_http_client.add_response( + f"/cluster_endpoints/{endpoint_id}/audit_log", audit_log + ) + + result = mock_client.cluster_endpoints.get_audit_log(endpoint_id) + + assert len(result) == 2 + mock_http_client.assert_request_made( + "GET", f"/cluster_endpoints/{endpoint_id}/audit_log" + ) + + def test_get_audit_log_with_pagination(self, mock_client, mock_http_client): + """Test getting audit log with pagination.""" + endpoint_id = 456 + audit_log = [{"action": "create", "timestamp": "2025-01-01T00:00:00Z"}] + mock_http_client.add_response( + f"/cluster_endpoints/{endpoint_id}/audit_log", audit_log + ) + + result = mock_client.cluster_endpoints.get_audit_log( + endpoint_id, page=1, per_page=10 + ) + + assert len(result) == 1 + request = mock_http_client.get_request() + assert "page" in str(request) or "per_page" in str(request) + + +@pytest.mark.unit +class TestClusterEndpointModels: + """Unit tests for cluster endpoint models.""" + + def test_endpoint_model_validation(self, sample_endpoint_response): + """Test ClusterEndpoint model parses valid data correctly.""" + endpoint = ClusterEndpoint.model_validate(sample_endpoint_response) + + assert endpoint.id == 456 + assert endpoint.cluster_id == 123 + assert endpoint.service == "data_ingestion" + assert endpoint.protocol == "https" + assert endpoint.host == "ingestion.example.com" + assert endpoint.port == 443 + + def test_endpoint_model_with_minimal_data(self): + """Test ClusterEndpoint model with minimal required fields.""" + minimal_data = { + "id": 1, + } + endpoint = ClusterEndpoint.model_validate(minimal_data) + + assert endpoint.id == 1 + assert endpoint.service is None + assert 
endpoint.host is None + + def test_endpoint_create_model_serialization(self): + """Test ClusterEndpointCreate model serialization.""" + create_data = ClusterEndpointCreate( + cluster_id=123, + service="data_ingestion", + protocol="https", + host="api.example.com", + port=443, + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["cluster_id"] == 123 + assert data["service"] == "data_ingestion" + assert data["protocol"] == "https" + assert data["host"] == "api.example.com" + assert data["port"] == 443 + + def test_endpoint_update_model_serialization(self): + """Test ClusterEndpointUpdate model serialization.""" + update_data = ClusterEndpointUpdate( + host="new-host.example.com", + port=8443, + ) + + data = update_data.model_dump(exclude_none=True) + + assert data["host"] == "new-host.example.com" + assert data["port"] == 8443 + assert "cluster_id" not in data + assert "service" not in data diff --git a/tests/unit/test_clusters.py b/tests/unit/test_clusters.py new file mode 100644 index 0000000..5198217 --- /dev/null +++ b/tests/unit/test_clusters.py @@ -0,0 +1,288 @@ +"""Unit tests for clusters resource.""" + +import pytest + +from nexla_sdk.models.clusters.requests import ( + ClusterCreate, + ClusterEndpointItem, + ClusterUpdate, +) +from nexla_sdk.models.clusters.responses import Cluster +from tests.utils import assert_model_list_valid, assert_model_valid + + +# Sample response data +SAMPLE_CLUSTER = { + "id": 123, + "org_id": 1, + "uid": "cluster-uid-123", + "is_default": False, + "is_private": False, + "name": "Production Cluster", + "description": "Main production cluster", + "status": "ACTIVE", + "region": "us-west-2", + "provider": "aws", + "org": {"id": 1, "name": "Test Org", "email_domain": "example.com"}, + "endpoints": [ + { + "id": 1, + "service": "data_ingestion", + "protocol": "https", + "host": "ingestion.example.com", + "port": 443, + } + ], + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", +} + 
+SAMPLE_CLUSTERS_LIST = [ + SAMPLE_CLUSTER, + {**SAMPLE_CLUSTER, "id": 124, "name": "Staging Cluster"}, + {**SAMPLE_CLUSTER, "id": 125, "name": "Dev Cluster"}, +] + + +@pytest.fixture +def sample_cluster_response(): + """Sample cluster response.""" + return SAMPLE_CLUSTER.copy() + + +@pytest.fixture +def sample_clusters_list(): + """Sample clusters list response.""" + return [c.copy() for c in SAMPLE_CLUSTERS_LIST] + + +@pytest.mark.unit +class TestClustersResource: + """Unit tests for ClustersResource using mocks.""" + + def test_list_clusters_success( + self, mock_client, mock_http_client, sample_clusters_list + ): + """Test listing clusters with successful response.""" + mock_http_client.add_response("/clusters", sample_clusters_list) + + clusters = mock_client.clusters.list() + + assert len(clusters) == 3 + assert_model_list_valid(clusters, Cluster) + mock_http_client.assert_request_made("GET", "/clusters") + + def test_list_clusters_with_pagination( + self, mock_client, mock_http_client, sample_clusters_list + ): + """Test listing clusters with pagination.""" + mock_http_client.add_response("/clusters", sample_clusters_list) + + clusters = mock_client.clusters.list(page=1, per_page=10) + + assert len(clusters) == 3 + request = mock_http_client.get_request() + assert "page" in str(request) or "per_page" in str(request) + + def test_get_cluster_success( + self, mock_client, mock_http_client, sample_cluster_response + ): + """Test getting a single cluster.""" + cluster_id = 123 + mock_http_client.add_response( + f"/clusters/{cluster_id}", sample_cluster_response + ) + + cluster = mock_client.clusters.get(cluster_id) + + assert_model_valid(cluster, {"id": cluster_id}) + mock_http_client.assert_request_made("GET", f"/clusters/{cluster_id}") + + def test_create_cluster_success( + self, mock_client, mock_http_client, sample_cluster_response + ): + """Test creating a cluster.""" + mock_http_client.add_response("/clusters", sample_cluster_response) + + create_data = 
ClusterCreate( + org_id=1, + name="Production Cluster", + region="us-west-2", + provider="aws", + ) + cluster = mock_client.clusters.create(create_data) + + assert_model_valid(cluster, {"name": "Production Cluster"}) + mock_http_client.assert_request_made("POST", "/clusters") + + def test_create_cluster_with_endpoints( + self, mock_client, mock_http_client, sample_cluster_response + ): + """Test creating a cluster with endpoints.""" + mock_http_client.add_response("/clusters", sample_cluster_response) + + create_data = ClusterCreate( + org_id=1, + name="Production Cluster", + region="us-west-2", + endpoints=[ + ClusterEndpointItem( + service="data_ingestion", + protocol="https", + host="ingestion.example.com", + port=443, + ) + ], + ) + cluster = mock_client.clusters.create(create_data) + + assert cluster.endpoints is not None + mock_http_client.assert_request_made("POST", "/clusters") + + def test_update_cluster_success( + self, mock_client, mock_http_client, sample_cluster_response + ): + """Test updating a cluster.""" + cluster_id = 123 + updated_response = {**sample_cluster_response, "name": "Updated Cluster"} + mock_http_client.add_response(f"/clusters/{cluster_id}", updated_response) + + update_data = ClusterUpdate(name="Updated Cluster") + cluster = mock_client.clusters.update(cluster_id, update_data) + + assert cluster.name == "Updated Cluster" + mock_http_client.assert_request_made("PUT", f"/clusters/{cluster_id}") + + def test_delete_cluster_success(self, mock_client, mock_http_client): + """Test deleting a cluster.""" + cluster_id = 123 + mock_http_client.add_response(f"/clusters/{cluster_id}", {"success": True}) + + result = mock_client.clusters.delete(cluster_id) + + assert result["success"] is True + mock_http_client.assert_request_made("DELETE", f"/clusters/{cluster_id}") + + def test_activate_cluster_success( + self, mock_client, mock_http_client, sample_cluster_response + ): + """Test activating a cluster.""" + cluster_id = 123 + active_response = 
{**sample_cluster_response, "status": "ACTIVE"} + mock_http_client.add_response( + f"/clusters/{cluster_id}/activate", active_response + ) + + cluster = mock_client.clusters.activate(cluster_id) + + assert cluster.status == "ACTIVE" + mock_http_client.assert_request_made("PUT", f"/clusters/{cluster_id}/activate") + + def test_set_default_cluster_success( + self, mock_client, mock_http_client, sample_cluster_response + ): + """Test setting a cluster as default.""" + cluster_id = 123 + default_response = {**sample_cluster_response, "is_default": True} + mock_http_client.add_response( + f"/clusters/default/{cluster_id}", default_response + ) + + cluster = mock_client.clusters.set_default(cluster_id) + + assert cluster.is_default is True + mock_http_client.assert_request_made("PUT", f"/clusters/default/{cluster_id}") + + def test_delete_endpoint_success(self, mock_client, mock_http_client): + """Test deleting a cluster endpoint.""" + cluster_id = 123 + endpoint_id = 456 + mock_http_client.add_response( + f"/clusters/{cluster_id}/endpoints/{endpoint_id}", {"success": True} + ) + + result = mock_client.clusters.delete_endpoint(cluster_id, endpoint_id) + + assert result["success"] is True + mock_http_client.assert_request_made( + "DELETE", f"/clusters/{cluster_id}/endpoints/{endpoint_id}" + ) + + +@pytest.mark.unit +class TestClusterModels: + """Unit tests for cluster models.""" + + def test_cluster_model_validation(self, sample_cluster_response): + """Test Cluster model parses valid data correctly.""" + cluster = Cluster.model_validate(sample_cluster_response) + + assert cluster.id == 123 + assert cluster.name == "Production Cluster" + assert cluster.status == "ACTIVE" + assert cluster.region == "us-west-2" + assert cluster.provider == "aws" + assert len(cluster.endpoints) == 1 + + def test_cluster_model_with_minimal_data(self): + """Test Cluster model with minimal required fields.""" + minimal_data = { + "id": 1, + } + cluster = Cluster.model_validate(minimal_data) + + 
assert cluster.id == 1 + assert cluster.name is None + assert cluster.endpoints == [] + + def test_cluster_create_model_serialization(self): + """Test ClusterCreate model serialization.""" + create_data = ClusterCreate( + org_id=1, + name="My Cluster", + region="us-east-1", + provider="aws", + description="Test cluster", + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["org_id"] == 1 + assert data["name"] == "My Cluster" + assert data["region"] == "us-east-1" + assert data["provider"] == "aws" + + def test_cluster_create_with_endpoints_serialization(self): + """Test ClusterCreate with endpoints serialization.""" + create_data = ClusterCreate( + org_id=1, + name="My Cluster", + region="us-east-1", + endpoints=[ + ClusterEndpointItem( + service="data_ingestion", + protocol="https", + host="api.example.com", + port=443, + ) + ], + ) + + data = create_data.model_dump(exclude_none=True) + + assert "endpoints" in data + assert len(data["endpoints"]) == 1 + assert data["endpoints"][0]["service"] == "data_ingestion" + + def test_cluster_update_model_serialization(self): + """Test ClusterUpdate model serialization.""" + update_data = ClusterUpdate( + name="Updated Name", + description="Updated description", + ) + + data = update_data.model_dump(exclude_none=True) + + assert data["name"] == "Updated Name" + assert data["description"] == "Updated description" + assert "region" not in data diff --git a/tests/unit/test_connector_enums.py b/tests/unit/test_connector_enums.py new file mode 100644 index 0000000..64af68c --- /dev/null +++ b/tests/unit/test_connector_enums.py @@ -0,0 +1,311 @@ +"""Unit tests for connector enum handling.""" + +import pytest + +from nexla_sdk.models.connectors.enums import ConnectionType, ConnectorType +from nexla_sdk.models.credentials.enums import CredentialType +from nexla_sdk.models.destinations.enums import DestinationType +from nexla_sdk.models.flexible_enums import ( + FlexibleConnectionType, + FlexibleConnectorType, + 
FlexibleCredentialType, + FlexibleDestinationType, + FlexibleSourceType, + flexible_enum_validator, +) +from nexla_sdk.models.sources.enums import SourceType + + +@pytest.mark.unit +class TestConnectorEnumCoverage: + """Test that connector enums have consistent coverage.""" + + def test_source_type_has_all_base_connectors(self): + """SourceType should include core file and database connectors.""" + base_connectors = [ + "s3", + "gcs", + "azure_blb", + "mysql", + "postgres", + "snowflake", + "bigquery", + "kafka", + "rest", + ] + source_values = [e.value for e in SourceType] + for connector in base_connectors: + assert connector in source_values, f"SourceType missing {connector}" + + def test_destination_type_has_all_base_connectors(self): + """DestinationType should include core connectors.""" + base_connectors = [ + "s3", + "gcs", + "azure_blb", + "mysql", + "postgres", + "snowflake", + "bigquery", + "kafka", + "rest", + "pinecone", + ] + dest_values = [e.value for e in DestinationType] + for connector in base_connectors: + assert connector in dest_values, f"DestinationType missing {connector}" + + def test_connector_type_is_superset(self): + """ConnectorType should be a superset of Source and Destination types.""" + connector_values = set(e.value for e in ConnectorType) + source_values = set(e.value for e in SourceType) + dest_values = set(e.value for e in DestinationType) + + # All source types should be in ConnectorType + missing_from_connector = source_values - connector_values + assert ( + len(missing_from_connector) == 0 + ), f"ConnectorType missing source types: {missing_from_connector}" + + # All destination types should be in ConnectorType + missing_from_connector = dest_values - connector_values + assert ( + len(missing_from_connector) == 0 + ), f"ConnectorType missing destination types: {missing_from_connector}" + + def test_source_type_has_cloud_database_connectors(self): + """SourceType should include cloud database connectors.""" + cloud_dbs = [ + 
"cloudsql_mysql", + "cloudsql_postgres", + "cloudsql_sqlserver", + "gcp_alloydb", + "gcp_spanner", + "azure_synapse", + "aws_athena", + ] + source_values = [e.value for e in SourceType] + for connector in cloud_dbs: + assert connector in source_values, f"SourceType missing {connector}" + + def test_source_type_has_data_lake_connectors(self): + """SourceType should include data lake connectors.""" + data_lakes = [ + "delta_lake_s3", + "delta_lake_azure_blb", + "delta_lake_azure_data_lake", + "s3_iceberg", + ] + source_values = [e.value for e in SourceType] + for connector in data_lakes: + assert connector in source_values, f"SourceType missing {connector}" + + +@pytest.mark.unit +class TestFlexibleEnumsInResponses: + """Test flexible enum behavior in response models.""" + + def test_source_with_known_type(self): + """Source should accept known source_type values.""" + from nexla_sdk.models.sources.responses import Source + + data = { + "id": 1, + "name": "Test Source", + "status": "ACTIVE", + "source_type": "s3", + } + source = Source(**data) + assert source.source_type == SourceType.S3 + + def test_source_with_unknown_type(self): + """Source should accept unknown source_type values as strings.""" + from nexla_sdk.models.sources.responses import Source + + data = { + "id": 1, + "name": "Test Source", + "status": "ACTIVE", + "source_type": "new_future_connector_2026", + } + source = Source(**data) + assert source.source_type == "new_future_connector_2026" + + def test_destination_with_known_type(self): + """Destination should accept known sink_type values.""" + from nexla_sdk.models.destinations.responses import Destination + + data = { + "id": 1, + "name": "Test Dest", + "status": "ACTIVE", + "sink_type": "snowflake", + } + dest = Destination(**data) + assert dest.sink_type == DestinationType.SNOWFLAKE + + def test_destination_with_unknown_type(self): + """Destination should accept unknown sink_type values.""" + from nexla_sdk.models.destinations.responses import 
Destination + + data = { + "id": 1, + "name": "Test Dest", + "status": "ACTIVE", + "sink_type": "quantum_database_2030", + } + dest = Destination(**data) + assert dest.sink_type == "quantum_database_2030" + + def test_credential_with_known_type(self): + """Credential should accept known credentials_type values.""" + from nexla_sdk.models.credentials.responses import Credential + + data = { + "id": 1, + "name": "Test Cred", + "credentials_type": "s3", + } + cred = Credential(**data) + assert cred.credentials_type == CredentialType.S3 + + def test_credential_with_unknown_type(self): + """Credential should accept unknown credentials_type values.""" + from nexla_sdk.models.credentials.responses import Credential + + data = { + "id": 1, + "name": "Test Cred", + "credentials_type": "new_cloud_provider", + } + cred = Credential(**data) + assert cred.credentials_type == "new_cloud_provider" + + +@pytest.mark.unit +class TestConnectorResponseModel: + """Test Connector model with flexible enums.""" + + def test_connector_with_known_type(self): + """Connector should accept known type values.""" + from nexla_sdk.models.connectors.responses import Connector + + data = { + "id": 123, + "type": "s3", + "connection_type": "file", + "name": "Amazon S3", + "description": "S3 connector", + } + connector = Connector(**data) + assert connector.type == ConnectorType.S3 + assert connector.connection_type == ConnectionType.FILE + + def test_connector_with_unknown_type(self): + """Connector should accept unknown type values.""" + from nexla_sdk.models.connectors.responses import Connector + + data = { + "id": 123, + "type": "new_connector", + "connection_type": "quantum", + "name": "New Connector", + "description": "Future connector", + } + connector = Connector(**data) + assert connector.type == "new_connector" + assert connector.connection_type == "quantum" + + def test_common_connector_with_known_type(self): + """Common Connector model should accept known type values.""" + from 
nexla_sdk.models.common import Connector + + data = { + "id": 123, + "type": "postgres", + "connection_type": "database", + "name": "PostgreSQL", + "description": "PostgreSQL connector", + "nexset_api_compatible": True, + } + connector = Connector(**data) + assert connector.type == ConnectorType.POSTGRES + assert connector.connection_type == ConnectionType.DATABASE + + def test_common_connector_with_unknown_type(self): + """Common Connector model should accept unknown type values.""" + from nexla_sdk.models.common import Connector + + data = { + "id": 123, + "type": "future_db", + "connection_type": "hyperscale", + "name": "Future DB", + "description": "Future connector", + "nexset_api_compatible": False, + } + connector = Connector(**data) + assert connector.type == "future_db" + assert connector.connection_type == "hyperscale" + + +@pytest.mark.unit +class TestEnumSerialization: + """Test that flexible enums serialize correctly.""" + + def test_known_enum_serializes_to_string(self): + """Known enum values should serialize to their string values.""" + from nexla_sdk.models.sources.responses import Source + + data = { + "id": 1, + "name": "Test", + "status": "ACTIVE", + "source_type": "s3", + } + source = Source(**data) + serialized = source.model_dump() + assert serialized["source_type"] == "s3" + assert isinstance(serialized["source_type"], str) + + def test_unknown_value_serializes_unchanged(self): + """Unknown string values should serialize unchanged.""" + from nexla_sdk.models.sources.responses import Source + + data = { + "id": 1, + "name": "Test", + "status": "ACTIVE", + "source_type": "future_type", + } + source = Source(**data) + serialized = source.model_dump() + assert serialized["source_type"] == "future_type" + + def test_destination_serialization(self): + """Destination with flexible types should serialize correctly.""" + from nexla_sdk.models.destinations.responses import Destination + + data = { + "id": 1, + "name": "Test", + "status": "ACTIVE", + 
"sink_type": "snowflake", + "connector_type": "snowflake", + } + dest = Destination(**data) + serialized = dest.model_dump() + assert serialized["sink_type"] == "snowflake" + assert serialized["connector_type"] == "snowflake" + + +@pytest.mark.unit +class TestConnectionTypeEnum: + """Test ConnectionType enum values.""" + + def test_connection_type_categories(self): + """ConnectionType should have all expected categories.""" + expected = ["file", "database", "nosql", "streaming", "api", "vector_db", "special", "data_lake"] + values = [e.value for e in ConnectionType] + for category in expected: + assert category in values, f"ConnectionType missing {category}" diff --git a/tests/unit/test_connectors.py b/tests/unit/test_connectors.py new file mode 100644 index 0000000..8de485c --- /dev/null +++ b/tests/unit/test_connectors.py @@ -0,0 +1,173 @@ +"""Unit tests for connectors resource.""" + +import pytest + +from nexla_sdk.models.connectors.requests import ConnectorUpdate +from nexla_sdk.models.connectors.responses import Connector +from tests.utils import assert_model_list_valid, assert_model_valid + + +# Sample response data +SAMPLE_CONNECTOR = { + "id": 123, + "type": "s3", + "connection_type": "file", + "name": "Amazon S3", + "description": "Amazon Simple Storage Service connector", + "nexset_api_compatible": True, + "sync_api_compatible": True, + "created_at": "2025-01-01T00:00:00Z", + "updated_at": "2025-01-01T00:00:00Z", +} + +SAMPLE_CONNECTORS_LIST = [ + SAMPLE_CONNECTOR, + {**SAMPLE_CONNECTOR, "id": 124, "type": "gcs", "name": "Google Cloud Storage"}, + {**SAMPLE_CONNECTOR, "id": 125, "type": "azure_blob", "name": "Azure Blob Storage"}, +] + + +@pytest.fixture +def sample_connector_response(): + """Sample connector response.""" + return SAMPLE_CONNECTOR.copy() + + +@pytest.fixture +def sample_connectors_list(): + """Sample connectors list response.""" + return [c.copy() for c in SAMPLE_CONNECTORS_LIST] + + +@pytest.mark.unit +class TestConnectorsResource: + 
"""Unit tests for ConnectorsResource using mocks.""" + + def test_list_connectors_success( + self, mock_client, mock_http_client, sample_connectors_list + ): + """Test listing connectors with successful response.""" + mock_http_client.add_response("/connectors", sample_connectors_list) + + connectors = mock_client.connectors.list() + + assert len(connectors) == 3 + assert_model_list_valid(connectors, Connector) + mock_http_client.assert_request_made("GET", "/connectors") + + def test_list_connectors_with_filter( + self, mock_client, mock_http_client, sample_connectors_list + ): + """Test listing connectors with API compatibility filter.""" + mock_http_client.add_response("/connectors", sample_connectors_list) + + connectors = mock_client.connectors.list(nexset_api_compatible=True) + + assert len(connectors) == 3 + mock_http_client.assert_request_made("GET", "/connectors") + + def test_get_connector_by_id( + self, mock_client, mock_http_client, sample_connector_response + ): + """Test getting a connector by ID.""" + connector_id = 123 + mock_http_client.add_response( + f"/connectors/{connector_id}", sample_connector_response + ) + + connector = mock_client.connectors.get(connector_id) + + assert_model_valid(connector, {"id": connector_id}) + mock_http_client.assert_request_made("GET", f"/connectors/{connector_id}") + + def test_get_connector_by_type( + self, mock_client, mock_http_client, sample_connector_response + ): + """Test getting a connector by type.""" + connector_type = "s3" + mock_http_client.add_response( + f"/connectors/{connector_type}", sample_connector_response + ) + + connector = mock_client.connectors.get(connector_type) + + assert connector.type == connector_type + mock_http_client.assert_request_made("GET", f"/connectors/{connector_type}") + + def test_update_connector_success( + self, mock_client, mock_http_client, sample_connector_response + ): + """Test updating a connector.""" + connector_id = 123 + updated_response = 
{**sample_connector_response, "description": "Updated desc"} + mock_http_client.add_response(f"/connectors/{connector_id}", updated_response) + + update_data = ConnectorUpdate(description="Updated desc") + connector = mock_client.connectors.update(connector_id, update_data) + + assert connector.description == "Updated desc" + mock_http_client.assert_request_made("PUT", f"/connectors/{connector_id}") + + +@pytest.mark.unit +class TestConnectorModels: + """Unit tests for connector models.""" + + def test_connector_model_validation(self, sample_connector_response): + """Test Connector model parses valid data correctly.""" + connector = Connector.model_validate(sample_connector_response) + + assert connector.id == 123 + assert connector.type == "s3" + assert connector.name == "Amazon S3" + assert connector.nexset_api_compatible is True + + def test_connector_model_with_minimal_data(self): + """Test Connector model with minimal required fields.""" + minimal_data = { + "id": 1, + } + connector = Connector.model_validate(minimal_data) + + assert connector.id == 1 + assert connector.type is None + assert connector.name is None + + def test_connector_update_model_serialization(self): + """Test ConnectorUpdate model serialization.""" + update_data = ConnectorUpdate( + name="Updated Name", + description="Updated description", + nexset_api_compatible=True, + ) + + data = update_data.model_dump(exclude_none=True) + + assert data["name"] == "Updated Name" + assert data["description"] == "Updated description" + assert data["nexset_api_compatible"] is True + + +@pytest.mark.unit +class TestConnectorsUnsupportedOperations: + """Test that unsupported operations raise NotImplementedError.""" + + def test_create_raises_not_implemented(self, mock_client): + with pytest.raises(NotImplementedError, match="not supported"): + mock_client.connectors.create({"name": "test"}) + + def test_delete_raises_not_implemented(self, mock_client): + with pytest.raises(NotImplementedError, match="not 
supported"): + mock_client.connectors.delete(123) + + def test_copy_raises_not_implemented(self, mock_client): + with pytest.raises(NotImplementedError, match="not supported"): + mock_client.connectors.copy(123) + + def test_activate_raises_not_implemented(self, mock_client): + with pytest.raises(NotImplementedError, match="not supported"): + mock_client.connectors.activate(123) + + def test_pause_raises_not_implemented(self, mock_client): + with pytest.raises(NotImplementedError, match="not supported"): + mock_client.connectors.pause(123) diff --git a/tests/unit/test_credentials.py b/tests/unit/test_credentials.py index 379383b..257633d 100644 --- a/tests/unit/test_credentials.py +++ b/tests/unit/test_credentials.py @@ -357,6 +357,7 @@ def test_server_error_during_list(self, mock_client, mock_http_client): (403, AuthorizationError), (404, NotFoundError), (409, ResourceConflictError), + (422, SDKValidationError), (429, RateLimitError), (500, ServerError), ], diff --git a/tests/unit/test_docs_operations.py b/tests/unit/test_docs_operations.py new file mode 100644 index 0000000..0275a0a --- /dev/null +++ b/tests/unit/test_docs_operations.py @@ -0,0 +1,381 @@ +"""Unit tests for documentation operations across resources. + +Tests for get_docs, set_docs, add_docs, and remove_docs operations +provided by BaseResource for managing resource documentation. 
+""" + +import pytest + +from nexla_sdk.exceptions import AuthorizationError, NotFoundError +from tests.utils import MockResponseBuilder, create_http_error + + +@pytest.mark.unit +class TestGetDocs: + """Tests for get_docs operation.""" + + def test_get_docs_returns_doc_entries(self, mock_client, mock_http_client): + """Test that get_docs returns documentation entries for a resource.""" + # Arrange + resource_id = 123 + docs_response = MockResponseBuilder.docs_response() + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", docs_response + ) + + # Act + result = mock_client.sources.get_docs(resource_id) + + # Assert + assert result == docs_response + assert "entries" in result + assert len(result["entries"]) > 0 + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/docs" + ) + + def test_get_docs_empty_by_default(self, mock_client, mock_http_client): + """Test getting docs returns empty when no docs exist.""" + # Arrange + resource_id = 123 + empty_docs = {"entries": []} + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", empty_docs + ) + + # Act + result = mock_client.sources.get_docs(resource_id) + + # Assert + assert result == empty_docs + assert result["entries"] == [] + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/docs" + ) + + +@pytest.mark.unit +class TestSetDocs: + """Tests for set_docs operation (replaces all docs).""" + + def test_set_docs_replaces_all_docs(self, mock_client, mock_http_client): + """Test that set_docs replaces all documentation entries.""" + # Arrange + resource_id = 123 + new_docs = [ + {"key": "description", "value": "New description"}, + {"key": "usage", "value": "New usage instructions"}, + ] + expected_response = {"entries": new_docs} + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", expected_response + ) + + # Act + result = mock_client.sources.set_docs(resource_id, new_docs) + + # Assert + assert result == 
expected_response + mock_http_client.assert_request_made( + "POST", f"/data_sources/{resource_id}/docs" + ) + # Verify the request body contains the new docs + last_request = mock_http_client.get_last_request() + assert last_request is not None + assert last_request.get("json") == new_docs + + def test_set_docs_with_empty_list_clears_docs(self, mock_client, mock_http_client): + """Test that set_docs with empty list clears all docs.""" + # Arrange + resource_id = 123 + empty_docs = [] + expected_response = {"entries": []} + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", expected_response + ) + + # Act + result = mock_client.sources.set_docs(resource_id, empty_docs) + + # Assert + assert result == expected_response + mock_http_client.assert_request_made( + "POST", f"/data_sources/{resource_id}/docs" + ) + + +@pytest.mark.unit +class TestAddDocs: + """Tests for add_docs operation (merges docs).""" + + def test_add_docs_merges_entries(self, mock_client, mock_http_client): + """Test that add_docs merges new documentation entries.""" + # Arrange + resource_id = 123 + docs_to_add = [ + {"key": "new_section", "value": "New section content"}, + ] + merged_response = { + "entries": [ + {"key": "existing", "value": "Existing content"}, + {"key": "new_section", "value": "New section content"}, + ] + } + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", merged_response + ) + + # Act + result = mock_client.sources.add_docs(resource_id, docs_to_add) + + # Assert + assert result == merged_response + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/docs" + ) + # Verify the request body contains the docs to add + last_request = mock_http_client.get_last_request() + assert last_request is not None + assert last_request.get("json") == docs_to_add + + def test_add_multiple_docs(self, mock_client, mock_http_client): + """Test adding multiple documentation entries at once.""" + # Arrange + resource_id = 123 + 
docs_to_add = [ + {"key": "overview", "value": "Overview content"}, + {"key": "setup", "value": "Setup instructions"}, + {"key": "examples", "value": "Example usage"}, + ] + merged_response = {"entries": docs_to_add} + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", merged_response + ) + + # Act + result = mock_client.sources.add_docs(resource_id, docs_to_add) + + # Assert + assert result == merged_response + assert len(result["entries"]) == 3 + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/docs" + ) + # Verify all docs were sent + last_request = mock_http_client.get_last_request() + assert len(last_request.get("json", [])) == 3 + + +@pytest.mark.unit +class TestRemoveDocs: + """Tests for remove_docs operation.""" + + def test_remove_docs_deletes_specific_entries(self, mock_client, mock_http_client): + """Test that remove_docs removes specific documentation entries.""" + # Arrange + resource_id = 123 + docs_to_remove = [ + {"key": "obsolete_section", "value": "To be removed"}, + ] + remaining_response = { + "entries": [ + {"key": "remaining", "value": "Still here"}, + ] + } + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", remaining_response + ) + + # Act + result = mock_client.sources.remove_docs(resource_id, docs_to_remove) + + # Assert + assert result == remaining_response + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/docs" + ) + # Verify the request body contains the docs to remove + last_request = mock_http_client.get_last_request() + assert last_request is not None + assert last_request.get("json") == docs_to_remove + + def test_remove_docs_with_none_sends_empty_list(self, mock_client, mock_http_client): + """Test that remove_docs with None sends empty list.""" + # Arrange + resource_id = 123 + empty_response = {"entries": []} + mock_http_client.add_response( + f"/data_sources/{resource_id}/docs", empty_response + ) + + # Act + result = 
mock_client.sources.remove_docs(resource_id, None) + + # Assert + assert result == empty_response + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/docs" + ) + # Verify empty list was sent + last_request = mock_http_client.get_last_request() + assert last_request.get("json") == [] + + +@pytest.mark.unit +class TestDocsErrorHandling: + """Tests for error handling in docs operations.""" + + def test_docs_not_found_returns_404(self, mock_client, mock_http_client): + """Test that accessing docs for non-existent resource returns 404.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/docs", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.get_docs(resource_id) + + def test_docs_permission_denied_returns_403(self, mock_client, mock_http_client): + """Test that unauthorized docs access returns 403.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/data_sources/{resource_id}/docs", + create_http_error(403, "Forbidden"), + ) + + # Act & Assert + with pytest.raises(AuthorizationError): + mock_client.sources.get_docs(resource_id) + + def test_set_docs_permission_denied(self, mock_client, mock_http_client): + """Test that unauthorized set_docs access returns 403.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/data_sources/{resource_id}/docs", + create_http_error(403, "Forbidden"), + ) + + # Act & Assert + with pytest.raises(AuthorizationError): + mock_client.sources.set_docs(resource_id, [{"key": "test", "value": "data"}]) + + +@pytest.mark.unit +class TestDocsAcrossResources: + """Tests verifying docs operations work across resource types.""" + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ], + ) + def test_get_docs_for_resource_type( + self, mock_client, 
mock_http_client, resource_name, endpoint + ): + """Test get_docs works for different resource types.""" + # Arrange + resource_id = 123 + docs_response = MockResponseBuilder.docs_response() + mock_http_client.add_response(f"{endpoint}/{resource_id}/docs", docs_response) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.get_docs(resource_id) + + # Assert + assert result == docs_response + mock_http_client.assert_request_made( + "GET", f"{endpoint}/{resource_id}/docs" + ) + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ], + ) + def test_set_docs_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test set_docs works for different resource types.""" + # Arrange + resource_id = 123 + new_docs = [{"key": "test", "value": "Test documentation"}] + expected_response = {"entries": new_docs} + mock_http_client.add_response(f"{endpoint}/{resource_id}/docs", expected_response) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.set_docs(resource_id, new_docs) + + # Assert + assert result == expected_response + mock_http_client.assert_request_made( + "POST", f"{endpoint}/{resource_id}/docs" + ) + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ], + ) + def test_add_docs_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test add_docs works for different resource types.""" + # Arrange + resource_id = 123 + docs_to_add = [{"key": "additional", "value": "Additional documentation"}] + expected_response = {"entries": docs_to_add} + mock_http_client.add_response(f"{endpoint}/{resource_id}/docs", expected_response) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.add_docs(resource_id, docs_to_add) + + 
# Assert + assert result == expected_response + mock_http_client.assert_request_made( + "PUT", f"{endpoint}/{resource_id}/docs" + ) + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ], + ) + def test_remove_docs_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test remove_docs works for different resource types.""" + # Arrange + resource_id = 123 + docs_to_remove = [{"key": "obsolete", "value": "Obsolete documentation"}] + expected_response = {"entries": []} + mock_http_client.add_response(f"{endpoint}/{resource_id}/docs", expected_response) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.remove_docs(resource_id, docs_to_remove) + + # Assert + assert result == expected_response + mock_http_client.assert_request_made( + "DELETE", f"{endpoint}/{resource_id}/docs" + ) diff --git a/tests/unit/test_error_scenarios.py b/tests/unit/test_error_scenarios.py new file mode 100644 index 0000000..c65476e --- /dev/null +++ b/tests/unit/test_error_scenarios.py @@ -0,0 +1,716 @@ +"""Unit tests for error scenarios across the SDK. + +This module tests comprehensive error handling for HTTP status codes +and edge cases like boundary conditions for resource IDs. 
+""" + +import pytest + +from nexla_sdk.exceptions import ( + AuthenticationError, + AuthorizationError, + NexlaError, + NotFoundError, + RateLimitError, + ServerError, + ValidationError, +) +from tests.utils import MockResponseBuilder, create_http_error + + +@pytest.mark.unit +class TestValidationErrors: + """Tests for 400 Bad Request validation error scenarios.""" + + def test_validation_error_field_level(self, mock_client, mock_http_client): + """Test 400 response with single field validation error.""" + # Arrange + error_response = { + "error": "Validation failed", + "message": "The request data is invalid", + "field_errors": {"name": ["This field is required"]}, + } + mock_http_client.add_error( + "/data_sources", + create_http_error( + 400, + "Validation failed", + details={"field_errors": error_response["field_errors"]}, + ), + ) + + # Act & Assert + with pytest.raises(ValidationError) as exc_info: + mock_client.sources.create({"description": "Missing name field"}) + + assert exc_info.value.status_code == 400 + assert "Validation failed" in str(exc_info.value.message) + + def test_validation_error_multiple_fields(self, mock_client, mock_http_client): + """Test 400 response with multiple field validation failures.""" + # Arrange + field_errors = { + "name": ["This field is required", "Name must be at least 3 characters"], + "credentials_type": ["Invalid credentials type"], + "properties": ["Properties object is required"], + } + mock_http_client.add_error( + "/data_credentials", + create_http_error( + 400, + "Multiple validation errors", + details={"field_errors": field_errors}, + ), + ) + + # Act & Assert + with pytest.raises(ValidationError) as exc_info: + mock_client.credentials.create({}) + + assert exc_info.value.status_code == 400 + + def test_validation_error_invalid_json_format(self, mock_client, mock_http_client): + """Test 400 response for malformed request data.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + 
create_http_error(400, "Invalid JSON format in request body"), + ) + + # Act & Assert + with pytest.raises(ValidationError) as exc_info: + mock_client.sources.create({"name": "Test"}) + + assert exc_info.value.status_code == 400 + + def test_validation_error_constraint_violation(self, mock_client, mock_http_client): + """Test 400 response for business rule constraint violation.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error( + 400, + "Source name already exists in this organization", + details={"constraint": "unique_name_per_org"}, + ), + ) + + # Act & Assert + with pytest.raises(ValidationError) as exc_info: + mock_client.sources.create({"name": "Duplicate Name"}) + + assert exc_info.value.status_code == 400 + assert "already exists" in str(exc_info.value.message) + + +@pytest.mark.unit +class TestAuthenticationErrors: + """Tests for 401 Unauthorized authentication error scenarios.""" + + def test_authentication_error_invalid_token(self, mock_client, mock_http_client): + """Test 401 handling for invalid or expired token.""" + # Arrange - need to set up both the main request and token refresh to fail + mock_http_client.add_error( + "/data_sources", + create_http_error(401, "Invalid or expired access token"), + ) + # Also fail the token refresh attempt + mock_http_client.add_error( + "/token", + create_http_error(401, "Invalid service key"), + ) + + # Act & Assert + with pytest.raises(AuthenticationError) as exc_info: + mock_client.sources.list() + + # The exception should be raised (status_code may not be set due to retry logic) + assert "Authentication failed" in str(exc_info.value) + + def test_authentication_error_missing_token(self, mock_client, mock_http_client): + """Test 401 handling when authorization header is missing.""" + # Arrange + mock_http_client.add_error( + "/data_credentials", + create_http_error(401, "Authorization header is required"), + ) + mock_http_client.add_error( + "/token", + create_http_error(401, 
"Invalid service key"), + ) + + # Act & Assert + with pytest.raises(AuthenticationError) as exc_info: + mock_client.credentials.list() + + assert "Authentication failed" in str(exc_info.value) + + def test_authentication_error_on_get_request(self, mock_client, mock_http_client): + """Test 401 handling for GET request with invalid credentials.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/users/{resource_id}", + create_http_error(401, "Unauthorized access"), + ) + mock_http_client.add_error( + "/token", + create_http_error(401, "Invalid service key"), + ) + + # Act & Assert + with pytest.raises(AuthenticationError) as exc_info: + mock_client.users.get(resource_id) + + assert "Authentication failed" in str(exc_info.value) + + def test_authentication_error_revoked_service_key( + self, mock_client, mock_http_client + ): + """Test 401 handling when service key has been revoked.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error( + 401, + "Service key has been revoked", + details={"reason": "key_revoked"}, + ), + ) + mock_http_client.add_error( + "/token", + create_http_error(401, "Service key revoked"), + ) + + # Act & Assert + with pytest.raises(AuthenticationError) as exc_info: + mock_client.sources.list() + + assert "Authentication failed" in str(exc_info.value) + + +@pytest.mark.unit +class TestAuthorizationErrors: + """Tests for 403 Forbidden authorization error scenarios.""" + + def test_authorization_error_read_only_user(self, mock_client, mock_http_client): + """Test 403 for collaborators attempting write operations.""" + # Arrange + source_id = 123 + mock_http_client.add_error( + f"/data_sources/{source_id}", + create_http_error( + 403, + "User does not have write permission on this resource", + details={ + "user_role": "collaborator", + "required_role": "owner", + "operation": "update", + }, + ), + ) + + # Act & Assert + with pytest.raises(AuthorizationError) as exc_info: + 
mock_client.sources.update(source_id, {"name": "New Name"}) + + assert exc_info.value.status_code == 403 + + def test_authorization_error_cross_org_access(self, mock_client, mock_http_client): + """Test 403 for accessing resources in another organization.""" + # Arrange + resource_id = 456 + mock_http_client.add_error( + f"/data_credentials/{resource_id}", + create_http_error( + 403, + "Cannot access resources in another organization", + details={ + "user_org_id": 1, + "resource_org_id": 2, + }, + ), + ) + + # Act & Assert + with pytest.raises(AuthorizationError) as exc_info: + mock_client.credentials.get(resource_id) + + assert exc_info.value.status_code == 403 + + def test_authorization_error_delete_protected_resource( + self, mock_client, mock_http_client + ): + """Test 403 for deleting protected/system resources.""" + # Arrange + resource_id = 789 + mock_http_client.add_error( + f"/data_sources/{resource_id}", + create_http_error( + 403, + "Cannot delete protected resource", + details={"protected": True, "reason": "system_managed"}, + ), + ) + + # Act & Assert + with pytest.raises(AuthorizationError) as exc_info: + mock_client.sources.delete(resource_id) + + assert exc_info.value.status_code == 403 + + def test_authorization_error_org_admin_required( + self, mock_client, mock_http_client + ): + """Test 403 for operations requiring org admin privileges.""" + # Arrange + org_id = 100 + mock_http_client.add_error( + f"/orgs/{org_id}/members", + create_http_error( + 403, + "Organization admin privileges required", + details={"required_role": "org_admin"}, + ), + ) + + # Act & Assert + with pytest.raises(AuthorizationError) as exc_info: + mock_client.organizations.get_members(org_id) + + assert exc_info.value.status_code == 403 + + +@pytest.mark.unit +class TestNotFoundErrors: + """Tests for 404 Not Found error scenarios.""" + + def test_not_found_error_nonexistent_id(self, mock_client, mock_http_client): + """Test 404 handling for non-existent resource ID.""" + # 
Arrange + nonexistent_id = 999999 + mock_http_client.add_error( + f"/data_sources/{nonexistent_id}", + create_http_error( + 404, + f"Data source with ID {nonexistent_id} not found", + ), + ) + + # Act & Assert + with pytest.raises(NotFoundError) as exc_info: + mock_client.sources.get(nonexistent_id) + + # NotFoundError is raised but status_code may not be set in all paths + assert "not found" in str(exc_info.value).lower() + + def test_not_found_error_deleted_resource(self, mock_client, mock_http_client): + """Test 404 for accessing a deleted resource.""" + # Arrange + deleted_id = 123 + mock_http_client.add_error( + f"/data_credentials/{deleted_id}", + create_http_error( + 404, + "Resource has been deleted", + details={"deleted_at": "2024-01-15T10:00:00Z"}, + ), + ) + + # Act & Assert + with pytest.raises(NotFoundError) as exc_info: + mock_client.credentials.get(deleted_id) + + assert "not found" in str(exc_info.value).lower() + + def test_not_found_error_nested_resource(self, mock_client, mock_http_client): + """Test 404 for nested resource not found.""" + # Arrange + source_id = 123 + mock_http_client.add_error( + f"/data_sources/{source_id}/accessors", + create_http_error( + 404, + f"Parent resource with ID {source_id} not found", + ), + ) + + # Act & Assert + with pytest.raises(NotFoundError) as exc_info: + mock_client.sources.get_accessors(source_id) + + assert "not found" in str(exc_info.value).lower() + + def test_not_found_error_update_nonexistent(self, mock_client, mock_http_client): + """Test 404 when updating a non-existent resource.""" + # Arrange + nonexistent_id = 888888 + mock_http_client.add_error( + f"/data_sets/{nonexistent_id}", + create_http_error(404, "Nexset not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError) as exc_info: + mock_client.nexsets.update(nonexistent_id, {"name": "Updated"}) + + assert "not found" in str(exc_info.value).lower() + + +@pytest.mark.unit +class TestRateLimitErrors: + """Tests for 429 Too Many 
Requests rate limit error scenarios.""" + + def test_rate_limit_error_retry_after(self, mock_client, mock_http_client): + """Test 429 with Retry-After header information.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error( + 429, + "Rate limit exceeded", + details={ + "retry_after": 60, + "limit": 100, + "remaining": 0, + "reset_at": "2024-01-15T10:01:00Z", + }, + ), + ) + + # Act & Assert + with pytest.raises(RateLimitError) as exc_info: + mock_client.sources.list() + + assert exc_info.value.status_code == 429 + + def test_rate_limit_error_burst_limit(self, mock_client, mock_http_client): + """Test 429 for burst rate limit exceeded.""" + # Arrange + mock_http_client.add_error( + "/data_credentials", + create_http_error( + 429, + "Burst rate limit exceeded", + details={ + "limit_type": "burst", + "retry_after": 5, + }, + ), + ) + + # Act & Assert + with pytest.raises(RateLimitError) as exc_info: + mock_client.credentials.list() + + assert exc_info.value.status_code == 429 + + def test_rate_limit_error_daily_quota(self, mock_client, mock_http_client): + """Test 429 for daily quota exceeded.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error( + 429, + "Daily API quota exceeded", + details={ + "limit_type": "daily", + "quota": 10000, + "used": 10000, + "resets_at": "2024-01-16T00:00:00Z", + }, + ), + ) + + # Act & Assert + with pytest.raises(RateLimitError) as exc_info: + mock_client.sources.create({"name": "Test"}) + + assert exc_info.value.status_code == 429 + + +@pytest.mark.unit +class TestServerErrors: + """Tests for 5xx server error scenarios.""" + + def test_server_error_500(self, mock_client, mock_http_client): + """Test 500 Internal Server Error handling.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error(500, "Internal server error"), + ) + + # Act & Assert + with pytest.raises(ServerError) as exc_info: + mock_client.sources.list() + + assert 
exc_info.value.status_code == 500 + + def test_server_error_502_bad_gateway(self, mock_client, mock_http_client): + """Test 502 Bad Gateway handling.""" + # Arrange + mock_http_client.add_error( + "/data_credentials", + create_http_error(502, "Bad Gateway"), + ) + + # Act & Assert + with pytest.raises(ServerError) as exc_info: + mock_client.credentials.list() + + assert exc_info.value.status_code == 502 + + def test_server_error_503_service_unavailable(self, mock_client, mock_http_client): + """Test 503 Service Unavailable handling.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error( + 503, + "Service temporarily unavailable", + details={"maintenance": True, "expected_duration": "15 minutes"}, + ), + ) + + # Act & Assert + with pytest.raises(ServerError) as exc_info: + mock_client.sources.list() + + assert exc_info.value.status_code == 503 + + def test_server_error_504_gateway_timeout(self, mock_client, mock_http_client): + """Test 504 Gateway Timeout handling.""" + # Arrange + mock_http_client.add_error( + "/data_sinks/123", + create_http_error(504, "Gateway timeout"), + ) + + # Act & Assert + with pytest.raises(ServerError) as exc_info: + mock_client.destinations.get(123) + + assert exc_info.value.status_code == 504 + + def test_server_error_with_request_id(self, mock_client, mock_http_client): + """Test server error includes request ID for debugging.""" + # Arrange + mock_http_client.add_error( + "/data_sources", + create_http_error( + 500, + "Internal server error", + details={"request_id": "req-abc-123-xyz"}, + ), + ) + + # Act & Assert + with pytest.raises(ServerError) as exc_info: + mock_client.sources.list() + + assert exc_info.value.status_code == 500 + + +@pytest.mark.unit +class TestBoundaryConditions: + """Tests for edge cases with resource IDs and boundary values.""" + + def test_boundary_id_zero(self, mock_client, mock_http_client): + """Test handling of ID=0 which may be invalid.""" + # Arrange + 
mock_http_client.add_error( + "/data_sources/0", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.get(0) + + def test_boundary_id_negative(self, mock_client, mock_http_client): + """Test handling of negative ID values.""" + # Arrange + # Negative IDs should result in validation or not found errors + mock_http_client.add_error( + "/data_sources/-1", + create_http_error(400, "Invalid resource ID: must be positive"), + ) + + # Act & Assert + with pytest.raises(ValidationError): + mock_client.sources.get(-1) + + def test_boundary_id_very_large(self, mock_client, mock_http_client): + """Test handling of very large ID values.""" + # Arrange + large_id = 9999999999999 + mock_http_client.add_error( + f"/data_sources/{large_id}", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.get(large_id) + + def test_boundary_empty_list_response(self, mock_client, mock_http_client): + """Test handling of empty list responses (not an error).""" + # Arrange + mock_http_client.add_response("/data_sources", []) + + # Act + result = mock_client.sources.list() + + # Assert - empty list should not raise an error + assert result == [] + + def test_boundary_single_item_list(self, mock_client, mock_http_client): + """Test handling of single-item list responses.""" + # Arrange + source_data = MockResponseBuilder.source(source_id=1) + mock_http_client.add_response("/data_sources", [source_data]) + + # Act + result = mock_client.sources.list() + + # Assert - should return list with one item + assert len(result) == 1 + assert result[0].id == 1 + + +@pytest.mark.unit +class TestErrorResponseStructure: + """Tests for error response structure and metadata.""" + + def test_error_contains_status_code_for_validation( + self, mock_client, mock_http_client + ): + """Test that ValidationError contains the HTTP status code.""" + # Arrange + 
mock_http_client.add_error( + "/data_sources", + create_http_error(400, "Validation failed"), + ) + + # Act & Assert + with pytest.raises(ValidationError) as exc_info: + mock_client.sources.create({"invalid": "data"}) + + assert exc_info.value.status_code == 400 + + def test_error_contains_message(self, mock_client, mock_http_client): + """Test that error exceptions contain the error message.""" + # Arrange + error_message = "Custom error message for testing" + mock_http_client.add_error( + "/data_sources/123", + create_http_error(400, error_message), + ) + + # Act & Assert + with pytest.raises(NexlaError) as exc_info: + mock_client.sources.get(123) + + assert error_message in str(exc_info.value.message) + + def test_error_get_summary(self, mock_client, mock_http_client): + """Test that NexlaError provides structured error summary.""" + # Arrange + mock_http_client.add_error( + "/data_sources/456", + create_http_error( + 403, + "Access denied", + details={"resource_id": 456}, + ), + ) + + # Act & Assert + with pytest.raises(NexlaError) as exc_info: + mock_client.sources.get(456) + + summary = exc_info.value.get_error_summary() + assert "message" in summary + assert summary["status_code"] == 403 + + +@pytest.mark.unit +class TestErrorAcrossResources: + """Tests verifying error handling works across different resource types.""" + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ("lookups", "/data_maps"), + ("projects", "/projects"), + ("teams", "/teams"), + ], + ) + def test_not_found_across_resources( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test 404 handling across different resource types.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"{endpoint}/{resource_id}", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + resource = 
getattr(mock_client, resource_name) + with pytest.raises(NotFoundError): + resource.get(resource_id) + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_authorization_error_across_resources( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test 403 handling across different resource types.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"{endpoint}/{resource_id}", + create_http_error(403, "Permission denied"), + ) + + # Act & Assert + resource = getattr(mock_client, resource_name) + with pytest.raises(AuthorizationError): + resource.delete(resource_id) + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("credentials", "/data_credentials"), + ("projects", "/projects"), + ], + ) + def test_server_error_across_resources( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test 500 handling across different resource types.""" + # Arrange + mock_http_client.add_error( + endpoint, + create_http_error(500, "Internal server error"), + ) + + # Act & Assert + resource = getattr(mock_client, resource_name) + with pytest.raises(ServerError): + resource.list() diff --git a/tests/unit/test_flexible_enums.py b/tests/unit/test_flexible_enums.py new file mode 100644 index 0000000..56e1189 --- /dev/null +++ b/tests/unit/test_flexible_enums.py @@ -0,0 +1,154 @@ +"""Unit tests for flexible enum handling.""" + +import pytest + +from nexla_sdk.models.destinations.enums import DestinationFormat, DestinationType +from nexla_sdk.models.destinations.responses import Destination +from nexla_sdk.models.flexible_enums import FlexibleEnum, flexible_enum_validator +from nexla_sdk.models.nexsets.responses import DataSinkSimplified, Nexset + + +class TestFlexibleEnumValidator: + """Test the flexible enum validator.""" + + def 
test_known_value_returns_enum(self): + """Known values should return enum members.""" + validator = flexible_enum_validator(DestinationType) + result = validator("s3") + assert result == DestinationType.S3 + assert isinstance(result, DestinationType) + + def test_unknown_value_returns_string(self): + """Unknown values should return as raw strings.""" + validator = flexible_enum_validator(DestinationType) + result = validator("new_unknown_connector") + assert result == "new_unknown_connector" + assert isinstance(result, str) + + def test_none_returns_none(self): + """None should remain None.""" + validator = flexible_enum_validator(DestinationType) + result = validator(None) + assert result is None + + def test_enum_instance_returns_unchanged(self): + """Enum instances should pass through unchanged.""" + validator = flexible_enum_validator(DestinationType) + result = validator(DestinationType.S3) + assert result == DestinationType.S3 + + +class TestFlexibleEnumInModels: + """Test flexible enums in Pydantic models.""" + + def test_destination_with_known_sink_format(self): + """Destination should accept known sink_format values.""" + data = { + "id": 1, + "name": "Test Dest", + "status": "ACTIVE", + "sink_type": "s3", + "sink_format": "json", + } + dest = Destination(**data) + assert dest.sink_format == DestinationFormat.JSON + + def test_destination_with_unknown_sink_format(self): + """Destination should accept unknown sink_format values.""" + data = { + "id": 1, + "name": "Test Dest", + "status": "ACTIVE", + "sink_type": "s3", + "sink_format": "new_format_2025", + } + dest = Destination(**data) + assert dest.sink_format == "new_format_2025" + + def test_destination_with_none_sink_format(self): + """Destination should accept None for sink_format.""" + data = { + "id": 1, + "name": "Test Dest", + "status": "ACTIVE", + "sink_type": "s3", + } + dest = Destination(**data) + assert dest.sink_format is None + + def test_data_sink_simplified_with_known_sink_type(self): + 
"""DataSinkSimplified should accept known sink_type values.""" + data = { + "id": 1, + "name": "Test Sink", + "sinkType": "snowflake", + } + sink = DataSinkSimplified(**data) + assert sink.sink_type == DestinationType.SNOWFLAKE + + def test_data_sink_simplified_with_unknown_sink_type(self): + """DataSinkSimplified should accept unknown sink_type values.""" + data = { + "id": 1, + "name": "Test Sink", + "sinkType": "new_connector_type", + } + sink = DataSinkSimplified(**data) + assert sink.sink_type == "new_connector_type" + + def test_nexset_with_unknown_sink_type_in_data_sinks(self): + """Nexset should handle data_sinks with unknown sink types.""" + data = { + "id": 1, + "name": "Test Nexset", + "data_sinks": [ + {"id": 1, "name": "Known Sink", "sinkType": "s3"}, + {"id": 2, "name": "Unknown Sink", "sinkType": "future_connector"}, + ], + } + nexset = Nexset(**data) + assert len(nexset.data_sinks) == 2 + assert nexset.data_sinks[0].sink_type == DestinationType.S3 + assert nexset.data_sinks[1].sink_type == "future_connector" + + +class TestSerializationRoundTrip: + """Test that flexible enums serialize correctly.""" + + def test_known_value_serializes_as_string(self): + """Known enum values should serialize to their string values.""" + data = { + "id": 1, + "name": "Test", + "status": "ACTIVE", + "sink_type": "s3", + "sink_format": "json", + } + dest = Destination(**data) + serialized = dest.model_dump() + # With use_enum_values=True, should be string "json" + assert serialized["sink_format"] == "json" + + def test_unknown_value_serializes_unchanged(self): + """Unknown string values should serialize unchanged.""" + data = { + "id": 1, + "name": "Test", + "status": "ACTIVE", + "sink_type": "s3", + "sink_format": "unknown_format", + } + dest = Destination(**data) + serialized = dest.model_dump() + assert serialized["sink_format"] == "unknown_format" + + def test_json_serialization_with_unknown_value(self): + """Unknown values should serialize to JSON correctly.""" + 
data = { + "id": 1, + "name": "Test", + "sinkType": "future_connector", + } + sink = DataSinkSimplified(**data) + json_str = sink.model_dump_json() + assert "future_connector" in json_str diff --git a/tests/unit/test_flow_triggers.py b/tests/unit/test_flow_triggers.py new file mode 100644 index 0000000..365471e --- /dev/null +++ b/tests/unit/test_flow_triggers.py @@ -0,0 +1,265 @@ +"""Unit tests for flow triggers resource.""" + +import pytest + +from nexla_sdk.models.flow_triggers.requests import FlowTriggerCreate +from nexla_sdk.models.flow_triggers.responses import FlowTrigger +from tests.utils import assert_model_list_valid, assert_model_valid + + +# Sample response data +SAMPLE_FLOW_TRIGGER = { + "id": 123, + "owner": {"id": 1, "full_name": "Test User", "email": "test@example.com"}, + "org": {"id": 1, "name": "Test Org"}, + "status": "ACTIVE", + "triggering_event_type": "DATA_SINK_WRITE_DONE", + "triggering_origin_node_id": 100, + "triggering_flow_node_id": 101, + "triggering_resource_type": "data_sink", + "triggering_resource_id": 200, + "triggered_event_type": "DATA_SOURCE_READ_START", + "triggered_origin_node_id": 300, + "triggered_resource_type": "data_source", + "triggered_resource_id": 400, + "updated_at": "2025-01-01T00:00:00Z", + "created_at": "2025-01-01T00:00:00Z", +} + +SAMPLE_FLOW_TRIGGERS_LIST = [ + SAMPLE_FLOW_TRIGGER, + {**SAMPLE_FLOW_TRIGGER, "id": 124, "status": "PAUSED"}, + {**SAMPLE_FLOW_TRIGGER, "id": 125}, +] + + +@pytest.fixture +def sample_flow_trigger_response(): + """Sample flow trigger response.""" + return SAMPLE_FLOW_TRIGGER.copy() + + +@pytest.fixture +def sample_flow_triggers_list(): + """Sample flow triggers list response.""" + return [t.copy() for t in SAMPLE_FLOW_TRIGGERS_LIST] + + +@pytest.mark.unit +class TestFlowTriggersResource: + """Unit tests for FlowTriggersResource using mocks.""" + + def test_list_flow_triggers_success( + self, mock_client, mock_http_client, sample_flow_triggers_list + ): + """Test listing flow triggers 
with successful response.""" + mock_http_client.add_response("/flow_triggers", sample_flow_triggers_list) + + triggers = mock_client.flow_triggers.list() + + assert len(triggers) == 3 + assert_model_list_valid(triggers, FlowTrigger) + mock_http_client.assert_request_made("GET", "/flow_triggers") + + def test_list_flow_triggers_with_pagination( + self, mock_client, mock_http_client, sample_flow_triggers_list + ): + """Test listing flow triggers with pagination.""" + mock_http_client.add_response("/flow_triggers", sample_flow_triggers_list) + + triggers = mock_client.flow_triggers.list(page=1, per_page=10) + + assert len(triggers) == 3 + + def test_list_all_flow_triggers_success( + self, mock_client, mock_http_client, sample_flow_triggers_list + ): + """Test listing all flow triggers (super user).""" + mock_http_client.add_response("/flow_triggers/all", sample_flow_triggers_list) + + triggers = mock_client.flow_triggers.list_all() + + assert len(triggers) == 3 + mock_http_client.assert_request_made("GET", "/flow_triggers/all") + + def test_list_all_flow_triggers_with_pagination( + self, mock_client, mock_http_client, sample_flow_triggers_list + ): + """Test listing all flow triggers with pagination.""" + mock_http_client.add_response("/flow_triggers/all", sample_flow_triggers_list) + + triggers = mock_client.flow_triggers.list_all(page=1, per_page=50) + + assert len(triggers) == 3 + + def test_get_flow_trigger_success( + self, mock_client, mock_http_client, sample_flow_trigger_response + ): + """Test getting a single flow trigger.""" + trigger_id = 123 + mock_http_client.add_response( + f"/flow_triggers/{trigger_id}", sample_flow_trigger_response + ) + + trigger = mock_client.flow_triggers.get(trigger_id) + + assert_model_valid(trigger, {"id": trigger_id}) + mock_http_client.assert_request_made("GET", f"/flow_triggers/{trigger_id}") + + def test_create_flow_trigger_success( + self, mock_client, mock_http_client, sample_flow_trigger_response + ): + """Test creating a 
flow trigger.""" + mock_http_client.add_response("/flow_triggers", sample_flow_trigger_response) + + create_data = FlowTriggerCreate( + triggering_event_type="DATA_SINK_WRITE_DONE", + triggered_event_type="DATA_SOURCE_READ_START", + data_sink_id=200, + data_source_id=400, + ) + trigger = mock_client.flow_triggers.create(create_data) + + assert_model_valid(trigger, {"triggering_event_type": "DATA_SINK_WRITE_DONE"}) + mock_http_client.assert_request_made("POST", "/flow_triggers") + + def test_create_flow_trigger_with_node_ids( + self, mock_client, mock_http_client, sample_flow_trigger_response + ): + """Test creating a flow trigger with node IDs.""" + mock_http_client.add_response("/flow_triggers", sample_flow_trigger_response) + + create_data = FlowTriggerCreate( + triggering_event_type="DATA_SINK_WRITE_DONE", + triggered_event_type="DATA_SOURCE_READ_START", + triggering_flow_node_id=101, + triggered_origin_node_id=300, + ) + trigger = mock_client.flow_triggers.create(create_data) + + assert trigger.triggering_flow_node_id == 101 + + def test_delete_flow_trigger_success(self, mock_client, mock_http_client): + """Test deleting a flow trigger.""" + trigger_id = 123 + mock_http_client.add_response( + f"/flow_triggers/{trigger_id}", {"success": True} + ) + + result = mock_client.flow_triggers.delete(trigger_id) + + assert result["success"] is True + mock_http_client.assert_request_made("DELETE", f"/flow_triggers/{trigger_id}") + + def test_activate_flow_trigger_success( + self, mock_client, mock_http_client, sample_flow_trigger_response + ): + """Test activating a flow trigger.""" + trigger_id = 123 + activated_response = {**sample_flow_trigger_response, "status": "ACTIVE"} + mock_http_client.add_response( + f"/flow_triggers/{trigger_id}/activate", activated_response + ) + + trigger = mock_client.flow_triggers.activate(trigger_id) + + assert trigger.status == "ACTIVE" + mock_http_client.assert_request_made("PUT", f"/flow_triggers/{trigger_id}/activate") + + def 
test_pause_flow_trigger_success( + self, mock_client, mock_http_client, sample_flow_trigger_response + ): + """Test pausing a flow trigger.""" + trigger_id = 123 + paused_response = {**sample_flow_trigger_response, "status": "PAUSED"} + mock_http_client.add_response( + f"/flow_triggers/{trigger_id}/pause", paused_response + ) + + trigger = mock_client.flow_triggers.pause(trigger_id) + + assert trigger.status == "PAUSED" + mock_http_client.assert_request_made("PUT", f"/flow_triggers/{trigger_id}/pause") + + +@pytest.mark.unit +class TestFlowTriggerModels: + """Unit tests for flow trigger models.""" + + def test_flow_trigger_model_validation(self, sample_flow_trigger_response): + """Test FlowTrigger model parses valid data correctly.""" + trigger = FlowTrigger.model_validate(sample_flow_trigger_response) + + assert trigger.id == 123 + assert trigger.status == "ACTIVE" + assert trigger.triggering_event_type == "DATA_SINK_WRITE_DONE" + assert trigger.triggered_event_type == "DATA_SOURCE_READ_START" + assert trigger.triggering_resource_type == "data_sink" + assert trigger.triggered_resource_type == "data_source" + + def test_flow_trigger_with_owner_and_org(self, sample_flow_trigger_response): + """Test FlowTrigger model parses owner and org correctly.""" + trigger = FlowTrigger.model_validate(sample_flow_trigger_response) + + assert trigger.owner is not None + assert trigger.owner.id == 1 + assert trigger.org is not None + assert trigger.org.id == 1 + + def test_flow_trigger_create_model_serialization(self): + """Test FlowTriggerCreate model serialization.""" + create_data = FlowTriggerCreate( + triggering_event_type="DATA_SINK_WRITE_DONE", + triggered_event_type="DATA_SOURCE_READ_START", + data_sink_id=100, + data_source_id=200, + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["triggering_event_type"] == "DATA_SINK_WRITE_DONE" + assert data["triggered_event_type"] == "DATA_SOURCE_READ_START" + assert data["data_sink_id"] == 100 + assert 
data["data_source_id"] == 200 + + def test_flow_trigger_create_with_resource_type(self): + """Test FlowTriggerCreate model with explicit resource types.""" + create_data = FlowTriggerCreate( + triggering_event_type="DATA_SINK_WRITE_DONE", + triggered_event_type="DATA_SOURCE_READ_START", + triggering_resource_id=100, + triggering_resource_type="data_sink", + triggered_resource_id=200, + triggered_resource_type="data_source", + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["triggering_resource_type"] == "data_sink" + assert data["triggered_resource_type"] == "data_source" + + def test_flow_trigger_create_with_owner(self): + """Test FlowTriggerCreate model with owner/org.""" + create_data = FlowTriggerCreate( + triggering_event_type="DATA_SOURCE_READ_DONE", + triggered_event_type="DATA_SOURCE_READ_START", + triggering_flow_node_id=100, + triggered_origin_node_id=200, + owner_id=5, + org_id=10, + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["owner_id"] == 5 + assert data["org_id"] == 10 + + +@pytest.mark.unit +class TestFlowTriggersImmutability: + """Test that flow triggers are immutable (update is not supported).""" + + def test_update_raises_not_implemented(self, mock_client): + """Flow triggers are immutable; update should raise NotImplementedError.""" + with pytest.raises(NotImplementedError, match="immutable"): + mock_client.flow_triggers.update(123, {"status": "ACTIVE"}) diff --git a/tests/unit/test_lifecycle_operations.py b/tests/unit/test_lifecycle_operations.py new file mode 100644 index 0000000..d89706e --- /dev/null +++ b/tests/unit/test_lifecycle_operations.py @@ -0,0 +1,382 @@ +"""Unit tests for lifecycle operations (activate, pause, copy).""" + +import pytest + +from nexla_sdk.exceptions import NotFoundError +from nexla_sdk.models.sources.responses import Source +from tests.utils import MockResponseBuilder, create_http_error + + +@pytest.mark.unit +class TestActivateOperation: + """Tests for resource 
activation.""" + + def test_activate_source_success(self, mock_client, mock_http_client): + """Test activating a source.""" + # Arrange + resource_id = 123 + response = MockResponseBuilder.source(source_id=resource_id, status="ACTIVE") + mock_http_client.add_response( + f"/data_sources/{resource_id}/activate", response + ) + + # Act + result = mock_client.sources.activate(resource_id) + + # Assert + assert isinstance(result, Source) + assert result.status == "ACTIVE" + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/activate" + ) + + def test_activate_destination_success(self, mock_client, mock_http_client): + """Test activating a destination.""" + # Arrange + resource_id = 456 + response = MockResponseBuilder.destination({"id": resource_id, "status": "ACTIVE"}) + mock_http_client.add_response( + f"/data_sinks/{resource_id}/activate", response + ) + + # Act + result = mock_client.destinations.activate(resource_id) + + # Assert + assert result.status == "ACTIVE" + mock_http_client.assert_request_made( + "PUT", f"/data_sinks/{resource_id}/activate" + ) + + def test_activate_nexset_success(self, mock_client, mock_http_client): + """Test activating a nexset.""" + # Arrange + resource_id = 789 + response = MockResponseBuilder.nexset({"id": resource_id, "status": "ACTIVE"}) + mock_http_client.add_response( + f"/data_sets/{resource_id}/activate", response + ) + + # Act + result = mock_client.nexsets.activate(resource_id) + + # Assert + assert result.status == "ACTIVE" + mock_http_client.assert_request_made( + "PUT", f"/data_sets/{resource_id}/activate" + ) + + def test_activate_returns_updated_status(self, mock_client, mock_http_client): + """Test that activate returns resource with updated status.""" + # Arrange + resource_id = 123 + # Simulate a source that was PAUSED and is now ACTIVE + response = MockResponseBuilder.source(source_id=resource_id, status="ACTIVE") + mock_http_client.add_response( + f"/data_sources/{resource_id}/activate", 
response + ) + + # Act + result = mock_client.sources.activate(resource_id) + + # Assert + assert result.id == resource_id + assert result.status == "ACTIVE" + + def test_activate_not_found(self, mock_client, mock_http_client): + """Test activate on non-existent resource raises NotFoundError.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/activate", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.activate(resource_id) + + +@pytest.mark.unit +class TestPauseOperation: + """Tests for resource pausing.""" + + def test_pause_source_success(self, mock_client, mock_http_client): + """Test pausing a source.""" + # Arrange + resource_id = 123 + response = MockResponseBuilder.source(source_id=resource_id, status="PAUSED") + mock_http_client.add_response(f"/data_sources/{resource_id}/pause", response) + + # Act + result = mock_client.sources.pause(resource_id) + + # Assert + assert isinstance(result, Source) + assert result.status == "PAUSED" + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/pause" + ) + + def test_pause_destination_success(self, mock_client, mock_http_client): + """Test pausing a destination.""" + # Arrange + resource_id = 456 + response = MockResponseBuilder.destination({"id": resource_id, "status": "PAUSED"}) + mock_http_client.add_response(f"/data_sinks/{resource_id}/pause", response) + + # Act + result = mock_client.destinations.pause(resource_id) + + # Assert + assert result.status == "PAUSED" + mock_http_client.assert_request_made( + "PUT", f"/data_sinks/{resource_id}/pause" + ) + + def test_pause_nexset_success(self, mock_client, mock_http_client): + """Test pausing a nexset.""" + # Arrange + resource_id = 789 + response = MockResponseBuilder.nexset({"id": resource_id, "status": "PAUSED"}) + mock_http_client.add_response(f"/data_sets/{resource_id}/pause", response) + + # Act + result = 
mock_client.nexsets.pause(resource_id) + + # Assert + assert result.status == "PAUSED" + mock_http_client.assert_request_made("PUT", f"/data_sets/{resource_id}/pause") + + def test_pause_returns_updated_status(self, mock_client, mock_http_client): + """Test that pause returns resource with updated status.""" + # Arrange + resource_id = 123 + response = MockResponseBuilder.source(source_id=resource_id, status="PAUSED") + mock_http_client.add_response(f"/data_sources/{resource_id}/pause", response) + + # Act + result = mock_client.sources.pause(resource_id) + + # Assert + assert result.id == resource_id + assert result.status == "PAUSED" + + def test_pause_not_found(self, mock_client, mock_http_client): + """Test pause on non-existent resource raises NotFoundError.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/pause", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.pause(resource_id) + + +@pytest.mark.unit +class TestCopyOperation: + """Tests for resource copying.""" + + def test_copy_source_default_options(self, mock_client, mock_http_client): + """Test copying a source with default options.""" + # Arrange + resource_id = 123 + copied_id = 456 + response = MockResponseBuilder.source( + source_id=copied_id, name="Copy of Source" + ) + mock_http_client.add_response(f"/data_sources/{resource_id}/copy", response) + + # Act + result = mock_client.sources.copy(resource_id) + + # Assert + assert isinstance(result, Source) + assert result.id == copied_id + mock_http_client.assert_request_made( + "POST", f"/data_sources/{resource_id}/copy" + ) + + def test_copy_destination_default_options(self, mock_client, mock_http_client): + """Test copying a destination with default options.""" + # Arrange + resource_id = 123 + copied_id = 456 + response = MockResponseBuilder.destination( + {"id": copied_id, "name": "Copy of Destination"} + ) + 
mock_http_client.add_response(f"/data_sinks/{resource_id}/copy", response) + + # Act + result = mock_client.destinations.copy(resource_id) + + # Assert + assert result.id == copied_id + + def test_copy_nexset_default_options(self, mock_client, mock_http_client): + """Test copying a nexset with default options.""" + # Arrange + resource_id = 123 + copied_id = 456 + response = MockResponseBuilder.nexset( + {"id": copied_id, "name": "Copy of Nexset"} + ) + mock_http_client.add_response(f"/data_sets/{resource_id}/copy", response) + + # Act + result = mock_client.nexsets.copy(resource_id) + + # Assert + assert result.id == copied_id + + def test_copy_project_default_options(self, mock_client, mock_http_client): + """Test copying a project with default options.""" + # Arrange + resource_id = 123 + copied_id = 456 + response = MockResponseBuilder.project( + project_id=copied_id, name="Copy of Project" + ) + mock_http_client.add_response(f"/projects/{resource_id}/copy", response) + + # Act + result = mock_client.projects.copy(resource_id) + + # Assert + assert result.id == copied_id + + def test_copy_returns_new_resource(self, mock_client, mock_http_client): + """Test that copy returns a new resource with different ID.""" + # Arrange + original_id = 123 + copied_id = 456 + response = MockResponseBuilder.source( + source_id=copied_id, name="Copied Source" + ) + mock_http_client.add_response(f"/data_sources/{original_id}/copy", response) + + # Act + result = mock_client.sources.copy(original_id) + + # Assert + assert result.id == copied_id + assert result.id != original_id + mock_http_client.assert_request_made("POST", f"/data_sources/{original_id}/copy") + + def test_copy_not_found(self, mock_client, mock_http_client): + """Test copy on non-existent resource raises NotFoundError.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/copy", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with 
pytest.raises(NotFoundError): + mock_client.sources.copy(resource_id) + + +@pytest.mark.unit +class TestLifecycleOperationsAcrossResources: + """Tests for lifecycle operations across different resource types.""" + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ], + ) + def test_activate_across_resources( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test activate works across different resource types.""" + # Arrange + resource_id = 123 + if resource_name == "sources": + response = MockResponseBuilder.source(source_id=resource_id, status="ACTIVE") + elif resource_name == "destinations": + response = MockResponseBuilder.destination({"id": resource_id, "status": "ACTIVE"}) + else: + response = MockResponseBuilder.nexset({"id": resource_id, "status": "ACTIVE"}) + + mock_http_client.add_response(f"{endpoint}/{resource_id}/activate", response) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.activate(resource_id) + + # Assert + assert result.status == "ACTIVE" + mock_http_client.assert_request_made("PUT", f"{endpoint}/{resource_id}/activate") + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ], + ) + def test_pause_across_resources( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test pause works across different resource types.""" + # Arrange + resource_id = 123 + if resource_name == "sources": + response = MockResponseBuilder.source(source_id=resource_id, status="PAUSED") + elif resource_name == "destinations": + response = MockResponseBuilder.destination({"id": resource_id, "status": "PAUSED"}) + else: + response = MockResponseBuilder.nexset({"id": resource_id, "status": "PAUSED"}) + + mock_http_client.add_response(f"{endpoint}/{resource_id}/pause", response) + + # Act + 
resource = getattr(mock_client, resource_name) + result = resource.pause(resource_id) + + # Assert + assert result.status == "PAUSED" + mock_http_client.assert_request_made("PUT", f"{endpoint}/{resource_id}/pause") + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("projects", "/projects"), + ], + ) + def test_copy_across_resources( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test copy works across different resource types.""" + # Arrange + resource_id = 123 + copied_id = 456 + if resource_name == "sources": + response = MockResponseBuilder.source(source_id=copied_id) + elif resource_name == "destinations": + response = MockResponseBuilder.destination({"id": copied_id}) + elif resource_name == "nexsets": + response = MockResponseBuilder.nexset({"id": copied_id}) + else: + response = MockResponseBuilder.project(project_id=copied_id) + + mock_http_client.add_response(f"{endpoint}/{resource_id}/copy", response) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.copy(resource_id) + + # Assert + assert result.id == copied_id + mock_http_client.assert_request_made("POST", f"{endpoint}/{resource_id}/copy") diff --git a/tests/unit/test_notification_settings.py b/tests/unit/test_notification_settings.py new file mode 100644 index 0000000..48d74b6 --- /dev/null +++ b/tests/unit/test_notification_settings.py @@ -0,0 +1,319 @@ +"""Unit tests for NotificationSettingsResource using mocks.""" + +import pytest + +from nexla_sdk.models.notification_settings import ( + NotificationSetting, + NotificationSettingBrief, + NotificationSettingCreate, + NotificationSettingUpdate, +) +from nexla_sdk.resources.notification_settings import NotificationSettingsResource +from tests.utils import assert_model_list_valid, assert_model_valid + + +@pytest.fixture +def notification_settings_resource(mock_client): + """Create a 
NotificationSettingsResource instance with mocked client.""" + return NotificationSettingsResource(mock_client) + + +@pytest.fixture +def sample_notification_setting(): + """Sample notification setting response data.""" + return { + "id": 1, + "notification_type_id": 5, + "resource_id": 123, + "resource_type": "data_sources", + "channel": "email", + "priority": 5, + "status": "ENABLED", + "payload": {"recipients": ["user@example.com"]}, + "owner_id": 10, + "org_id": 1, + "created_at": "2024-01-15T10:30:00Z", + "updated_at": "2024-01-15T10:30:00Z", + } + + +@pytest.fixture +def sample_notification_settings_list(): + """Sample list of notification settings.""" + return [ + { + "id": 1, + "notification_type_id": 5, + "resource_id": 123, + "resource_type": "data_sources", + "channel": "email", + "priority": 5, + "status": "ENABLED", + "payload": None, + "owner_id": 10, + "org_id": 1, + }, + { + "id": 2, + "notification_type_id": 6, + "resource_id": None, + "resource_type": None, + "channel": "slack", + "priority": 3, + "status": "ENABLED", + "payload": {"webhook_url": "https://hooks.slack.com/xxx"}, + "owner_id": 10, + "org_id": 1, + }, + ] + + +class TestNotificationSettingsResource: + """Unit tests for NotificationSettingsResource.""" + + def test_list_notification_settings_success( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_settings_list + ): + """Test listing notification settings with successful response.""" + mock_http_client.add_response("/notification_settings", sample_notification_settings_list) + + settings = notification_settings_resource.list() + assert len(settings) == 2 + mock_http_client.assert_request_made("GET", "/notification_settings") + + def test_list_notification_settings_with_filters( + self, mock_client, mock_http_client, notification_settings_resource + ): + """Test listing notification settings with filters.""" + response = [{"id": 1, "notification_type_id": 5, "channel": "email", "priority": 5, 
"status": "ENABLED"}] + mock_http_client.add_response("/notification_settings", response) + + settings = notification_settings_resource.list( + notification_resource_type="data_sources", + resource_id=123, + sort_by="priority", + sort_order="ASC", + ) + assert len(settings) == 1 + params = mock_http_client.get_request()["params"] + assert params["notification_resource_type"] == "data_sources" + assert params["resource_id"] == 123 + + def test_list_all_notification_settings( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_settings_list + ): + """Test listing all notification settings (super user).""" + mock_http_client.add_response("/notification_settings/all", sample_notification_settings_list) + + settings = notification_settings_resource.list_all( + resource_type="data_sources", + event_type="flow_failed", + status="ENABLED", + ) + assert len(settings) == 2 + mock_http_client.assert_request_made("GET", "/notification_settings/all") + + def test_get_notification_setting_success( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_setting + ): + """Test getting a notification setting by ID.""" + mock_http_client.add_response("/notification_settings/1", sample_notification_setting) + + setting = notification_settings_resource.get(1) + assert setting.id == 1 + assert setting.channel == "email" + assert setting.priority == 5 + mock_http_client.assert_request_made("GET", "/notification_settings/1") + + def test_create_notification_setting_success( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_setting + ): + """Test creating a notification setting.""" + mock_http_client.add_response("/notification_settings", sample_notification_setting) + + setting = notification_settings_resource.create( + NotificationSettingCreate( + notification_type_id=5, + channel="email", + priority=5, + ) + ) + assert setting.id == 1 + assert setting.channel == "email" + 
mock_http_client.assert_request_made("POST", "/notification_settings") + + def test_create_notification_setting_with_dict( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_setting + ): + """Test creating a notification setting with dict input.""" + mock_http_client.add_response("/notification_settings", sample_notification_setting) + + setting = notification_settings_resource.create( + {"notification_type_id": 5, "channel": "email", "priority": 5} + ) + assert setting.id == 1 + mock_http_client.assert_request_made("POST", "/notification_settings") + + def test_update_notification_setting_success( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_setting + ): + """Test updating a notification setting.""" + updated_setting = sample_notification_setting.copy() + updated_setting["priority"] = 10 + mock_http_client.add_response("/notification_settings/1", updated_setting) + + setting = notification_settings_resource.update(1, NotificationSettingUpdate(priority=10)) + assert setting.priority == 10 + mock_http_client.assert_request_made("PUT", "/notification_settings/1") + + def test_delete_notification_setting_success( + self, mock_client, mock_http_client, notification_settings_resource + ): + """Test deleting a notification setting.""" + mock_http_client.add_response("/notification_settings/1", {}) + + result = notification_settings_resource.delete(1) + assert result == {} + mock_http_client.assert_request_made("DELETE", "/notification_settings/1") + + def test_show_resource_settings( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_settings_list + ): + """Test getting notification settings for a resource.""" + mock_http_client.add_response( + "/notification_settings/data_sources/123", sample_notification_settings_list + ) + + settings = notification_settings_resource.show_resource_settings( + resource_type="data_sources", + resource_id=123, + 
filter_overridden_settings=True, + ) + assert len(settings) == 2 + params = mock_http_client.get_request()["params"] + assert params["filter_overridden_settings"] == "true" + + def test_show_type_settings( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_settings_list + ): + """Test getting notification settings for a notification type.""" + mock_http_client.add_response( + "/notification_settings/notification_types/5", sample_notification_settings_list + ) + + settings = notification_settings_resource.show_type_settings(notification_type_id=5) + assert len(settings) == 2 + mock_http_client.assert_request_made("GET", "/notification_settings/notification_types/5") + + def test_org_index( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_settings_list + ): + """Test listing organization notification settings.""" + mock_http_client.add_response("/orgs/1/notification_settings", sample_notification_settings_list) + + settings = notification_settings_resource.org_index(org_id=1) + assert len(settings) == 2 + mock_http_client.assert_request_made("GET", "/orgs/1/notification_settings") + + def test_org_create( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_setting + ): + """Test creating an organization notification setting.""" + mock_http_client.add_response("/orgs/1/notification_settings", sample_notification_setting) + + setting = notification_settings_resource.org_create( + org_id=1, + data=NotificationSettingCreate(notification_type_id=5, channel="email", priority=5), + ) + assert setting.id == 1 + mock_http_client.assert_request_made("POST", "/orgs/1/notification_settings") + + def test_org_update( + self, mock_client, mock_http_client, notification_settings_resource, sample_notification_setting + ): + """Test updating an organization notification setting.""" + updated_setting = sample_notification_setting.copy() + updated_setting["priority"] 
= 8 + mock_http_client.add_response("/orgs/1/notification_settings/1", updated_setting) + + setting = notification_settings_resource.org_update( + org_id=1, notification_settings_id=1, data=NotificationSettingUpdate(priority=8) + ) + assert setting.priority == 8 + mock_http_client.assert_request_made("PUT", "/orgs/1/notification_settings/1") + + def test_org_delete(self, mock_client, mock_http_client, notification_settings_resource): + """Test deleting an organization notification setting.""" + mock_http_client.add_response("/orgs/1/notification_settings/1", {}) + + result = notification_settings_resource.org_delete(org_id=1, notification_settings_id=1) + assert result == {} + mock_http_client.assert_request_made("DELETE", "/orgs/1/notification_settings/1") + + +class TestNotificationSettingModels: + """Unit tests for Notification Setting model validation.""" + + def test_notification_setting_model_validation( + self, notification_settings_resource, sample_notification_setting + ): + """Test NotificationSetting model validation.""" + setting = NotificationSetting.model_validate(sample_notification_setting) + assert setting.id == 1 + assert setting.channel == "email" + assert setting.priority == 5 + assert setting.status == "ENABLED" + + def test_notification_setting_brief_model(self): + """Test NotificationSettingBrief model validation.""" + brief_data = { + "id": 1, + "notification_type_id": 5, + "channel": "email", + "priority": 5, + "status": "ENABLED", + "resource_type": "data_sources", + "resource_id": 123, + } + brief = NotificationSettingBrief.model_validate(brief_data) + assert brief.id == 1 + assert brief.channel == "email" + + def test_notification_setting_create_model(self): + """Test NotificationSettingCreate model validation.""" + create_data = { + "notification_type_id": 5, + "channel": "email", + "priority": 5, + "status": "ENABLED", + } + create = NotificationSettingCreate.model_validate(create_data) + assert create.notification_type_id == 5 + 
assert create.channel == "email" + assert create.priority == 5 + assert create.status == "ENABLED" + + def test_notification_setting_create_with_payload(self): + """Test NotificationSettingCreate with payload.""" + create_data = { + "notification_type_id": 5, + "channel": "email", + "payload": {"recipients": ["user@example.com"]}, + } + create = NotificationSettingCreate.model_validate(create_data) + assert create.payload == {"recipients": ["user@example.com"]} + + def test_notification_setting_update_model(self): + """Test NotificationSettingUpdate model validation.""" + update_data = {"priority": 10, "status": "DISABLED"} + update = NotificationSettingUpdate.model_validate(update_data) + assert update.priority == 10 + assert update.status == "DISABLED" + + def test_notification_setting_update_partial(self): + """Test NotificationSettingUpdate with partial data.""" + update_data = {"priority": 8} + update = NotificationSettingUpdate.model_validate(update_data) + assert update.priority == 8 + assert update.channel is None + assert update.status is None diff --git a/tests/unit/test_pagination.py b/tests/unit/test_pagination.py new file mode 100644 index 0000000..420ac8c --- /dev/null +++ b/tests/unit/test_pagination.py @@ -0,0 +1,335 @@ +"""Unit tests for pagination functionality.""" + +import pytest + +from nexla_sdk.utils.pagination import Page, PageInfo, Paginator +from tests.utils import MockResponseBuilder + + +@pytest.mark.unit +class TestPageInfo: + """Tests for PageInfo model.""" + + def test_page_info_has_next_when_more_pages(self): + """Test has_next returns True when there are more pages.""" + # Arrange + page_info = PageInfo(current_page=1, total_pages=5, page_size=20) + + # Assert + assert page_info.has_next is True + + def test_page_info_has_next_false_on_last_page(self): + """Test has_next returns False when on last page.""" + # Arrange + page_info = PageInfo(current_page=5, total_pages=5, page_size=20) + + # Assert + assert page_info.has_next is 
False + + def test_page_info_has_next_unknown_total(self): + """Test has_next returns True when total_pages is unknown.""" + # Arrange + page_info = PageInfo(current_page=1, total_pages=None, page_size=20) + + # Assert + assert page_info.has_next is True + + def test_page_info_has_previous_on_page_2(self): + """Test has_previous returns True when not on first page.""" + # Arrange + page_info = PageInfo(current_page=2, total_pages=5, page_size=20) + + # Assert + assert page_info.has_previous is True + + def test_page_info_has_previous_false_on_page_1(self): + """Test has_previous returns False when on first page.""" + # Arrange + page_info = PageInfo(current_page=1, total_pages=5, page_size=20) + + # Assert + assert page_info.has_previous is False + + +@pytest.mark.unit +class TestPage: + """Tests for Page class.""" + + def test_page_iteration(self): + """Test that Page can be iterated.""" + # Arrange + items = [{"id": 1}, {"id": 2}, {"id": 3}] + page_info = PageInfo(current_page=1, page_size=20) + page = Page(items=items, page_info=page_info) + + # Act + iterated = list(page) + + # Assert + assert iterated == items + + def test_page_length(self): + """Test that Page has correct length.""" + # Arrange + items = [{"id": 1}, {"id": 2}] + page_info = PageInfo(current_page=1, page_size=20) + page = Page(items=items, page_info=page_info) + + # Assert + assert len(page) == 2 + + def test_page_indexing(self): + """Test that Page supports indexing.""" + # Arrange + items = [{"id": 1}, {"id": 2}, {"id": 3}] + page_info = PageInfo(current_page=1, page_size=20) + page = Page(items=items, page_info=page_info) + + # Assert + assert page[0] == {"id": 1} + assert page[2] == {"id": 3} + + +@pytest.mark.unit +class TestPaginatorBasics: + """Tests for basic Paginator functionality.""" + + def test_paginator_initialization(self, mock_client): + """Test that paginator can be initialized.""" + # Act + paginator = mock_client.sources.paginate(per_page=10) + + # Assert + assert 
paginator.page_size == 10 + + def test_paginator_passes_page_params(self, mock_client, mock_http_client): + """Test that paginator passes page and per_page to list().""" + # Arrange - return a simple list (the SDK's list() parses the response) + sources = [MockResponseBuilder.source() for _ in range(5)] + mock_http_client.add_response("/data_sources", sources) + + # Act + paginator = mock_client.sources.paginate(per_page=5) + page = paginator.get_page(1) + + # Assert + assert len(page.items) == 5 + mock_http_client.assert_request_made("GET", "/data_sources") + + +@pytest.mark.unit +class TestPaginatorWithRawFetch: + """Tests for Paginator using raw fetch functions. + + These tests use a simple fetch function to test Paginator's logic + independent of the SDK's model parsing. + """ + + def test_paginator_extracts_data_from_paginated_response(self): + """Test that Paginator extracts items from paginated response format.""" + # Arrange + def mock_fetch(page=1, per_page=20): + return { + "data": [{"id": i} for i in range(per_page)], + "meta": { + "currentPage": page, + "totalCount": 100, + "pageCount": 5, + "perPage": per_page, + }, + } + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=20) + page = paginator.get_page(1) + + # Assert + assert len(page.items) == 20 + assert page.page_info.total_pages == 5 + assert page.page_info.total_count == 100 + assert page.page_info.current_page == 1 + + def test_paginator_handles_list_response(self): + """Test that Paginator handles plain list responses.""" + # Arrange + def mock_fetch(page=1, per_page=20): + return [{"id": i} for i in range(10)] + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=20) + page = paginator.get_page(1) + + # Assert + assert len(page.items) == 10 + # Without meta, total_pages is unknown + assert page.page_info.total_pages is None + + def test_iterate_all_items_multiple_pages(self): + """Test iterating all items across multiple pages.""" + # Arrange + call_count = 0 + + 
def mock_fetch(page=1, per_page=5): + nonlocal call_count + call_count += 1 + if page == 1: + return [{"id": i} for i in range(5)] # Full page + elif page == 2: + return [{"id": i + 5} for i in range(3)] # Partial page + return [] + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=5) + all_items = list(paginator) + + # Assert + assert len(all_items) == 8 + assert call_count == 2 + + def test_iterate_stops_on_empty_page(self): + """Test that iteration stops when page is empty.""" + # Arrange + def mock_fetch(page=1, per_page=5): + if page == 1: + return [{"id": 1}] + return [] + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=5) + all_items = list(paginator) + + # Assert + assert len(all_items) == 1 + + def test_iter_pages_yields_page_objects(self): + """Test iter_pages yields Page objects.""" + # Arrange + def mock_fetch(page=1, per_page=5): + if page == 1: + return [{"id": 1}, {"id": 2}] + return [] + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=5) + pages = list(paginator.iter_pages()) + + # Assert + assert len(pages) == 1 + assert isinstance(pages[0], Page) + assert len(pages[0]) == 2 + + +@pytest.mark.unit +class TestPaginationMetadata: + """Tests for pagination metadata handling.""" + + def test_page_extracts_total_pages_from_camel_case_meta(self): + """Test extracting total pages from camelCase meta keys.""" + # Arrange + def mock_fetch(page=1, per_page=5): + return { + "data": [{"id": i} for i in range(5)], + "meta": { + "currentPage": page, + "totalCount": 25, + "pageCount": 5, + "perPage": per_page, + }, + } + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=5) + page = paginator.get_page(2) + + # Assert + assert page.page_info.total_pages == 5 + assert page.page_info.total_count == 25 + assert page.page_info.current_page == 2 + + def test_page_handles_snake_case_meta_keys(self): + """Test handling of snake_case meta keys.""" + # Arrange + def mock_fetch(page=1, per_page=5): + return { + 
"data": [{"id": i} for i in range(5)], + "meta": { + "current_page": page, + "total_count": 50, + "total_pages": 10, + }, + } + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=5) + page = paginator.get_page(1) + + # Assert + assert page.page_info.total_pages == 10 + assert page.page_info.total_count == 50 + + +@pytest.mark.unit +class TestPaginationEdgeCases: + """Tests for pagination edge cases.""" + + def test_empty_results(self): + """Test handling of empty results.""" + # Arrange + def mock_fetch(page=1, per_page=20): + return {"data": [], "meta": {"currentPage": 1, "totalCount": 0, "pageCount": 0}} + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=20) + page = paginator.get_page(1) + + # Assert + assert len(page) == 0 + # total_count can be None or 0 depending on API response + assert page.page_info.total_count in (0, None) + + def test_single_item(self): + """Test handling of single item results.""" + # Arrange + def mock_fetch(page=1, per_page=20): + return {"data": [{"id": 1}], "meta": {"currentPage": 1, "totalCount": 1, "pageCount": 1}} + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=20) + page = paginator.get_page(1) + + # Assert + assert len(page) == 1 + assert page.page_info.has_next is False + + def test_has_next_based_on_total_pages(self): + """Test has_next calculation when total_pages is known.""" + # Arrange + def mock_fetch(page=1, per_page=5): + return { + "data": [{"id": i} for i in range(5)], + "meta": {"currentPage": page, "pageCount": 3}, + } + + # Act + paginator = Paginator(fetch_func=mock_fetch, page_size=5) + page1 = paginator.get_page(1) + page3 = paginator.get_page(3) + + # Assert + assert page1.page_info.has_next is True + assert page3.page_info.has_next is False + + def test_list_response_without_meta(self): + """Test handling when response is just a list (no meta).""" + # Arrange + def mock_fetch(page=1, per_page=20): + return [{"id": i} for i in range(3)] + + # Act + paginator = 
Paginator(fetch_func=mock_fetch, page_size=20) + page = paginator.get_page(1) + + # Assert + assert len(page) == 3 + # Without meta, we don't know total pages + assert page.page_info.total_pages is None diff --git a/tests/unit/test_parity_tooling.py b/tests/unit/test_parity_tooling.py new file mode 100644 index 0000000..6a92569 --- /dev/null +++ b/tests/unit/test_parity_tooling.py @@ -0,0 +1,88 @@ +"""Unit tests for parity tooling scripts.""" + +from __future__ import annotations + +import json +import subprocess +import sys +from pathlib import Path + +import pytest + + +@pytest.mark.unit +def test_generate_operation_map_script(): + result = subprocess.run( + [ + sys.executable, + "scripts/parity/generate_operation_map.py", + "--spec", + "plugin-redoc-0.yaml", + "--output", + "nexla_sdk/generated/operation_map.py", + ], + check=False, + capture_output=True, + text=True, + ) + assert result.returncode == 0, result.stderr + assert Path("nexla_sdk/generated/operation_map.py").exists() + + +@pytest.mark.unit +def test_check_operation_map_sync_script(): + result = subprocess.run( + [sys.executable, "scripts/parity/check_operation_map_sync.py"], + check=False, + capture_output=True, + text=True, + ) + assert result.returncode == 0, result.stderr + + +@pytest.mark.unit +def test_build_matrices_script_with_temp_routes(tmp_path: Path): + routes = tmp_path / "routes.rb" + routes.write_text( + "\n".join( + [ + "post '/token', :to => 'token#create'", + "put '/token/logout', :to => 'token#invalidate'", + "match '/notification_types/list' => 'notification_types#list', :via => [:get]", + ] + ) + ) + out_dir = tmp_path / "parity_out" + + result = subprocess.run( + [ + sys.executable, + "scripts/parity/build_matrices.py", + "--spec", + "plugin-redoc-0.yaml", + "--admin-routes", + str(routes), + "--resources-dir", + "nexla_sdk/resources", + "--out-dir", + str(out_dir), + ], + check=False, + capture_output=True, + text=True, + ) + + assert result.returncode == 0, result.stderr + 
expected_files = { + "openapi_matrix.json", + "admin_routes_matrix.json", + "sdk_matrix.json", + "diff_openapi_vs_sdk.json", + "diff_admin_routes_vs_sdk.json", + } + created_files = {path.name for path in out_dir.glob("*.json")} + assert expected_files.issubset(created_files) + + diff_payload = json.loads((out_dir / "diff_openapi_vs_sdk.json").read_text()) + assert "missing_in_sdk" in diff_payload + assert "extra_in_sdk" in diff_payload diff --git a/tests/unit/test_projects.py b/tests/unit/test_projects.py index 595b154..250773b 100644 --- a/tests/unit/test_projects.py +++ b/tests/unit/test_projects.py @@ -211,6 +211,56 @@ def test_search_flows(self, mock_client): "POST", f"/projects/{project_id}/flows/search" ) + def test_legacy_data_flows_endpoints(self, mock_client): + """Test deprecated /data_flows project endpoints.""" + project_id = 123 + factory = MockDataFactory() + mock_data = [factory.create_mock_project_data_flow() for _ in range(2)] + mock_client.http_client.add_response( + f"/projects/{project_id}/data_flows", mock_data + ) + + list_result = mock_client.projects.get_data_flows_legacy(project_id) + assert len(list_result) == 2 + assert all(isinstance(flow, ProjectDataFlow) for flow in list_result) + mock_client.http_client.assert_request_made( + "GET", f"/projects/{project_id}/data_flows" + ) + + flows = ProjectFlowList(data_flows=[ProjectFlowIdentifier(data_source_id=456)]) + + mock_client.http_client.clear_responses() + mock_client.http_client.add_response( + f"/projects/{project_id}/data_flows", mock_data + ) + add_result = mock_client.projects.add_data_flows_legacy(project_id, flows) + assert len(add_result) == 2 + mock_client.http_client.assert_request_made( + "PUT", f"/projects/{project_id}/data_flows" + ) + + mock_client.http_client.clear_responses() + mock_client.http_client.add_response( + f"/projects/{project_id}/data_flows", mock_data + ) + replace_result = mock_client.projects.replace_data_flows_legacy( + project_id, flows + ) + assert 
len(replace_result) == 2 + mock_client.http_client.assert_request_made( + "POST", f"/projects/{project_id}/data_flows" + ) + + mock_client.http_client.clear_responses() + mock_client.http_client.add_response( + f"/projects/{project_id}/data_flows", mock_data + ) + remove_result = mock_client.projects.remove_data_flows_legacy(project_id, flows) + assert len(remove_result) == 2 + mock_client.http_client.assert_request_made( + "DELETE", f"/projects/{project_id}/data_flows" + ) + def test_add_data_flows(self, mock_client): """Test adding data flows to a project.""" # Arrange diff --git a/tests/unit/test_raw_operations.py b/tests/unit/test_raw_operations.py new file mode 100644 index 0000000..51e5c6c --- /dev/null +++ b/tests/unit/test_raw_operations.py @@ -0,0 +1,66 @@ +"""Unit tests for operation-level raw client access.""" + +import pytest + +from nexla_sdk.exceptions import ValidationError + + +@pytest.mark.unit +class TestRawOperationsClient: + def test_list_operations_includes_known_operation(self, mock_client): + operations = mock_client.raw.list_operations() + assert "get_project_flows" in operations + assert "get_data_sources" in operations + + def test_call_operation_renders_path_and_uses_method(self, mock_client): + mock_client.http_client.add_response( + "/projects/123/flows", {"data": [{"flow_id": 123}]} + ) + + response = mock_client.raw.call( + "get_project_flows", path_params={"project_id": 123} + ) + + assert response == {"data": [{"flow_id": 123}]} + mock_client.http_client.assert_request_made("GET", "/projects/123/flows") + + def test_call_operation_with_query_and_body(self, mock_client): + mock_client.http_client.add_response("/notifications", {"status": "ok"}) + + response = mock_client.raw.call( + "get_notifications", + query={"page": 2, "per_page": 10}, + body={"read": True}, + ) + + assert response == {"status": "ok"} + request = mock_client.http_client.get_last_request() + assert request["params"] == {"page": 2, "per_page": 10} + assert 
request["json"] == {"read": True} + assert request["method"] == "GET" + + def test_call_operation_missing_path_param_raises_validation_error( + self, mock_client + ): + with pytest.raises(ValidationError): + mock_client.raw.call("get_project_flows") + + def test_call_unknown_operation_raises_validation_error(self, mock_client): + with pytest.raises(ValidationError): + mock_client.raw.call("not_a_real_operation") + + def test_direct_helpers(self, mock_client): + mock_client.http_client.add_response("/limits", {"second": {"common": {}}}) + response = mock_client.raw.get("/limits") + assert "second" in response + mock_client.http_client.assert_request_made("GET", "/limits") + + def test_generic_request_supports_backend_only_routes(self, mock_client): + mock_client.http_client.add_response( + "/self_signup_requests/1/approve", {"id": 1} + ) + response = mock_client.raw.request("post", "/self_signup_requests/1/approve") + assert response["id"] == 1 + mock_client.http_client.assert_request_made( + "POST", "/self_signup_requests/1/approve" + ) diff --git a/tests/unit/test_search_operations.py b/tests/unit/test_search_operations.py new file mode 100644 index 0000000..eefe491 --- /dev/null +++ b/tests/unit/test_search_operations.py @@ -0,0 +1,329 @@ +"""Unit tests for search operations across resources. + +This module tests the search() and search_tags() methods available on BaseResource. +These methods allow searching resources using filter criteria and tags respectively. 
+""" + +import pytest + +from nexla_sdk.exceptions import NotFoundError, ValidationError +from nexla_sdk.models.sources.responses import Source +from tests.utils import ( + MockDataFactory, + MockResponseBuilder, + create_http_error, + destination_list, + nexset_list, + source_list, +) + + +@pytest.mark.unit +class TestSearchOperation: + """Tests for the search() method on resources.""" + + def test_search_with_simple_filter_returns_results( + self, mock_client, mock_http_client + ): + """Test that search with a simple filter returns matching results.""" + # Arrange + mock_sources = source_list(count=3) + mock_http_client.add_response("/data_sources/search", mock_sources) + + filters = {"status": "ACTIVE"} + + # Act + results = mock_client.sources.search(filters) + + # Assert + assert len(results) == 3 + mock_http_client.assert_request_made("POST", "/data_sources/search") + last_request = mock_http_client.get_last_request() + assert last_request["json"] == filters + + def test_search_with_multiple_filters_and_logic( + self, mock_client, mock_http_client + ): + """Test search with multiple filters uses AND logic.""" + # Arrange + mock_sources = source_list(count=2) + mock_http_client.add_response("/data_sources/search", mock_sources) + + filters = { + "status": "ACTIVE", + "source_type": "postgres", + "managed": False, + } + + # Act + results = mock_client.sources.search(filters) + + # Assert + assert len(results) == 2 + mock_http_client.assert_request_made("POST", "/data_sources/search") + last_request = mock_http_client.get_last_request() + assert last_request["json"] == filters + assert last_request["json"]["status"] == "ACTIVE" + assert last_request["json"]["source_type"] == "postgres" + assert last_request["json"]["managed"] is False + + def test_search_with_empty_filters_returns_empty( + self, mock_client, mock_http_client + ): + """Test that search with empty filters returns empty list.""" + # Arrange + mock_http_client.add_response("/data_sources/search", 
[]) + + filters = {} + + # Act + results = mock_client.sources.search(filters) + + # Assert + assert results == [] + mock_http_client.assert_request_made("POST", "/data_sources/search") + + def test_search_with_pagination_params(self, mock_client, mock_http_client): + """Test that search respects pagination parameters.""" + # Arrange + mock_sources = source_list(count=5) + mock_http_client.add_response("/data_sources/search", mock_sources) + + filters = {"status": "ACTIVE"} + + # Act + results = mock_client.sources.search(filters, page=2, per_page=5) + + # Assert + assert len(results) == 5 + mock_http_client.assert_request_made("POST", "/data_sources/search") + last_request = mock_http_client.get_last_request() + assert last_request["params"]["page"] == 2 + assert last_request["params"]["per_page"] == 5 + + def test_search_returns_proper_model_objects(self, mock_client, mock_http_client): + """Test that search returns properly parsed model objects.""" + # Arrange + factory = MockDataFactory() + mock_source = factory.create_mock_source( + id=123, + name="Test Source", + status="ACTIVE", + source_type="postgres", + ) + mock_http_client.add_response("/data_sources/search", [mock_source]) + + filters = {"id": 123} + + # Act + results = mock_client.sources.search(filters) + + # Assert + assert len(results) == 1 + assert isinstance(results[0], Source) + assert results[0].id == 123 + assert results[0].name == "Test Source" + assert results[0].status == "ACTIVE" + + def test_search_not_found_returns_404(self, mock_client, mock_http_client): + """Test that search for non-existent resources returns 404.""" + # Arrange + mock_http_client.add_error( + "/data_sources/search", + create_http_error(404, "No resources found"), + ) + + filters = {"id": 99999} + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.search(filters) + + def test_search_validation_error_returns_400(self, mock_client, mock_http_client): + """Test that search with invalid filters 
returns 400.""" + # Arrange + mock_http_client.add_error( + "/data_sources/search", + create_http_error(400, "Invalid filter parameters"), + ) + + filters = {"invalid_field": "invalid_value"} + + # Act & Assert + with pytest.raises(ValidationError): + mock_client.sources.search(filters) + + +@pytest.mark.unit +class TestSearchTagsOperation: + """Tests for the search_tags() method on resources.""" + + def test_search_tags_with_single_tag(self, mock_client, mock_http_client): + """Test search_tags with a single tag returns matching results.""" + # Arrange + mock_sources = source_list(count=2) + mock_http_client.add_response("/data_sources/search_tags", mock_sources) + + tags = ["production"] + + # Act + results = mock_client.sources.search_tags(tags) + + # Assert + assert len(results) == 2 + mock_http_client.assert_request_made("POST", "/data_sources/search_tags") + last_request = mock_http_client.get_last_request() + assert last_request["json"] == tags + + def test_search_tags_with_multiple_tags(self, mock_client, mock_http_client): + """Test search_tags with multiple tags returns matching results.""" + # Arrange + mock_sources = source_list(count=3) + mock_http_client.add_response("/data_sources/search_tags", mock_sources) + + tags = ["production", "analytics", "important"] + + # Act + results = mock_client.sources.search_tags(tags) + + # Assert + assert len(results) == 3 + mock_http_client.assert_request_made("POST", "/data_sources/search_tags") + last_request = mock_http_client.get_last_request() + assert last_request["json"] == tags + assert len(last_request["json"]) == 3 + + def test_search_tags_with_nonexistent_tag_returns_empty( + self, mock_client, mock_http_client + ): + """Test that search_tags with non-existent tag returns empty list.""" + # Arrange + mock_http_client.add_response("/data_sources/search_tags", []) + + tags = ["nonexistent-tag-12345"] + + # Act + results = mock_client.sources.search_tags(tags) + + # Assert + assert results == [] + 
mock_http_client.assert_request_made("POST", "/data_sources/search_tags") + + def test_search_tags_with_query_params(self, mock_client, mock_http_client): + """Test search_tags with additional query parameters.""" + # Arrange + mock_sources = source_list(count=2) + mock_http_client.add_response("/data_sources/search_tags", mock_sources) + + tags = ["production"] + + # Act + results = mock_client.sources.search_tags(tags, page=1, per_page=10) + + # Assert + assert len(results) == 2 + last_request = mock_http_client.get_last_request() + assert last_request["params"]["page"] == 1 + assert last_request["params"]["per_page"] == 10 + + +@pytest.mark.unit +class TestSearchAcrossResources: + """Tests verifying search operations work across different resource types.""" + + @pytest.mark.parametrize( + "resource_name,endpoint,mock_data_func", + [ + ("sources", "/data_sources", source_list), + ("destinations", "/data_sinks", destination_list), + ("nexsets", "/data_sets", nexset_list), + ], + ) + def test_search_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint, mock_data_func + ): + """Test search works for different resource types.""" + # Arrange + mock_data = mock_data_func(count=2) + mock_http_client.add_response(f"{endpoint}/search", mock_data) + + filters = {"status": "ACTIVE"} + + # Act + resource = getattr(mock_client, resource_name) + results = resource.search(filters) + + # Assert + assert len(results) == 2 + mock_http_client.assert_request_made("POST", f"{endpoint}/search") + + @pytest.mark.parametrize( + "resource_name,endpoint,mock_data_func", + [ + ("sources", "/data_sources", source_list), + ("destinations", "/data_sinks", destination_list), + ("nexsets", "/data_sets", nexset_list), + ], + ) + def test_search_tags_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint, mock_data_func + ): + """Test search_tags works for different resource types.""" + # Arrange + mock_data = mock_data_func(count=3) + 
mock_http_client.add_response(f"{endpoint}/search_tags", mock_data) + + tags = ["production", "test"] + + # Act + resource = getattr(mock_client, resource_name) + results = resource.search_tags(tags) + + # Assert + assert len(results) == 3 + mock_http_client.assert_request_made("POST", f"{endpoint}/search_tags") + last_request = mock_http_client.get_last_request() + assert last_request["json"] == tags + + +@pytest.mark.unit +class TestSearchResponseBuilder: + """Tests for the SearchResponseBuilder utility.""" + + def test_search_response_with_items(self): + """Test building search response with items.""" + items = [{"id": 1, "name": "Item 1"}, {"id": 2, "name": "Item 2"}] + + response = MockResponseBuilder.search_response(items=items) + + assert response["data"] == items + assert response["meta"]["total_count"] == 2 + assert response["meta"]["page"] == 1 + assert response["meta"]["per_page"] == 20 + + def test_search_response_with_custom_total(self): + """Test building search response with custom total count.""" + items = [{"id": 1, "name": "Item 1"}] + + response = MockResponseBuilder.search_response(items=items, total=100) + + assert response["data"] == items + assert response["meta"]["total_count"] == 100 + + def test_search_response_with_pagination(self): + """Test building search response with pagination params.""" + items = [{"id": 1, "name": "Item 1"}] + + response = MockResponseBuilder.search_response( + items=items, page=3, per_page=50 + ) + + assert response["meta"]["page"] == 3 + assert response["meta"]["per_page"] == 50 + + def test_search_response_empty(self): + """Test building empty search response.""" + response = MockResponseBuilder.search_response() + + assert response["data"] == [] + assert response["meta"]["total_count"] == 0 diff --git a/tests/unit/test_service_keys.py b/tests/unit/test_service_keys.py new file mode 100644 index 0000000..fcf74cd --- /dev/null +++ b/tests/unit/test_service_keys.py @@ -0,0 +1,266 @@ +"""Unit tests for service 
keys resource.""" + +import pytest + +from nexla_sdk.models.service_keys.requests import ( + ServiceKeyCreate, + ServiceKeyUpdate, +) +from nexla_sdk.models.service_keys.responses import ServiceKey +from tests.utils import assert_model_list_valid, assert_model_valid + + +# Sample response data +SAMPLE_SERVICE_KEY = { + "id": 123, + "owner_id": 1, + "org_id": 1, + "name": "Test Service Key", + "description": "A test service key", + "status": "ACTIVE", + "api_key": "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6", + "last_rotated_key": None, + "last_rotated_at": None, + "data_source_id": None, + "updated_at": "2025-01-01T00:00:00Z", + "created_at": "2025-01-01T00:00:00Z", +} + +SAMPLE_SERVICE_KEYS_LIST = [ + SAMPLE_SERVICE_KEY, + {**SAMPLE_SERVICE_KEY, "id": 124, "name": "Another Key", "status": "PAUSED"}, + {**SAMPLE_SERVICE_KEY, "id": 125, "name": "Third Key"}, +] + + +@pytest.fixture +def sample_service_key_response(): + """Sample service key response.""" + return SAMPLE_SERVICE_KEY.copy() + + +@pytest.fixture +def sample_service_keys_list(): + """Sample service keys list response.""" + return [k.copy() for k in SAMPLE_SERVICE_KEYS_LIST] + + +@pytest.mark.unit +class TestServiceKeysResource: + """Unit tests for ServiceKeysResource using mocks.""" + + def test_list_service_keys_success( + self, mock_client, mock_http_client, sample_service_keys_list + ): + """Test listing service keys with successful response.""" + mock_http_client.add_response("/service_keys", sample_service_keys_list) + + keys = mock_client.service_keys.list() + + assert len(keys) == 3 + assert_model_list_valid(keys, ServiceKey) + mock_http_client.assert_request_made("GET", "/service_keys") + + def test_list_service_keys_all( + self, mock_client, mock_http_client, sample_service_keys_list + ): + """Test listing all service keys in org.""" + mock_http_client.add_response("/service_keys", sample_service_keys_list) + + keys = mock_client.service_keys.list(all_keys=True) + + assert len(keys) == 3 + request = 
mock_http_client.get_request() + assert "all" in str(request) + + def test_list_service_keys_with_pagination( + self, mock_client, mock_http_client, sample_service_keys_list + ): + """Test listing service keys with pagination.""" + mock_http_client.add_response("/service_keys", sample_service_keys_list) + + keys = mock_client.service_keys.list(page=1, per_page=10) + + assert len(keys) == 3 + + def test_get_service_key_by_id( + self, mock_client, mock_http_client, sample_service_key_response + ): + """Test getting a service key by ID.""" + key_id = 123 + mock_http_client.add_response( + f"/service_keys/{key_id}", sample_service_key_response + ) + + key = mock_client.service_keys.get(key_id) + + assert_model_valid(key, {"id": key_id}) + mock_http_client.assert_request_made("GET", f"/service_keys/{key_id}") + + def test_get_service_key_by_key_value( + self, mock_client, mock_http_client, sample_service_key_response + ): + """Test getting a service key by its api_key value.""" + api_key = "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6" + mock_http_client.add_response( + f"/service_keys/{api_key}", sample_service_key_response + ) + + key = mock_client.service_keys.get(api_key) + + assert_model_valid(key, {"api_key": api_key}) + + def test_create_service_key_success( + self, mock_client, mock_http_client, sample_service_key_response + ): + """Test creating a service key.""" + mock_http_client.add_response("/service_keys", sample_service_key_response) + + create_data = ServiceKeyCreate( + name="Test Service Key", + description="A test service key", + ) + key = mock_client.service_keys.create(create_data) + + assert_model_valid(key, {"name": "Test Service Key"}) + mock_http_client.assert_request_made("POST", "/service_keys") + + def test_update_service_key_success( + self, mock_client, mock_http_client, sample_service_key_response + ): + """Test updating a service key.""" + key_id = 123 + updated_response = {**sample_service_key_response, "name": "Updated Key"} + 
mock_http_client.add_response( + f"/service_keys/{key_id}", updated_response + ) + + update_data = ServiceKeyUpdate(name="Updated Key") + key = mock_client.service_keys.update(key_id, update_data) + + assert key.name == "Updated Key" + mock_http_client.assert_request_made("PUT", f"/service_keys/{key_id}") + + def test_delete_service_key_success(self, mock_client, mock_http_client): + """Test deleting a service key.""" + key_id = 123 + mock_http_client.add_response( + f"/service_keys/{key_id}", {"success": True} + ) + + result = mock_client.service_keys.delete(key_id) + + assert result["success"] is True + mock_http_client.assert_request_made("DELETE", f"/service_keys/{key_id}") + + def test_rotate_service_key_success( + self, mock_client, mock_http_client, sample_service_key_response + ): + """Test rotating a service key.""" + key_id = 123 + rotated_response = { + **sample_service_key_response, + "api_key": "new_rotated_key_12345678901234567", + "last_rotated_key": "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6", + "last_rotated_at": "2025-01-02T00:00:00Z", + } + mock_http_client.add_response( + f"/service_keys/{key_id}/rotate", rotated_response + ) + + key = mock_client.service_keys.rotate(key_id) + + assert key.api_key == "new_rotated_key_12345678901234567" + assert key.last_rotated_key == "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6" + mock_http_client.assert_request_made("PUT", f"/service_keys/{key_id}/rotate") + + def test_activate_service_key_success( + self, mock_client, mock_http_client, sample_service_key_response + ): + """Test activating a service key.""" + key_id = 123 + activated_response = {**sample_service_key_response, "status": "ACTIVE"} + mock_http_client.add_response( + f"/service_keys/{key_id}/activate", activated_response + ) + + key = mock_client.service_keys.activate(key_id) + + assert key.status == "ACTIVE" + mock_http_client.assert_request_made("PUT", f"/service_keys/{key_id}/activate") + + def test_pause_service_key_success( + self, mock_client, mock_http_client, 
sample_service_key_response + ): + """Test pausing a service key.""" + key_id = 123 + paused_response = {**sample_service_key_response, "status": "PAUSED"} + mock_http_client.add_response( + f"/service_keys/{key_id}/pause", paused_response + ) + + key = mock_client.service_keys.pause(key_id) + + assert key.status == "PAUSED" + mock_http_client.assert_request_made("PUT", f"/service_keys/{key_id}/pause") + + +@pytest.mark.unit +class TestServiceKeyModels: + """Unit tests for service key models.""" + + def test_service_key_model_validation(self, sample_service_key_response): + """Test ServiceKey model parses valid data correctly.""" + key = ServiceKey.model_validate(sample_service_key_response) + + assert key.id == 123 + assert key.name == "Test Service Key" + assert key.status == "ACTIVE" + assert key.api_key == "a1b2c3d4e5f6g7h8i9j0k1l2m3n4o5p6" + assert key.owner_id == 1 + assert key.org_id == 1 + + def test_service_key_with_rotated_key(self): + """Test ServiceKey model with rotated key data.""" + data = { + **SAMPLE_SERVICE_KEY, + "last_rotated_key": "old_key_value", + "last_rotated_at": "2025-01-02T00:00:00Z", + } + key = ServiceKey.model_validate(data) + + assert key.last_rotated_key == "old_key_value" + assert key.last_rotated_at is not None + + def test_service_key_create_model_serialization(self): + """Test ServiceKeyCreate model serialization.""" + create_data = ServiceKeyCreate( + name="My Key", + description="My description", + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["name"] == "My Key" + assert data["description"] == "My description" + + def test_service_key_create_with_data_source(self): + """Test ServiceKeyCreate model with data_source_id.""" + create_data = ServiceKeyCreate( + name="Flow Key", + description="Key for flow", + data_source_id=456, + ) + + data = create_data.model_dump(exclude_none=True) + + assert data["data_source_id"] == 456 + + def test_service_key_update_model_serialization(self): + """Test 
ServiceKeyUpdate model serialization.""" + update_data = ServiceKeyUpdate(name="Updated Name") + + data = update_data.model_dump(exclude_none=True) + + assert data["name"] == "Updated Name" + assert "description" not in data diff --git a/tests/unit/test_tag_management.py b/tests/unit/test_tag_management.py new file mode 100644 index 0000000..06149d2 --- /dev/null +++ b/tests/unit/test_tag_management.py @@ -0,0 +1,525 @@ +"""Unit tests for tag management operations across resources. + +Tests cover: +- get_tags: Retrieve tags for a resource +- set_tags: Replace all tags (POST) +- add_tags: Merge tags with existing (PUT) +- remove_tags: Delete specific tags (DELETE) +""" + +import pytest + +from nexla_sdk.exceptions import AuthorizationError, NotFoundError +from tests.utils import MockResponseBuilder, create_http_error + + +@pytest.mark.unit +class TestGetTags: + """Tests for retrieving tags from resources.""" + + def test_get_tags_returns_list(self, mock_client, mock_http_client): + """Test that get_tags returns a list of tags.""" + # Arrange + resource_id = 123 + expected_tags = ["production", "critical", "data-pipeline"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", expected_tags + ) + + # Act + result = mock_client.sources.get_tags(resource_id) + + # Assert + assert result == expected_tags + assert isinstance(result, list) + assert len(result) == 3 + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/tags" + ) + + def test_get_tags_empty_list(self, mock_client, mock_http_client): + """Test get_tags returns empty list when no tags exist.""" + # Arrange + resource_id = 123 + mock_http_client.add_response(f"/data_sources/{resource_id}/tags", []) + + # Act + result = mock_client.sources.get_tags(resource_id) + + # Assert + assert result == [] + assert isinstance(result, list) + mock_http_client.assert_request_made( + "GET", f"/data_sources/{resource_id}/tags" + ) + + def test_get_tags_single_tag(self, mock_client, 
mock_http_client): + """Test get_tags with a single tag.""" + # Arrange + resource_id = 456 + expected_tags = ["environment:staging"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", expected_tags + ) + + # Act + result = mock_client.sources.get_tags(resource_id) + + # Assert + assert result == expected_tags + assert len(result) == 1 + + +@pytest.mark.unit +class TestSetTags: + """Tests for replacing all tags on a resource.""" + + def test_set_tags_success(self, mock_client, mock_http_client): + """Test that set_tags replaces all tags.""" + # Arrange + resource_id = 123 + new_tags = ["new-tag-1", "new-tag-2"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", new_tags + ) + + # Act + result = mock_client.sources.set_tags(resource_id, new_tags) + + # Assert + assert result == new_tags + mock_http_client.assert_request_made( + "POST", f"/data_sources/{resource_id}/tags" + ) + # Verify the request body contains the tags + last_request = mock_http_client.get_last_request() + assert last_request is not None + assert last_request.get("json") == new_tags + + def test_set_tags_with_empty_list_clears_all(self, mock_client, mock_http_client): + """Test that set_tags with empty list clears all tags.""" + # Arrange + resource_id = 123 + mock_http_client.add_response(f"/data_sources/{resource_id}/tags", []) + + # Act + result = mock_client.sources.set_tags(resource_id, []) + + # Assert + assert result == [] + mock_http_client.assert_request_made( + "POST", f"/data_sources/{resource_id}/tags" + ) + last_request = mock_http_client.get_last_request() + assert last_request.get("json") == [] + + def test_set_tags_replaces_existing(self, mock_client, mock_http_client): + """Test that set_tags completely replaces existing tags.""" + # Arrange + resource_id = 789 + # Simulate replacing ["old-tag"] with ["completely", "new", "tags"] + new_tags = ["completely", "new", "tags"] + mock_http_client.add_response( + f"/data_sinks/{resource_id}/tags", 
new_tags + ) + + # Act + result = mock_client.destinations.set_tags(resource_id, new_tags) + + # Assert + assert result == new_tags + assert "completely" in result + mock_http_client.assert_request_made( + "POST", f"/data_sinks/{resource_id}/tags" + ) + + +@pytest.mark.unit +class TestAddTags: + """Tests for adding/merging tags to a resource.""" + + def test_add_tags_success(self, mock_client, mock_http_client): + """Test that add_tags merges with existing tags.""" + # Arrange + resource_id = 123 + tags_to_add = ["new-tag"] + # Simulated merged response (existing + new) + merged_tags = ["existing-tag", "new-tag"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", merged_tags + ) + + # Act + result = mock_client.sources.add_tags(resource_id, tags_to_add) + + # Assert + assert result == merged_tags + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/tags" + ) + last_request = mock_http_client.get_last_request() + assert last_request.get("json") == tags_to_add + + def test_add_tags_with_duplicate_tags_idempotent( + self, mock_client, mock_http_client + ): + """Test that add_tags with duplicate tags is idempotent.""" + # Arrange + resource_id = 123 + # Adding a tag that already exists + tags_to_add = ["existing-tag"] + # Response shows same tags (no duplicates created) + existing_tags = ["existing-tag", "another-tag"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", existing_tags + ) + + # Act + result = mock_client.sources.add_tags(resource_id, tags_to_add) + + # Assert + assert result == existing_tags + # Verify no duplicate "existing-tag" in result + assert result.count("existing-tag") == 1 + mock_http_client.assert_request_made( + "PUT", f"/data_sources/{resource_id}/tags" + ) + + def test_add_multiple_tags(self, mock_client, mock_http_client): + """Test adding multiple tags at once.""" + # Arrange + resource_id = 456 + tags_to_add = ["tag-1", "tag-2", "tag-3"] + merged_tags = ["original", 
"tag-1", "tag-2", "tag-3"] + mock_http_client.add_response( + f"/data_sets/{resource_id}/tags", merged_tags + ) + + # Act + result = mock_client.nexsets.add_tags(resource_id, tags_to_add) + + # Assert + assert result == merged_tags + assert all(tag in result for tag in tags_to_add) + mock_http_client.assert_request_made( + "PUT", f"/data_sets/{resource_id}/tags" + ) + + +@pytest.mark.unit +class TestRemoveTags: + """Tests for removing tags from a resource.""" + + def test_remove_tags_success(self, mock_client, mock_http_client): + """Test that remove_tags deletes specific tags.""" + # Arrange + resource_id = 123 + tags_to_remove = ["tag-to-remove"] + # Remaining tags after removal + remaining_tags = ["kept-tag-1", "kept-tag-2"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", remaining_tags + ) + + # Act + result = mock_client.sources.remove_tags(resource_id, tags_to_remove) + + # Assert + assert result == remaining_tags + assert "tag-to-remove" not in result + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/tags" + ) + last_request = mock_http_client.get_last_request() + assert last_request.get("json") == tags_to_remove + + def test_remove_multiple_tags(self, mock_client, mock_http_client): + """Test removing multiple tags at once.""" + # Arrange + resource_id = 789 + tags_to_remove = ["remove-1", "remove-2"] + remaining_tags = ["keeper"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", remaining_tags + ) + + # Act + result = mock_client.sources.remove_tags(resource_id, tags_to_remove) + + # Assert + assert result == remaining_tags + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/tags" + ) + + def test_remove_all_tags_leaves_empty(self, mock_client, mock_http_client): + """Test removing all tags leaves empty list.""" + # Arrange + resource_id = 123 + tags_to_remove = ["only-tag"] + mock_http_client.add_response(f"/data_sources/{resource_id}/tags", []) + + 
# Act + result = mock_client.sources.remove_tags(resource_id, tags_to_remove) + + # Assert + assert result == [] + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/tags" + ) + + def test_remove_nonexistent_tags_no_error(self, mock_client, mock_http_client): + """Test that removing non-existent tags does not raise an error.""" + # Arrange + resource_id = 123 + tags_to_remove = ["nonexistent-tag"] + existing_tags = ["existing-tag"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", existing_tags + ) + + # Act + result = mock_client.sources.remove_tags(resource_id, tags_to_remove) + + # Assert + assert result == existing_tags + mock_http_client.assert_request_made( + "DELETE", f"/data_sources/{resource_id}/tags" + ) + + +@pytest.mark.unit +class TestTagErrorHandling: + """Tests for tag operation error scenarios.""" + + def test_tags_not_found_returns_404(self, mock_client, mock_http_client): + """Test that accessing tags on non-existent resource returns 404.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/tags", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.get_tags(resource_id) + + def test_tags_permission_denied_returns_403(self, mock_client, mock_http_client): + """Test that unauthorized access to tags returns 403.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/data_sources/{resource_id}/tags", + create_http_error(403, "Forbidden"), + ) + + # Act & Assert + with pytest.raises(AuthorizationError): + mock_client.sources.get_tags(resource_id) + + def test_set_tags_not_found(self, mock_client, mock_http_client): + """Test set_tags on non-existent resource raises NotFoundError.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/tags", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with 
pytest.raises(NotFoundError): + mock_client.sources.set_tags(resource_id, ["tag"]) + + def test_add_tags_permission_denied(self, mock_client, mock_http_client): + """Test add_tags with insufficient permissions raises AuthorizationError.""" + # Arrange + resource_id = 123 + mock_http_client.add_error( + f"/data_sources/{resource_id}/tags", + create_http_error(403, "Insufficient permissions to modify tags"), + ) + + # Act & Assert + with pytest.raises(AuthorizationError): + mock_client.sources.add_tags(resource_id, ["new-tag"]) + + def test_remove_tags_not_found(self, mock_client, mock_http_client): + """Test remove_tags on non-existent resource raises NotFoundError.""" + # Arrange + resource_id = 99999 + mock_http_client.add_error( + f"/data_sources/{resource_id}/tags", + create_http_error(404, "Resource not found"), + ) + + # Act & Assert + with pytest.raises(NotFoundError): + mock_client.sources.remove_tags(resource_id, ["tag"]) + + +@pytest.mark.unit +class TestTagsAcrossResources: + """Tests verifying tag operations work across different resource types.""" + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_get_tags_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test get_tags works across different resource types.""" + # Arrange + resource_id = 123 + expected_tags = ["resource-tag", "type-specific"] + mock_http_client.add_response(f"{endpoint}/{resource_id}/tags", expected_tags) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.get_tags(resource_id) + + # Assert + assert result == expected_tags + mock_http_client.assert_request_made("GET", f"{endpoint}/{resource_id}/tags") + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", 
"/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_set_tags_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test set_tags works across different resource types.""" + # Arrange + resource_id = 456 + new_tags = ["replaced-tag"] + mock_http_client.add_response(f"{endpoint}/{resource_id}/tags", new_tags) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.set_tags(resource_id, new_tags) + + # Assert + assert result == new_tags + mock_http_client.assert_request_made("POST", f"{endpoint}/{resource_id}/tags") + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_add_tags_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test add_tags works across different resource types.""" + # Arrange + resource_id = 789 + tags_to_add = ["added-tag"] + merged_tags = ["existing", "added-tag"] + mock_http_client.add_response(f"{endpoint}/{resource_id}/tags", merged_tags) + + # Act + resource = getattr(mock_client, resource_name) + result = resource.add_tags(resource_id, tags_to_add) + + # Assert + assert result == merged_tags + mock_http_client.assert_request_made("PUT", f"{endpoint}/{resource_id}/tags") + + @pytest.mark.parametrize( + "resource_name,endpoint", + [ + ("sources", "/data_sources"), + ("destinations", "/data_sinks"), + ("nexsets", "/data_sets"), + ("credentials", "/data_credentials"), + ], + ) + def test_remove_tags_for_resource_type( + self, mock_client, mock_http_client, resource_name, endpoint + ): + """Test remove_tags works across different resource types.""" + # Arrange + resource_id = 321 + tags_to_remove = ["to-remove"] + remaining_tags = ["remaining"] + mock_http_client.add_response(f"{endpoint}/{resource_id}/tags", remaining_tags) + + # Act + resource = 
getattr(mock_client, resource_name) + result = resource.remove_tags(resource_id, tags_to_remove) + + # Assert + assert result == remaining_tags + mock_http_client.assert_request_made( + "DELETE", f"{endpoint}/{resource_id}/tags" + ) + + +@pytest.mark.unit +class TestTagFormats: + """Tests for various tag format scenarios.""" + + def test_tags_with_special_characters(self, mock_client, mock_http_client): + """Test tags containing special characters.""" + # Arrange + resource_id = 123 + special_tags = ["env:production", "team/data-eng", "version_2.0"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", special_tags + ) + + # Act + result = mock_client.sources.get_tags(resource_id) + + # Assert + assert result == special_tags + assert "env:production" in result + + def test_tags_with_unicode(self, mock_client, mock_http_client): + """Test tags containing unicode characters.""" + # Arrange + resource_id = 123 + unicode_tags = ["categoria-datos", "equipo-analisis"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", unicode_tags + ) + + # Act + result = mock_client.sources.get_tags(resource_id) + + # Assert + assert result == unicode_tags + + def test_tags_case_sensitivity(self, mock_client, mock_http_client): + """Test that tags preserve case sensitivity.""" + # Arrange + resource_id = 123 + case_sensitive_tags = ["Production", "CRITICAL", "lowercased"] + mock_http_client.add_response( + f"/data_sources/{resource_id}/tags", case_sensitive_tags + ) + + # Act + result = mock_client.sources.get_tags(resource_id) + + # Assert + assert result == case_sensitive_tags + assert "Production" in result + assert "CRITICAL" in result + assert "lowercased" in result diff --git a/tests/unit/test_tokens_resource.py b/tests/unit/test_tokens_resource.py new file mode 100644 index 0000000..4cf7ad0 --- /dev/null +++ b/tests/unit/test_tokens_resource.py @@ -0,0 +1,60 @@ +"""Unit tests for token resource.""" + +import pytest + +from nexla_sdk import 
NexlaClient + +pytestmark = pytest.mark.unit + + +@pytest.fixture +def client(mock_client: NexlaClient) -> NexlaClient: + return mock_client + + +class TestTokensResource: + def test_logout(self, client, mock_http_client): + mock_http_client.add_response("/token/logout", {"status": "ok"}) + + response = client.tokens.logout() + + assert isinstance(response, dict) + mock_http_client.assert_request_made("POST", "/token/logout") + + def test_create_google_token(self, client, mock_http_client): + mock_http_client.add_response("/gtoken", {"access_token": "goog"}) + + response = client.tokens.create_google_token({"id_token": "g123"}) + + assert response["access_token"] == "goog" + mock_http_client.assert_request_made("POST", "/gtoken") + + def test_metadata(self, client, mock_http_client): + mock_http_client.add_response("/metadata", {"version": "1"}) + + response = client.tokens.metadata() + + assert response["version"] == "1" + mock_http_client.assert_request_made("GET", "/metadata") + + def test_metadata_with_uid(self, client, mock_http_client): + mock_http_client.add_response("/metadata/uid1", {"version": "1"}) + + response = client.tokens.metadata("uid1") + + assert response["version"] == "1" + mock_http_client.assert_request_made("GET", "/metadata/uid1") + + def test_resource_authorize(self, client, mock_http_client): + mock_http_client.add_response("/resource_authorize", {"authorized": True}) + + response = client.tokens.resource_authorize({"resource_id": 1}) + + assert response["authorized"] is True + mock_http_client.assert_request_made("POST", "/resource_authorize") + + def test_removed_duplicate_methods(self, client): + """Verify redundant methods were removed.""" + assert not hasattr(client.tokens, "logout_post") + assert not hasattr(client.tokens, "logout_put") + assert not hasattr(client.tokens, "refresh_token_put") diff --git a/tests/unit/test_validators.py b/tests/unit/test_validators.py new file mode 100644 index 0000000..3a3ee27 --- /dev/null +++ 
"""Unit tests for validators resource."""

import pytest

from nexla_sdk.models.validators.requests import (
    ValidatorCopyOptions,
    ValidatorCreate,
    ValidatorUpdate,
)
from nexla_sdk.models.validators.responses import Validator
from tests.utils import assert_model_list_valid, assert_model_valid


# Canonical validator payload returned by the mocked API.
SAMPLE_VALIDATOR = {
    "id": 123,
    "name": "Test Validator",
    "description": "A test validator",
    "resource_type": "validator",
    "code_type": "python",
    "output_type": "record",
    "code": "def validate(record): return record['value'] > 0",
    "code_config": {},
    "code_encoding": "none",
    "custom_config": {},
    "reusable": True,
    "public": False,
    "managed": False,
    "owner": {"id": 1, "full_name": "Test User", "email": "test@example.com"},
    "org": {"id": 1, "name": "Test Org", "email_domain": "example.com"},
    "access_roles": ["owner"],
    "data_sets": [1, 2],
    "tags": ["test", "validation"],
    "copied_from_id": None,
    "updated_at": "2025-01-01T00:00:00Z",
    "created_at": "2025-01-01T00:00:00Z",
}

# Three distinct validators derived from the canonical payload.
SAMPLE_VALIDATORS_LIST = [
    SAMPLE_VALIDATOR,
    {**SAMPLE_VALIDATOR, "id": 124, "name": "Another Validator"},
    {**SAMPLE_VALIDATOR, "id": 125, "name": "Third Validator"},
]


@pytest.fixture
def sample_validator_response():
    """A fresh copy of the canonical validator payload."""
    return SAMPLE_VALIDATOR.copy()


@pytest.fixture
def sample_validators_list():
    """Fresh copies of the canonical validator list payload."""
    return [item.copy() for item in SAMPLE_VALIDATORS_LIST]


@pytest.mark.unit
class TestValidatorsResource:
    """Unit tests for ValidatorsResource against the mocked HTTP client."""

    def test_list_validators_success(
        self, mock_client, mock_http_client, sample_validators_list
    ):
        """Listing returns a parsed model per payload item."""
        mock_http_client.add_response("/validators", sample_validators_list)

        result = mock_client.validators.list()

        assert len(result) == 3
        assert_model_list_valid(result, Validator)
        mock_http_client.assert_request_made("GET", "/validators")

    def test_list_validators_with_filters(
        self, mock_client, mock_http_client, sample_validators_list
    ):
        """Filter arguments are forwarded with the request."""
        mock_http_client.add_response("/validators", sample_validators_list)

        result = mock_client.validators.list(
            access_role="owner", page=1, per_page=10, expand=True
        )

        assert len(result) == 3
        sent = mock_http_client.get_request()
        assert "expand" in str(sent)

    def test_list_public_validators(
        self, mock_client, mock_http_client, sample_validators_list
    ):
        """Public listing hits the /validators/public endpoint."""
        mock_http_client.add_response("/validators/public", sample_validators_list)

        result = mock_client.validators.list_public()

        assert len(result) == 3
        mock_http_client.assert_request_made("GET", "/validators/public")

    def test_get_validator_success(
        self, mock_client, mock_http_client, sample_validator_response
    ):
        """Fetching by id returns the parsed validator."""
        vid = 123
        mock_http_client.add_response(f"/validators/{vid}", sample_validator_response)

        result = mock_client.validators.get(vid)

        assert_model_valid(result, {"id": vid})
        mock_http_client.assert_request_made("GET", f"/validators/{vid}")

    def test_create_validator_success(
        self, mock_client, mock_http_client, sample_validator_response
    ):
        """Creation POSTs the request model to /validators."""
        mock_http_client.add_response("/validators", sample_validator_response)

        payload = ValidatorCreate(
            name="Test Validator",
            code_type="python",
            code="def validate(record): return True",
        )
        result = mock_client.validators.create(payload)

        assert_model_valid(result, {"name": "Test Validator"})
        mock_http_client.assert_request_made("POST", "/validators")

    def test_update_validator_success(
        self, mock_client, mock_http_client, sample_validator_response
    ):
        """Updating PUTs to the validator's endpoint and reflects changes."""
        vid = 123
        updated = {**sample_validator_response, "name": "Updated Validator"}
        mock_http_client.add_response(f"/validators/{vid}", updated)

        payload = ValidatorUpdate(name="Updated Validator")
        result = mock_client.validators.update(vid, payload)

        assert result.name == "Updated Validator"
        mock_http_client.assert_request_made("PUT", f"/validators/{vid}")

    def test_delete_validator_success(self, mock_client, mock_http_client):
        """Deletion issues a DELETE and returns the raw status payload."""
        vid = 123
        mock_http_client.add_response(f"/validators/{vid}", {"success": True})

        result = mock_client.validators.delete(vid)

        assert result["success"] is True
        mock_http_client.assert_request_made("DELETE", f"/validators/{vid}")

    def test_copy_validator_success(
        self, mock_client, mock_http_client, sample_validator_response
    ):
        """Copying POSTs to /copy and records the source id."""
        vid = 123
        copied = {**sample_validator_response, "id": 200, "copied_from_id": 123}
        mock_http_client.add_response(f"/validators/{vid}/copy", copied)

        result = mock_client.validators.copy(vid, ValidatorCopyOptions(owner_id=2))

        assert result.id == 200
        assert result.copied_from_id == 123
        mock_http_client.assert_request_made("POST", f"/validators/{vid}/copy")

    def test_get_tags_success(self, mock_client, mock_http_client):
        """Tags are fetched with a GET on the tags subresource."""
        vid = 123
        tags = ["tag1", "tag2", "tag3"]
        mock_http_client.add_response(f"/validators/{vid}/tags", tags)

        result = mock_client.validators.get_tags(vid)

        assert result == tags
        mock_http_client.assert_request_made("GET", f"/validators/{vid}/tags")

    def test_set_tags_success(self, mock_client, mock_http_client):
        """Replacing tags uses POST on the tags subresource."""
        vid = 123
        tags = ["new_tag1", "new_tag2"]
        mock_http_client.add_response(f"/validators/{vid}/tags", tags)

        result = mock_client.validators.set_tags(vid, tags)

        assert result == tags
        mock_http_client.assert_request_made("POST", f"/validators/{vid}/tags")

    def test_add_tags_success(self, mock_client, mock_http_client):
        """Appending tags uses PUT on the tags subresource."""
        vid = 123
        merged = ["tag1", "tag2", "new_tag"]
        mock_http_client.add_response(f"/validators/{vid}/tags", merged)

        result = mock_client.validators.add_tags(vid, ["new_tag"])

        assert result == merged
        mock_http_client.assert_request_made("PUT", f"/validators/{vid}/tags")

    def test_remove_tags_success(self, mock_client, mock_http_client):
        """Removing tags uses DELETE on the tags subresource."""
        vid = 123
        remaining = ["tag1"]
        mock_http_client.add_response(f"/validators/{vid}/tags", remaining)

        result = mock_client.validators.remove_tags(vid, ["tag2"])

        assert result == remaining
        mock_http_client.assert_request_made("DELETE", f"/validators/{vid}/tags")

    def test_search_tags_success(
        self, mock_client, mock_http_client, sample_validators_list
    ):
        """Tag search POSTs to /validators/search_tags."""
        mock_http_client.add_response("/validators/search_tags", sample_validators_list)

        result = mock_client.validators.search_tags(["test", "validation"])

        assert len(result) == 3
        mock_http_client.assert_request_made("POST", "/validators/search_tags")


@pytest.mark.unit
class TestValidatorModels:
    """Unit tests for the validator request/response models."""

    def test_validator_model_validation(self, sample_validator_response):
        """Validator parses the canonical payload field-for-field."""
        model = Validator.model_validate(sample_validator_response)

        assert model.id == 123
        assert model.name == "Test Validator"
        assert model.code_type == "python"
        assert model.resource_type == "validator"
        assert model.reusable is True
        assert model.public is False
        assert "test" in model.tags

    def test_validator_create_model_serialization(self):
        """ValidatorCreate dumps set fields plus the fixed resource_type."""
        payload = ValidatorCreate(
            name="My Validator",
            code_type="python",
            code="def validate(r): return True",
            tags=["validation"],
        )

        dumped = payload.model_dump(exclude_none=True)

        assert dumped["name"] == "My Validator"
        assert dumped["code_type"] == "python"
        assert "tags" in dumped
        assert dumped["resource_type"] == "validator"

    def test_validator_update_model_serialization(self):
        """ValidatorUpdate omits fields that were never set."""
        payload = ValidatorUpdate(
            name="Updated Name",
            description="Updated description",
        )

        dumped = payload.model_dump(exclude_none=True)

        assert dumped["name"] == "Updated Name"
        assert dumped["description"] == "Updated description"
        # code_type was never assigned, so exclude_none drops it.
        assert "code_type" not in dumped

    def test_validator_copy_options_serialization(self):
        """ValidatorCopyOptions round-trips its set fields."""
        options = ValidatorCopyOptions(
            owner_id=5,
            copy_access_controls=True,
        )

        dumped = options.model_dump(exclude_none=True)

        assert dumped["owner_id"] == 5
        assert dumped["copy_access_controls"] is True
"""Unit tests for vendors resource."""

import pytest

from nexla_sdk.models.vendors.requests import VendorCreate, VendorUpdate
from nexla_sdk.models.vendors.responses import Vendor
from tests.utils import assert_model_list_valid, assert_model_valid


# Canonical vendor payload returned by the mocked API.
SAMPLE_VENDOR = {
    "id": 123,
    "name": "salesforce",
    "display_name": "Salesforce",
    "description": "Salesforce CRM connector",
    "config": {},
    "small_logo": "https://example.com/sf-small.png",
    "logo": "https://example.com/sf.png",
    "connection_type": "api",
    "auth_templates": [1, 2],
    "vendor_endpoints": [],
    "created_at": "2025-01-01T00:00:00Z",
    "updated_at": "2025-01-01T00:00:00Z",
}

SAMPLE_VENDORS_LIST = [
    SAMPLE_VENDOR,
    {**SAMPLE_VENDOR, "id": 124, "name": "hubspot", "display_name": "HubSpot"},
    {**SAMPLE_VENDOR, "id": 125, "name": "zendesk", "display_name": "Zendesk"},
]


@pytest.fixture
def sample_vendor_response():
    """A fresh copy of the canonical vendor payload."""
    return SAMPLE_VENDOR.copy()


@pytest.fixture
def sample_vendors_list():
    """Fresh copies of the canonical vendor list payload."""
    return [item.copy() for item in SAMPLE_VENDORS_LIST]


@pytest.mark.unit
class TestVendorsResource:
    """Unit tests for VendorsResource against the mocked HTTP client."""

    def test_list_vendors_success(
        self, mock_client, mock_http_client, sample_vendors_list
    ):
        """Listing returns one parsed Vendor per payload item."""
        mock_http_client.add_response("/vendors", sample_vendors_list)

        result = mock_client.vendors.list()

        assert len(result) == 3
        assert_model_list_valid(result, Vendor)
        mock_http_client.assert_request_made("GET", "/vendors")

    def test_get_vendor_by_id(
        self, mock_client, mock_http_client, sample_vendor_response
    ):
        """Fetching by id hits the vendor's endpoint."""
        vendor_id = 123
        mock_http_client.add_response(f"/vendors/{vendor_id}", sample_vendor_response)

        result = mock_client.vendors.get(vendor_id)

        assert_model_valid(result, {"id": vendor_id})
        mock_http_client.assert_request_made("GET", f"/vendors/{vendor_id}")

    def test_get_vendor_by_name(
        self, mock_client, mock_http_client, sample_vendor_response
    ):
        """Name lookup resolves through the list endpoint."""
        mock_http_client.add_response("/vendors", sample_vendor_response)

        result = mock_client.vendors.get_by_name("salesforce")

        assert result.name == "salesforce"
        mock_http_client.assert_request_made("GET", "/vendors")

    def test_create_vendor_success(
        self, mock_client, mock_http_client, sample_vendor_response
    ):
        """Creation POSTs to /vendors; the mock echoes the sample payload."""
        mock_http_client.add_response("/vendors", sample_vendor_response)

        payload = VendorCreate(
            name="new_vendor",
            display_name="New Vendor",
        )
        result = mock_client.vendors.create(payload)

        # The mock always returns the canonical sample, hence "salesforce".
        assert_model_valid(result, {"name": "salesforce"})
        mock_http_client.assert_request_made("POST", "/vendors")

    def test_update_vendor_success(
        self, mock_client, mock_http_client, sample_vendor_response
    ):
        """Updating PUTs to the vendor's endpoint and reflects changes."""
        vendor_id = 123
        updated = {**sample_vendor_response, "description": "Updated desc"}
        mock_http_client.add_response(f"/vendors/{vendor_id}", updated)

        payload = VendorUpdate(description="Updated desc")
        result = mock_client.vendors.update(vendor_id, payload)

        assert result.description == "Updated desc"
        mock_http_client.assert_request_made("PUT", f"/vendors/{vendor_id}")

    def test_delete_vendor_success(self, mock_client, mock_http_client):
        """Deletion issues a DELETE and returns the raw status payload."""
        vendor_id = 123
        mock_http_client.add_response(f"/vendors/{vendor_id}", {"success": True})

        result = mock_client.vendors.delete(vendor_id)

        assert result["success"] is True
        mock_http_client.assert_request_made("DELETE", f"/vendors/{vendor_id}")

    def test_delete_auth_template_from_vendor(self, mock_client, mock_http_client):
        """Auth templates are detached via the nested DELETE endpoint."""
        vendor_id = 123
        template_id = 456
        mock_http_client.add_response(
            f"/vendors/{vendor_id}/auth_templates/{template_id}", {"success": True}
        )

        result = mock_client.vendors.delete_auth_template(vendor_id, template_id)

        assert result["success"] is True
        mock_http_client.assert_request_made(
            "DELETE", f"/vendors/{vendor_id}/auth_templates/{template_id}"
        )


@pytest.mark.unit
class TestVendorModels:
    """Unit tests for the vendor request/response models."""

    def test_vendor_model_validation(self, sample_vendor_response):
        """Vendor parses the canonical payload field-for-field."""
        model = Vendor.model_validate(sample_vendor_response)

        assert model.id == 123
        assert model.name == "salesforce"
        assert model.display_name == "Salesforce"
        assert len(model.auth_templates) == 2

    def test_vendor_model_with_minimal_data(self):
        """Only the id is required; other fields default sensibly."""
        model = Vendor.model_validate({"id": 1})

        assert model.id == 1
        assert model.name is None
        assert model.auth_templates == []

    def test_vendor_create_model_serialization(self):
        """VendorCreate dumps exactly the fields that were set."""
        payload = VendorCreate(
            name="new_vendor",
            display_name="New Vendor",
            description="A new vendor",
        )

        dumped = payload.model_dump(exclude_none=True)

        assert dumped["name"] == "new_vendor"
        assert dumped["display_name"] == "New Vendor"
        assert dumped["description"] == "A new vendor"

    def test_vendor_update_model_serialization(self):
        """VendorUpdate omits fields that were never set."""
        payload = VendorUpdate(
            display_name="Updated Name",
            description="Updated description",
        )

        dumped = payload.model_dump(exclude_none=True)

        assert dumped["display_name"] == "Updated Name"
        assert dumped["description"] == "Updated description"
        # name was never assigned, so exclude_none drops it.
        assert "name" not in dumped
"assert_datetime_field_valid", "assert_list_field_valid", + "accessor_list", "credential_list", "source_list", "destination_list", "lookup_list", + "nexset_list", "user_list", "team_list", "project_list", diff --git a/tests/utils/mock_builders.py b/tests/utils/mock_builders.py index 3c52211..aedf32d 100644 --- a/tests/utils/mock_builders.py +++ b/tests/utils/mock_builders.py @@ -555,6 +555,122 @@ def docs_recommendation_response(**overrides) -> Dict[str, Any]: base.update(overrides) return base + @staticmethod + def accessor_response( + accessor_type: str = "USER", + access_role: str = "collaborator", + **overrides, + ) -> Dict[str, Any]: + """Build a mock accessor response for access control testing.""" + base = { + "id": fake.random_int(1, 10000), + "type": accessor_type, + "access_roles": [access_role], + "org_id": fake.random_int(1, 100), + } + if accessor_type == "USER": + base["email"] = fake.email() + base["full_name"] = fake.name() + base["user_id"] = fake.random_int(1, 10000) + elif accessor_type == "TEAM": + base["name"] = f"{fake.word().title()} Team" + base["team_id"] = fake.random_int(1, 1000) + base.update(overrides) + return base + + @staticmethod + def accessor_list(count: int = 3, **overrides) -> List[Dict[str, Any]]: + """Generate a list of accessor responses.""" + accessors = [] + for i in range(count): + accessor_type = "USER" if i % 2 == 0 else "TEAM" + accessors.append( + MockResponseBuilder.accessor_response(accessor_type=accessor_type, **overrides) + ) + return accessors + + @staticmethod + def docs_response(**overrides) -> Dict[str, Any]: + """Build a mock documentation response for a resource.""" + base = { + "entries": [ + {"key": "description", "value": fake.sentence()}, + {"key": "usage", "value": fake.paragraph()}, + {"key": "notes", "value": fake.text(max_nb_chars=100)}, + ] + } + base.update(overrides) + return base + + @staticmethod + def access_insights_response(**overrides) -> Dict[str, Any]: + """Build a mock access insights 
response.""" + base = { + "access_granted": True, + "access_reason": fake.random_element(["owner", "collaborator", "team_member"]), + "access_path": [ + { + "type": "direct", + "role": fake.random_element(["owner", "collaborator", "admin"]), + } + ], + "resource_id": fake.random_int(1, 10000), + "resource_type": fake.random_element( + ["data_source", "data_sink", "data_set", "data_credentials"] + ), + } + base.update(overrides) + return base + + @staticmethod + def search_response( + items: List[Dict[str, Any]] = None, + total: int = None, + page: int = 1, + per_page: int = 20, + ) -> Dict[str, Any]: + """Build a mock search response. + + Args: + items: List of items in the search results + total: Total count of items (defaults to len(items)) + page: Current page number + per_page: Items per page + + Returns: + Dictionary with data and meta information + """ + if items is None: + items = [] + return { + "data": items, + "meta": { + "total_count": total if total is not None else len(items), + "page": page, + "per_page": per_page, + }, + } + + @staticmethod + def paginated_response( + items: List[Dict[str, Any]], + page: int = 1, + per_page: int = 20, + total: Optional[int] = None, + ) -> Dict[str, Any]: + """Build a paginated response with meta information.""" + total = total if total is not None else len(items) + total_pages = (total + per_page - 1) // per_page if per_page > 0 else 1 + return { + "data": items, + "meta": { + "currentPage": page, + "totalCount": total, + "pageCount": total_pages, + "perPage": per_page, + }, + } + class MockDataFactory: """Factory for generating mock data for testing.""" @@ -1182,3 +1298,65 @@ def team_list(count: int = 3) -> List[Dict[str, Any]]: def project_list(count: int = 3) -> List[Dict[str, Any]]: """Generate a list of mock projects.""" return [MockResponseBuilder.project() for _ in range(count)] + + +def accessor_list(count: int = 3) -> List[Dict[str, Any]]: + """Generate a list of mock accessors.""" + return 
def accessor_list(count: int = 3) -> List[Dict[str, Any]]:
    """Generate a list of mock accessors (thin module-level wrapper)."""
    return MockResponseBuilder.accessor_list(count)


def nexset_list(count: int = 3) -> List[Dict[str, Any]]:
    """Generate a list of mock nexsets."""
    factory = MockDataFactory()
    return [factory.create_mock_nexset() for _ in range(count)]


class SearchResponseBuilder:
    """Builder for creating mock search responses."""

    @staticmethod
    def search_response(
        # Fixed implicit-Optional annotations (PEP 484): defaults of None
        # require Optional[...] types.
        items: Optional[List[Dict[str, Any]]] = None,
        total: Optional[int] = None,
        page: int = 1,
        per_page: int = 20,
    ) -> Dict[str, Any]:
        """Build a mock search response.

        Args:
            items: List of items in the search results.
            total: Total count of items (defaults to ``len(items)``).
            page: Current page number.
            per_page: Items per page.

        Returns:
            Dictionary with ``data`` and ``meta`` information.
        """
        if items is None:
            items = []
        return {
            "data": items,
            "meta": {
                "total_count": total if total is not None else len(items),
                "page": page,
                "per_page": per_page,
            },
        }

    @staticmethod
    def search_tags_response(
        items: Optional[List[Dict[str, Any]]] = None,
        tags: Optional[List[str]] = None,
    ) -> List[Dict[str, Any]]:
        """Build a mock search_tags response.

        Args:
            items: List of items matching the tags.
            tags: Tags that were searched for. Accepted for signature
                parity with callers; the mock does no filtering.

        Returns:
            List of matching items (empty list when ``items`` is None).
        """
        if items is None:
            items = []
        return items
def test_type_checking_pattern():
    """Verify the TYPE_CHECKING pattern in auth_parameters/responses.py.

    Parses the module with ``ast`` (no import side effects) and checks that:
      * ``TYPE_CHECKING`` is imported from ``typing``;
      * an ``if TYPE_CHECKING:`` block exists and contains the
        ``AuthTemplate`` import, so the import never executes at runtime;
      * the ``AuthParameter`` class annotates an ``auth_template`` field.

    Raises:
        AssertionError: If either required structural check fails.
    """
    import ast

    # Parse the source rather than importing it, so a circular import in
    # the package cannot break this verification.
    with open(
        'nexla_sdk/models/auth_parameters/responses.py', 'r', encoding='utf-8'
    ) as f:
        source = f.read()

    tree = ast.parse(source)

    type_checking_imported = False
    auth_template_in_if = False

    for node in ast.walk(tree):
        # `from typing import TYPE_CHECKING`
        if isinstance(node, ast.ImportFrom):
            if node.module == 'typing':
                for alias in node.names:
                    if alias.name == 'TYPE_CHECKING':
                        type_checking_imported = True
                        print("✓ TYPE_CHECKING imported from typing")

        # `if TYPE_CHECKING:` guard containing the AuthTemplate import.
        if isinstance(node, ast.If):
            if isinstance(node.test, ast.Name) and node.test.id == 'TYPE_CHECKING':
                print("✓ if TYPE_CHECKING: block found")
                for item in node.body:
                    if isinstance(item, ast.ImportFrom):
                        # item.module is None for bare relative imports
                        # (`from . import x`); guard before the substring
                        # test to avoid a TypeError.
                        if item.module and 'auth_templates' in item.module:
                            auth_template_in_if = True
                            print("✓ AuthTemplate import inside TYPE_CHECKING block")

        # The forward-referenced field on the model class.
        if isinstance(node, ast.ClassDef) and node.name == 'AuthParameter':
            print("✓ AuthParameter class found")
            for item in node.body:
                if isinstance(item, ast.AnnAssign):
                    if isinstance(item.target, ast.Name) and item.target.id == 'auth_template':
                        # The annotation should contain a string literal
                        # "AuthTemplate" (a forward reference).
                        print("✓ auth_template field found with annotation")

    assert type_checking_imported, "TYPE_CHECKING not imported"
    assert auth_template_in_if, "AuthTemplate not in TYPE_CHECKING block"

    print("\n✓ All checks passed - TYPE_CHECKING pattern correctly implemented")
    print("✓ This prevents circular imports at runtime while preserving type hints")


if __name__ == '__main__':
    test_type_checking_pattern()