From 9f65816f1633b5fb22de2441a9ce0a833665bc62 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Thu, 6 Nov 2025 12:00:38 +0000 Subject: [PATCH 01/23] fix directus --- echo/.devcontainer/devcontainer.json | 18 +- echo/.devcontainer/docker-compose-s3.yml | 15 + echo/.devcontainer/docker-compose.yml | 57 +- echo/.devcontainer/setup.sh | 340 ++ echo/.vscode/sessions.json | 4 - echo/dev-tmux.sh | 592 ---- echo/directus/.env.sample | 73 +- echo/directus/Dockerfile | 30 +- echo/directus/directus-sync.config.js | 1 - echo/directus/extensions/package.json | 7 - echo/directus/sync/collections/flows.json | 4 +- .../directus/sync/collections/operations.json | 264 +- .../sync/collections/permissions.json | 1995 +---------- echo/directus/sync/collections/policies.json | 45 +- echo/directus/sync/collections/roles.json | 10 +- echo/directus/sync/collections/settings.json | 15 +- .../collections/directus_sync_id_map.json | 28 - .../fields/announcement/activity.json | 1 + .../fields/announcement/created_at.json | 1 + .../fields/announcement/expires_at.json | 1 + .../sync/snapshot/fields/announcement/id.json | 1 + .../snapshot/fields/announcement/level.json | 1 + .../snapshot/fields/announcement/sort.json | 1 + .../fields/announcement/translations.json | 1 + .../fields/announcement/updated_at.json | 1 + .../fields/announcement/user_created.json | 1 + .../fields/announcement/user_updated.json | 1 + .../announcement_activity.json | 1 + .../announcement_activity/created_at.json | 1 + .../fields/announcement_activity/id.json | 1 + .../fields/announcement_activity/read.json | 1 + .../fields/announcement_activity/sort.json | 1 + .../announcement_activity/updated_at.json | 1 + .../announcement_activity/user_created.json | 1 + .../fields/announcement_activity/user_id.json | 1 + .../announcement_activity/user_updated.json | 1 + .../announcement_id.json | 1 + .../fields/announcement_translations/id.json | 1 + .../languages_code.json | 1 + .../announcement_translations/message.json | 1 + 
.../announcement_translations/title.json | 1 + .../fields/aspect/aspect_segment.json | 1 + .../snapshot/fields/aspect/created_at.json | 1 + .../snapshot/fields/aspect/description.json | 1 + .../sync/snapshot/fields/aspect/id.json | 1 + .../snapshot/fields/aspect/image_url.json | 1 + .../snapshot/fields/aspect/long_summary.json | 1 + .../sync/snapshot/fields/aspect/name.json | 1 + .../snapshot/fields/aspect/short_summary.json | 1 + .../snapshot/fields/aspect/updated_at.json | 1 + .../sync/snapshot/fields/aspect/view_id.json | 1 + .../fields/aspect_segment/aspect.json | 1 + .../fields/aspect_segment/description.json | 1 + .../snapshot/fields/aspect_segment/id.json | 1 + .../fields/aspect_segment/relevant_index.json | 1 + .../fields/aspect_segment/segment.json | 1 + .../aspect_segment/verbatim_transcript.json | 1 + .../snapshot/fields/conversation/chunks.json | 1 + .../conversation/conversation_segments.json | 1 + .../fields/conversation/created_at.json | 1 + .../fields/conversation/duration.json | 1 + .../sync/snapshot/fields/conversation/id.json | 1 + .../is_all_chunks_transcribed.json | 1 + .../is_audio_processing_finished.json | 1 + .../fields/conversation/is_finished.json | 1 + .../conversation/linked_conversations.json | 1 + .../conversation/linking_conversations.json | 1 + .../conversation/merged_audio_path.json | 1 + .../conversation/merged_transcript.json | 1 + .../conversation/participant_email.json | 1 + .../fields/conversation/participant_name.json | 1 + .../conversation/participant_user_agent.json | 1 + .../conversation/processing_status.json | 1 + .../conversation/project_chat_messages.json | 1 + .../fields/conversation/project_chats.json | 1 + .../fields/conversation/project_id.json | 1 + .../snapshot/fields/conversation/replies.json | 1 + .../snapshot/fields/conversation/source.json | 1 + .../snapshot/fields/conversation/summary.json | 1 + .../snapshot/fields/conversation/tags.json | 1 + .../fields/conversation/updated_at.json | 1 + 
.../conversation_chunk/conversation_id.json | 1 + .../conversation_segments.json | 1 + .../fields/conversation_chunk/created_at.json | 1 + .../cross_talk_instances.json | 1 + .../conversation_chunk/desired_language.json | 1 + .../conversation_chunk/detected_language.json | 1 + .../detected_language_confidence.json | 1 + .../conversation_chunk/diarization.json | 1 + .../fields/conversation_chunk/error.json | 1 + .../hallucination_reason.json | 1 + .../hallucination_score.json | 1 + .../fields/conversation_chunk/id.json | 1 + .../conversation_chunk/noise_ratio.json | 1 + .../fields/conversation_chunk/path.json | 1 + .../conversation_chunk/processing_status.json | 1 + .../conversation_chunk/raw_transcript.json | 1 + .../runpod_job_status_link.json | 1 + .../runpod_request_count.json | 1 + .../conversation_chunk/silence_ratio.json | 1 + .../fields/conversation_chunk/source.json | 1 + .../fields/conversation_chunk/timestamp.json | 1 + .../fields/conversation_chunk/transcript.json | 1 + .../conversation_chunk/translation_error.json | 1 + .../fields/conversation_chunk/updated_at.json | 1 + .../conversation_link/date_created.json | 1 + .../conversation_link/date_updated.json | 1 + .../snapshot/fields/conversation_link/id.json | 1 + .../fields/conversation_link/link_type.json | 1 + .../source_conversation_id.json | 1 + .../target_conversation_id.json | 1 + .../conversation_id.json | 1 + .../fields/conversation_project_tag/id.json | 1 + .../project_tag_id.json | 1 + .../conversation_reply/content_text.json | 1 + .../conversation_reply/conversation_id.json | 1 + .../conversation_reply/date_created.json | 1 + .../fields/conversation_reply/id.json | 1 + .../fields/conversation_reply/reply.json | 1 + .../fields/conversation_reply/sort.json | 1 + .../fields/conversation_reply/type.json | 1 + .../fields/conversation_segment/chunks.json | 1 + .../conversation_segment/config_id.json | 1 + .../contextual_transcript.json | 1 + .../conversation_segment/conversation_id.json | 1 + 
.../fields/conversation_segment/counter.json | 1 + .../fields/conversation_segment/id.json | 1 + .../conversation_segment/lightrag_flag.json | 1 + .../fields/conversation_segment/path.json | 1 + .../conversation_segment/transcript.json | 1 + .../conversation_chunk_id.json | 1 + .../conversation_segment_id.json | 1 + .../id.json | 1 + .../directus_sync_id_map/created_at.json | 43 - .../fields/directus_sync_id_map/id.json | 43 - .../fields/directus_sync_id_map/local_id.json | 43 - .../fields/directus_sync_id_map/sync_id.json | 43 - .../fields/directus_sync_id_map/table.json | 43 - .../disable_create_project.json | 1 + .../fields/directus_users/projects.json | 1 + .../snapshot/fields/insight/created_at.json | 1 + .../sync/snapshot/fields/insight/id.json | 1 + .../insight/project_analysis_run_id.json | 1 + .../sync/snapshot/fields/insight/summary.json | 1 + .../sync/snapshot/fields/insight/title.json | 1 + .../snapshot/fields/insight/updated_at.json | 1 + .../sync/snapshot/fields/languages/code.json | 1 + .../snapshot/fields/languages/direction.json | 1 + .../sync/snapshot/fields/languages/name.json | 1 + .../conversation_chunk_id.json | 1 + .../processing_status/conversation_id.json | 1 + .../fields/processing_status/duration_ms.json | 1 + .../fields/processing_status/event.json | 1 + .../snapshot/fields/processing_status/id.json | 1 + .../fields/processing_status/message.json | 1 + .../fields/processing_status/parent.json | 1 + .../project_analysis_run_id.json | 1 + .../fields/processing_status/project_id.json | 1 + .../fields/processing_status/timestamp.json | 1 + .../sync/snapshot/fields/project/context.json | 1 + ...sation_ask_for_participant_name_label.json | 1 + .../fields/project/conversations.json | 1 + .../snapshot/fields/project/created_at.json | 1 + ...conversation_ask_for_participant_name.json | 1 + .../default_conversation_description.json | 1 + .../default_conversation_finish_text.json | 1 + .../project/default_conversation_title.json | 1 + 
...efault_conversation_transcript_prompt.json | 1 + .../default_conversation_tutorial_slug.json | 1 + .../fields/project/directus_user_id.json | 1 + .../fields/project/divider-n6xep9.json | 1 + .../fields/project/get_reply_mode.json | 1 + .../fields/project/get_reply_prompt.json | 1 + .../sync/snapshot/fields/project/id.json | 1 + .../project/image_generation_model.json | 1 + .../project/is_conversation_allowed.json | 1 + .../is_enhanced_audio_processing_enabled.json | 1 + .../fields/project/is_get_reply_enabled.json | 1 + ...ect_notification_subscription_allowed.json | 1 + .../snapshot/fields/project/language.json | 1 + .../sync/snapshot/fields/project/name.json | 1 + .../fields/project/processing_status.json | 1 + .../fields/project/project_analysis_runs.json | 1 + .../fields/project/project_chats.json | 1 + .../fields/project/project_reports.json | 1 + .../sync/snapshot/fields/project/tags.json | 1 + .../snapshot/fields/project/updated_at.json | 1 + .../project_analysis_run/created_at.json | 1 + .../fields/project_analysis_run/id.json | 1 + .../fields/project_analysis_run/insights.json | 1 + .../processing_status.json | 1 + .../project_analysis_run/project_id.json | 1 + .../project_analysis_run/updated_at.json | 1 + .../fields/project_analysis_run/views.json | 1 + .../fields/project_chat/auto_select.json | 1 + .../fields/project_chat/date_created.json | 1 + .../fields/project_chat/date_updated.json | 1 + .../sync/snapshot/fields/project_chat/id.json | 1 + .../snapshot/fields/project_chat/name.json | 1 + .../project_chat/project_chat_messages.json | 1 + .../fields/project_chat/project_id.json | 1 + .../project_chat/used_conversations.json | 1 + .../fields/project_chat/user_created.json | 1 + .../fields/project_chat/user_updated.json | 1 + .../conversation_id.json | 1 + .../fields/project_chat_conversation/id.json | 1 + .../project_chat_id.json | 1 + .../added_conversations.json | 1 + .../chat_message_metadata.json | 1 + .../project_chat_message/date_created.json 
| 1 + .../project_chat_message/date_updated.json | 1 + .../fields/project_chat_message/id.json | 1 + .../project_chat_message/message_from.json | 1 + .../project_chat_message/project_chat_id.json | 1 + .../project_chat_message/template_key.json | 1 + .../fields/project_chat_message/text.json | 1 + .../project_chat_message/tokens_count.json | 1 + .../used_conversations.json | 1 + .../conversation_id.json | 1 + .../project_chat_message_conversation/id.json | 1 + .../project_chat_message_id.json | 1 + .../conversation_id.json | 1 + .../id.json | 1 + .../project_chat_message_id.json | 1 + .../conversation.json | 1 + .../date_created.json | 1 + .../project_chat_message_metadata/id.json | 1 + .../message_metadata.json | 1 + .../project_chat_message_metadata/ratio.json | 1 + .../reference_text.json | 1 + .../project_chat_message_metadata/type.json | 1 + .../fields/project_report/content.json | 1 + .../fields/project_report/date_created.json | 1 + .../fields/project_report/date_updated.json | 1 + .../fields/project_report/error_code.json | 1 + .../snapshot/fields/project_report/id.json | 1 + .../fields/project_report/language.json | 1 + .../fields/project_report/links-v1gyg0.json | 1 + .../fields/project_report/project_id.json | 1 + .../project_report/show_portal_link.json | 1 + .../fields/project_report/status.json | 1 + .../project_report_metric/date_created.json | 1 + .../project_report_metric/date_updated.json | 1 + .../fields/project_report_metric/id.json | 1 + .../fields/project_report_metric/ip.json | 1 + .../project_report_id.json | 1 + .../fields/project_report_metric/type.json | 1 + .../conversation_id.json | 1 + .../date_submitted.json | 1 + .../date_updated.json | 1 + .../email.json | 1 + .../email_opt_in.json | 1 + .../email_opt_out_token.json | 1 + .../id.json | 1 + .../project_id.json | 1 + .../sort.json | 1 + .../fields/project_tag/conversations.json | 1 + .../fields/project_tag/created_at.json | 1 + .../sync/snapshot/fields/project_tag/id.json | 1 + 
.../fields/project_tag/project_id.json | 1 + .../snapshot/fields/project_tag/sort.json | 1 + .../snapshot/fields/project_tag/text.json | 1 + .../fields/project_tag/updated_at.json | 1 + .../sync/snapshot/fields/view/aspects.json | 1 + .../sync/snapshot/fields/view/created_at.json | 1 + .../snapshot/fields/view/description.json | 1 + .../sync/snapshot/fields/view/id.json | 1 + .../sync/snapshot/fields/view/language.json | 1 + .../sync/snapshot/fields/view/name.json | 1 + .../fields/view/project_analysis_run_id.json | 1 + .../sync/snapshot/fields/view/summary.json | 1 + .../sync/snapshot/fields/view/updated_at.json | 1 + .../sync/snapshot/fields/view/user_input.json | 1 + .../fields/view/user_input_description.json | 1 + echo/directus/sync/snapshot/info.json | 5 +- echo/docs/server_adding_dependencies.md | 14 +- echo/docs/troubleshooting-tips.md | 100 + echo/readme.md | 126 +- echo/server/.python-version | 1 + echo/server/Dockerfile | 9 +- echo/server/pyproject.toml | 59 +- echo/server/run-scheduler.sh | 2 +- echo/server/run-worker-cpu.sh | 2 +- echo/server/run-worker.sh | 2 +- echo/server/run.sh | 2 +- echo/server/uv.lock | 2965 +++++++++++++++++ echo/setup.sh | 40 - 287 files changed, 3912 insertions(+), 3380 deletions(-) create mode 100644 echo/.devcontainer/docker-compose-s3.yml create mode 100755 echo/.devcontainer/setup.sh delete mode 100755 echo/dev-tmux.sh delete mode 100644 echo/directus/extensions/package.json delete mode 100644 echo/directus/sync/snapshot/collections/directus_sync_id_map.json delete mode 100644 echo/directus/sync/snapshot/fields/directus_sync_id_map/created_at.json delete mode 100644 echo/directus/sync/snapshot/fields/directus_sync_id_map/id.json delete mode 100644 echo/directus/sync/snapshot/fields/directus_sync_id_map/local_id.json delete mode 100644 echo/directus/sync/snapshot/fields/directus_sync_id_map/sync_id.json delete mode 100644 echo/directus/sync/snapshot/fields/directus_sync_id_map/table.json create mode 100644 
echo/docs/troubleshooting-tips.md create mode 100644 echo/server/.python-version create mode 100644 echo/server/uv.lock delete mode 100755 echo/setup.sh diff --git a/echo/.devcontainer/devcontainer.json b/echo/.devcontainer/devcontainer.json index a9652e9f..354e0c20 100644 --- a/echo/.devcontainer/devcontainer.json +++ b/echo/.devcontainer/devcontainer.json @@ -1,7 +1,7 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the // README at: https://github.com/devcontainers/templates/tree/main/src/universal { - "name": "dembrane/pilot", + "name": "dembrane/echo", // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile // "image": "mcr.microsoft.com/devcontainers/base:ubuntu", "dockerComposeFile": "docker-compose.yml", @@ -11,23 +11,25 @@ "features": {}, // Use 'forwardPorts' to make a list of ports inside the container available locally. // Use 'postCreateCommand' to run commands after the container is created. - "postCreateCommand": "chmod +x ./setup.sh && ./setup.sh", + "postCreateCommand": "chmod +x ./.devcontainer/setup.sh && ./.devcontainer/setup.sh", "customizations": { "vscode": { "extensions": [ + // ts "YoavBls.pretty-ts-errors", "bradlc.vscode-tailwindcss", + "biomejs.biome", + // python "ms-python.python", "charliermarsh.ruff", - "matangover.mypy", + "meta.pyrefly", + "anysphere.cursorpyright", // for cursor + // general + "github.vscode-pull-request-github", "ms-azuretools.vscode-docker", "mhutchie.git-graph", "cweijan.vscode-postgresql-client2", - "github.vscode-pull-request-github", - "nguyenngoclong.terminal-keeper", - "biomejs.biome", - // for cursor specifically lol - "anysphere.cursorpyright" + "nguyenngoclong.terminal-keeper" ] } }, diff --git a/echo/.devcontainer/docker-compose-s3.yml b/echo/.devcontainer/docker-compose-s3.yml new file mode 100644 index 00000000..fcde2a24 --- /dev/null +++ b/echo/.devcontainer/docker-compose-s3.yml @@ -0,0 +1,15 @@ +services: + minio: + 
image: minio/minio:latest + ports: + - 9000:9000 + - 9001:9001 + entrypoint: > + /bin/sh -c " /usr/bin/mc config host add myminio http://minio:9000 $$MINIO_ROOT_USER $$MINIO_ROOT_PASSWORD; /usr/bin/mc mb --ignore-existing myminio/dembrane; /usr/bin/mc policy set download myminio/dembrane; minio server /mnt/data --console-address ":9001" " + environment: + - MINIO_ROOT_USER=dembrane + - MINIO_ROOT_PASSWORD=dembrane + - MINIO_VOLUMES=/mnt/data + volumes: + - ./minio_data:/mnt/data + restart: unless-stopped diff --git a/echo/.devcontainer/docker-compose.yml b/echo/.devcontainer/docker-compose.yml index ea431048..ec9ccd89 100644 --- a/echo/.devcontainer/docker-compose.yml +++ b/echo/.devcontainer/docker-compose.yml @@ -1,40 +1,14 @@ -version: '3.8' - services: - minio: - image: minio/minio:latest - ports: - - 9000:9000 - - 9001:9001 - entrypoint: > - /bin/sh -c " - /usr/bin/mc config host add myminio http://minio:9000 $$MINIO_ROOT_USER $$MINIO_ROOT_PASSWORD; - /usr/bin/mc mb --ignore-existing myminio/dembrane; - /usr/bin/mc policy set download myminio/dembrane; - minio server /mnt/data --console-address ":9001" - " - environment: - - MINIO_ROOT_USER=dembrane - - MINIO_ROOT_PASSWORD=dembrane - - MINIO_VOLUMES=/mnt/data - volumes: - - ./minio_data:/mnt/data - restart: unless-stopped - redis: - image: valkey/valkey:8.0 + image: valkey/valkey:8.0.6-alpine volumes: - ./redis_data:/data postgres: - image: pgvector/pgvector:0.6.2-pg16 + image: pgvector/pgvector:0.8.1-pg16 restart: unless-stopped ports: - - 5432:5432 - - 7474:7474 # Neo4j Browser - - 7687:7687 # Neo4j Bolt protocol - - 5173:5173 - - 5174:5174 + - 5432:5432 environment: POSTGRES_DB: dembrane POSTGRES_USER: dembrane @@ -44,16 +18,18 @@ services: - ./init.sql:/docker-entrypoint-initdb.d/init.sql neo4j: - image: neo4j:5.26.4 + image: neo4j:5.26.16-community + restart: unless-stopped volumes: - ./neo4j_data/logs:/logs - ./neo4j_data/config:/config - ./neo4j_data/data:/data - ./neo4j_data/plugins:/plugins + ports: + 
- 7474:7474 # Neo4j Browser + - 7687:7687 # Neo4j Bolt protocol environment: - NEO4J_AUTH=neo4j/admin@dembrane - network_mode: service:postgres - restart: unless-stopped directus: build: @@ -64,6 +40,8 @@ services: volumes: - ../directus/uploads:/directus/uploads - ../directus/templates:/directus/templates + env_file: + - ../directus/.env environment: - PUBLIC_URL=http://localhost:8055 - PORT=8055 @@ -88,11 +66,7 @@ services: - USER_INVITE_URL_ALLOW_LIST=http://localhost:5173/invite - USER_REGISTER_URL_ALLOW_LIST=http://localhost:5173/verify-email - PASSWORD_RESET_URL_ALLOW_LIST=http://localhost:5173/password-reset - - STORAGE_LOCATIONS=local,s3 - - STORAGE_S3_KEY=dembrane - - STORAGE_S3_SECRET=dembrane - - STORAGE_S3_BUCKET=dembrane - - STORAGE_S3_ENDPOINT=http://minio:9000 + - STORAGE_LOCATIONS=local - EMAIL_TRANSPORT="smtp" - EMAIL_FROM="" - EMAIL_SMTP_HOST="" @@ -111,6 +85,10 @@ services: context: ../server dockerfile: Dockerfile target: base + ports: + - 8000:8000 + - 5173:5173 + - 5174:5174 environment: - DIRECTUS_SECRET=secret - DIRECTUS_TOKEN=admin @@ -129,11 +107,12 @@ services: - STORAGE_S3_BUCKET=dembrane - STORAGE_S3_ENDPOINT=http://minio:9000 - NEO4J_URL=bolt://neo4j:7687 - + volumes: - ../..:/workspaces:cached + # for docker passthrough - /var/run/docker.sock:/var/run/docker.sock - network_mode: service:postgres + command: sleep infinity depends_on: - postgres diff --git a/echo/.devcontainer/setup.sh b/echo/.devcontainer/setup.sh new file mode 100755 index 00000000..a203c035 --- /dev/null +++ b/echo/.devcontainer/setup.sh @@ -0,0 +1,340 @@ +#!/bin/bash +set -euo pipefail + +NODE_VERSION="${NODE_VERSION:-22}" +PYTHON_VERSION="${PYTHON_VERSION:-3.11}" +PNPM_STORE="${PNPM_STORE:-/home/node/.local/share/pnpm/store}" +FNM_DIR="$HOME/.local/share/fnm" +UV_BIN_DIR="$HOME/.local/bin" +BASHRC="$HOME/.bashrc" + +APT_UPDATED="false" +ORIGINAL_DIR="$(pwd)" +trap 'cd "$ORIGINAL_DIR"' EXIT + +log_info() { + echo -e "\033[0;32m[INFO]\033[0m $1" +} + +log_warn() { + 
echo -e "\033[1;33m[WARN]\033[0m $1" +} + +log_error() { + echo -e "\033[0;31m[ERROR]\033[0m $1" >&2 +} + +command_exists() { + command -v "$1" &> /dev/null 2>&1 +} + +ensure_apt_packages() { + local missing=() + for pkg in "$@"; do + dpkg -s "$pkg" &> /dev/null || missing+=("$pkg") + done + if [ ${#missing[@]} -gt 0 ]; then + if [ "$APT_UPDATED" = "false" ]; then + log_info "Updating apt package index..." + apt-get update -qq + APT_UPDATED="true" + fi + log_info "Installing apt packages: ${missing[*]}" + apt-get install -y "${missing[@]}" + fi +} + +ensure_line_in_file() { + local file="$1" line="$2" + mkdir -p "$(dirname "$file")" + touch "$file" + grep -Fqx "$line" "$file" 2>/dev/null || echo "$line" >> "$file" +} + +safe_pushd() { + local dir="$1" + if [ -d "$dir" ]; then + pushd "$dir" > /dev/null + else + log_error "Directory not found: $dir" + return 1 + fi +} + +safe_popd() { + popd > /dev/null || true +} + +install_fnm() { + if command_exists fnm; then + log_info "fnm already installed: $(fnm --version)" + return + fi + + ensure_apt_packages curl ca-certificates + log_info "Installing fnm..." + curl -fsSL https://fnm.vercel.app/install | bash + + ensure_line_in_file "$BASHRC" 'eval "$(fnm env --use-on-cd)"' + log_info "fnm installed" +} + +activate_fnm_env() { + if [ -d "$FNM_DIR" ]; then + export PATH="$FNM_DIR:$PATH" + fi + command_exists fnm && eval "$(fnm env --use-on-cd)" 2>/dev/null || true +} + +install_node() { + activate_fnm_env + + if command_exists node && [[ $(node --version) == v${NODE_VERSION}* ]]; then + log_info "Node.js already installed: $(node --version)" + return + fi + + if ! command_exists fnm; then + log_error "fnm not found, cannot install Node.js" + return 1 + fi + + log_info "Installing Node.js ${NODE_VERSION} via fnm..." 
+ fnm install "$NODE_VERSION" || log_warn "Node.js ${NODE_VERSION} may already be installed" + fnm default "$NODE_VERSION" || true + fnm use "$NODE_VERSION" || true + log_info "Node.js ready: $(node --version)" +} + +install_pnpm() { + if command_exists pnpm; then + log_info "pnpm already installed: $(pnpm --version)" + return + fi + + if ! command_exists npm; then + log_error "npm not found, cannot install pnpm" + return 1 + fi + + log_info "Installing pnpm globally..." + npm install -g pnpm + pnpm config set store-dir "$PNPM_STORE" || true + log_info "pnpm installed: $(pnpm --version)" +} + +install_uv() { + if command_exists uv; then + log_info "uv already installed: $(uv --version)" + return + fi + + ensure_apt_packages curl ca-certificates + log_info "Installing uv..." + curl -LsSf https://astral.sh/uv/install.sh | sh + + ensure_line_in_file "$BASHRC" 'source "$HOME/.local/bin/env"' + + if [ -f "$UV_BIN_DIR/env" ]; then + source "$UV_BIN_DIR/env" + elif [ -f "$UV_BIN_DIR/env.fish" ]; then + source "$UV_BIN_DIR/env.fish" 2>/dev/null || true + fi + export PATH="$UV_BIN_DIR:$PATH" + log_info "uv installed: $(uv --version)" +} + +ensure_uv_python() { + if ! command_exists uv; then + log_error "uv not available" + return 1 + fi + + if ! uv python list 2>/dev/null | grep -q "${PYTHON_VERSION}"; then + log_info "Installing Python ${PYTHON_VERSION} via uv..." 
+ uv python install "${PYTHON_VERSION}" + else + log_info "Python ${PYTHON_VERSION} already installed via uv" + fi +} + +pin_uv_python() { + local target_dir="$1" + ensure_uv_python + + if safe_pushd "$target_dir"; then + if uv python pin "${PYTHON_VERSION}" 2>/dev/null; then + log_info "Pinned Python ${PYTHON_VERSION} for $(basename "$target_dir")" + elif [ -f .python-version ]; then + log_info "Python already pinned for $(basename "$target_dir")" + else + log_warn "Failed to pin Python for $(basename "$target_dir")" + fi + safe_popd + fi +} + +install_frontend_deps() { + local dir="$WORKSPACE_ROOT/frontend" + if [ ! -d "$dir" ]; then + log_warn "Frontend directory not found, skipping" + return + fi + + if ! command_exists pnpm; then + log_error "pnpm not available, skipping frontend dependency installation" + return 1 + fi + + log_info "Installing frontend dependencies (pnpm)..." + if safe_pushd "$dir"; then + pnpm install --frozen-lockfile || pnpm install + safe_popd + log_info "Frontend dependencies installed" + fi +} + +install_server_deps() { + local dir="$WORKSPACE_ROOT/server" + if [ ! -d "$dir" ]; then + log_warn "Server directory not found, skipping" + return + fi + + if ! command_exists uv; then + log_error "uv not available, cannot install server dependencies" + return 1 + fi + + pin_uv_python "$dir" + + log_info "Installing server dependencies (uv sync)..." + if safe_pushd "$dir"; then + uv sync || { + log_error "uv sync failed" + safe_popd + return 1 + } + safe_popd + log_info "Server dependencies installed" + fi +} + +show_help() { + cat </dev/null 2>&1 -} - -# Function to install tmux -install_tmux() { - print_status "Installing tmux..." 
- - # Check if we're running as root - local is_root=false - if [[ $EUID -eq 0 ]]; then - is_root=true - fi - - # Function to run commands with or without sudo - run_cmd() { - if [[ "$is_root" == true ]]; then - "$@" - else - sudo "$@" - fi - } - - if [[ "$OSTYPE" == "linux-gnu"* ]]; then - if command_exists apt-get; then - if [[ "$is_root" == true ]]; then - apt-get update && apt-get install -y tmux - else - sudo apt-get update && sudo apt-get install -y tmux - fi - elif command_exists yum; then - run_cmd yum install -y tmux - elif command_exists dnf; then - run_cmd dnf install -y tmux - elif command_exists pacman; then - run_cmd pacman -S --noconfirm tmux - elif command_exists zypper; then - run_cmd zypper install -y tmux - else - print_error "No supported package manager found. Please install tmux manually." - exit 1 - fi - elif [[ "$OSTYPE" == "darwin"* ]]; then - if command_exists brew; then - brew install tmux - else - print_error "Homebrew not found. Please install Homebrew first or install tmux manually." - exit 1 - fi - else - print_error "Unsupported operating system. Please install tmux manually." - exit 1 - fi - - print_status "Tmux installed successfully!" -} - -# Function to create tmux configuration -create_tmux_config() { - local config_file="$HOME/.tmux.conf" - - # Check if config already exists - if [[ -f "$config_file" ]]; then - print_warning "Tmux configuration already exists at $config_file" - read -p "Do you want to replace it with vim-style config? (y/N): " -n 1 -r - echo - if [[ ! $REPLY =~ ^[Yy]$ ]]; then - print_status "Keeping existing configuration" - return - fi - print_status "Removing existing configuration..." - rm "$config_file" - fi - - print_status "Creating vim-style tmux configuration..." 
- - # Create comprehensive vim-style tmux configuration - cat > "$config_file" << 'EOF' -# Vim-style tmux configuration for Echo Development Environment - -# ============================================================================= -# BASIC SETTINGS -# ============================================================================= - -# Enable mouse support -set -g mouse on - -# Set leader key to Ctrl+a -set -g prefix C-a -unbind C-b -bind C-a send-prefix - -# Set base index to 0 (default) -set -g base-index 0 -setw -g pane-base-index 0 - -# Increase scrollback buffer size -set -g history-limit 10000 - -# Enable vi mode -setw -g mode-keys vi - -# ============================================================================= -# VIM-STYLE NAVIGATION (hjkl) -# ============================================================================= - -# Pane navigation with hjkl -bind h select-pane -L -bind j select-pane -D -bind k select-pane -U -bind l select-pane -R - -# Window navigation with hjkl -bind -n C-h select-window -t :- -bind -n C-l select-window -t :+ - -# ============================================================================= -# VIM-STYLE COPY MODE (Ctrl+a [) -# ============================================================================= - -# Enter copy mode -bind [ copy-mode - -# Vim-style movement in copy mode -bind-key -T copy-mode-vi h send -X cursor-left -bind-key -T copy-mode-vi j send -X cursor-down -bind-key -T copy-mode-vi k send -X cursor-up -bind-key -T copy-mode-vi l send -X cursor-right - -# Vim-style word movement -bind-key -T copy-mode-vi w send -X next-word -bind-key -T copy-mode-vi b send -X previous-word -bind-key -T copy-mode-vi e send -X next-word-end - -# Vim-style line movement -bind-key -T copy-mode-vi 0 send -X start-of-line -bind-key -T copy-mode-vi $ send -X end-of-line -bind-key -T copy-mode-vi ^ send -X start-of-line -bind-key -T copy-mode-vi g send -X history-top -bind-key -T copy-mode-vi G send -X history-bottom - -# Vim-style 
page movement -bind-key -T copy-mode-vi C-u send -X halfpage-up -bind-key -T copy-mode-vi C-d send -X halfpage-down -bind-key -T copy-mode-vi C-b send -X page-up -bind-key -T copy-mode-vi C-f send -X page-down - -# ============================================================================= -# VIM-STYLE SELECTION AND COPY -# ============================================================================= - -# Start selection with v (visual mode) -bind-key -T copy-mode-vi v send -X begin-selection - -# Start line selection with V (visual line mode) -bind-key -T copy-mode-vi V send -X select-line - -# Start block selection with C-v (visual block mode) -bind-key -T copy-mode-vi C-v send -X rectangle-toggle - -# Yank selection to tmux buffer -bind-key -T copy-mode-vi y send -X copy-selection - -# Yank line to tmux buffer -bind-key -T copy-mode-vi Y send -X copy-line - -# ============================================================================= -# VIM-STYLE SEARCH -# ============================================================================= - -# Search forward/backward -bind-key -T copy-mode-vi / command-prompt -T copy-mode-vi -I "#{pane_search_string}" "send -X search-forward-incremental \"%%%\"" -bind-key -T copy-mode-vi ? 
command-prompt -T copy-mode-vi -I "#{pane_search_string}" "send -X search-backward-incremental \"%%%\"" - -# Next/previous search result -bind-key -T copy-mode-vi n send -X search-again -bind-key -T copy-mode-vi N send -X search-reverse - -# ============================================================================= -# VIM-STYLE EDITING COMMANDS -# ============================================================================= - -# Delete selection -bind-key -T copy-mode-vi d send -X delete-selection - -# Change selection (delete and enter insert mode) -bind-key -T copy-mode-vi c send -X delete-selection - -# ============================================================================= -# WINDOW AND PANE MANAGEMENT -# ============================================================================= - -# Create new window -bind c new-window -c "#{pane_current_path}" - -# Close current window -bind x kill-window - -# Rename current window -bind r command-prompt -I "#W" "rename-window '%%'" - -# Split panes with vim-style keys -bind | split-window -h -c "#{pane_current_path}" -bind - split-window -v -c "#{pane_current_path}" - -# Close current pane -bind q kill-pane - -# Toggle pane zoom -bind z resize-pane -Z - -# ============================================================================= -# WINDOW SWITCHING -# ============================================================================= - -# Quick window switching with F keys -bind-key -n F1 select-window -t :0 -bind-key -n F2 select-window -t :1 -bind-key -n F3 select-window -t :2 -bind-key -n F4 select-window -t :3 -bind-key -n F5 select-window -t :4 - -# Window switching with numbers -bind-key -n M-1 select-window -t :0 -bind-key -n M-2 select-window -t :1 -bind-key -n M-3 select-window -t :2 -bind-key -n M-4 select-window -t :3 -bind-key -n M-5 select-window -t :4 - -# ============================================================================= -# PANE RESIZING -# 
============================================================================= - -# Resize panes with hjkl -bind -r H resize-pane -L 5 -bind -r J resize-pane -D 5 -bind -r K resize-pane -U 5 -bind -r L resize-pane -R 5 - -# Resize panes with arrow keys -bind -r Left resize-pane -L 5 -bind -r Down resize-pane -D 5 -bind -r Up resize-pane -U 5 -bind -r Right resize-pane -R 5 - -# ============================================================================= -# UTILITY COMMANDS -# ============================================================================= - -# Reload config -bind R source-file ~/.tmux.conf \; display "Config reloaded!" - -# Show key bindings -bind ? list-keys - -# Synchronize panes -bind y set-window-option synchronize-panes - -# Toggle status bar -bind b set-option status - -# ============================================================================= -# STATUS BAR CUSTOMIZATION -# ============================================================================= - -# Status bar colors -set -g status-style bg=colour235,fg=colour136,default - -# Window status colors -setw -g window-status-current-style bg=colour136,fg=colour235 -setw -g window-status-style bg=colour235,fg=colour136 - -# Pane border colors -set -g pane-border-style bg=colour235,fg=colour238 -set -g pane-active-border-style bg=colour235,fg=colour136 - -# Status bar content -set -g status-left-length 40 -set -g status-left "#[fg=green]#S #[fg=black]• #[fg=green,bright]#(uname -r | cut -c 1-6)#[default]" - -set -g status-right-length 60 -set -g status-right "#[fg=colour136]#(cut -d ' ' -f 1-3 /proc/loadavg)#[default] #[fg=colour33]%H:%M#[default]" - -# Center the window list -set -g status-justify centre - -# ============================================================================= -# PERFORMANCE OPTIMIZATIONS -# ============================================================================= - -# Faster key repeat -set -s escape-time 0 - -# Focus events -set -g focus-events on - -# 
Aggressive resize -setw -g aggressive-resize on -EOF - - print_status "Vim-style tmux configuration created at $config_file" -} - -# Function to check if session exists -session_exists() { - tmux has-session -t "$SESSION_NAME" 2>/dev/null -} - -# Function to kill existing session -kill_session() { - if session_exists; then - print_warning "Session '$SESSION_NAME' already exists. Killing it..." - tmux kill-session -t "$SESSION_NAME" - sleep 1 - fi -} - -# Function to create the development session -create_session() { - print_status "Creating tmux session '$SESSION_NAME'..." - - # Get the absolute path to the project root - local project_root="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" - - # Create new session with first window - tmux new-session -d -s "$SESSION_NAME" -n "server" -c "$project_root" - - # Window 1: Server - tmux send-keys -t "$SESSION_NAME:server" "cd server && source .venv/bin/activate && ./run.sh" C-m - - # Window 2: Workers and Scheduler (split 80/20) - tmux new-window -t "$SESSION_NAME" -n "workers" -c "$project_root" - - # Split the workers window horizontally (80% workers, 20% scheduler) - tmux split-window -t "$SESSION_NAME:workers" -h -p 80 -c "$project_root" - tmux split-window -t "$SESSION_NAME:workers" -h -p 20 -c "$project_root" - - # Rename the panes for clarity - tmux select-pane -t "$SESSION_NAME:workers.0" -T "workers" - tmux select-pane -t "$SESSION_NAME:workers.1" -T "workers-cpu" - tmux select-pane -t "$SESSION_NAME:workers.2" -T "scheduler" - - # Send commands to each pane - tmux send-keys -t "$SESSION_NAME:workers.0" "cd server && source .venv/bin/activate && ./run-worker.sh" C-m - tmux send-keys -t "$SESSION_NAME:workers.1" "cd server && source .venv/bin/activate && ./run-worker-cpu.sh" C-m - tmux send-keys -t "$SESSION_NAME:workers.2" "cd server && source .venv/bin/activate && ./run-scheduler.sh" C-m - - # Window 3: Frontends - tmux new-window -t "$SESSION_NAME" -n "frontends" -c "$project_root" - - # Split the frontends 
window vertically - tmux split-window -t "$SESSION_NAME:frontends" -v -c "$project_root" - - # Rename the panes for clarity - tmux select-pane -t "$SESSION_NAME:frontends.0" -T "admin-dashboard" - tmux select-pane -t "$SESSION_NAME:frontends.1" -T "participant-portal" - - # Send commands to each pane - tmux send-keys -t "$SESSION_NAME:frontends.0" "cd frontend && pnpm run dev" C-m - tmux send-keys -t "$SESSION_NAME:frontends.1" "cd frontend && pnpm run participant:dev" C-m - - # Set window layout for better organization - tmux select-window -t "$SESSION_NAME:server" - - print_status "Session created successfully!" -} - -# Function to attach to session -attach_session() { - print_status "Attaching to session '$SESSION_NAME'..." - tmux attach-session -t "$SESSION_NAME" -} - -# Function to show session info -show_session_info() { - print_status "Session Information:" - echo " Session Name: $SESSION_NAME" - echo " Windows:" - echo " 1. server - Main server process" - echo " 2. workers - Workers and scheduler (80/20 split)" - echo " - workers: Network workers" - echo " - workers-cpu: CPU workers" - echo " - scheduler: Task scheduler" - echo " 3. frontends - Frontend applications" - echo " - admin-dashboard: Admin interface" - echo " - participant-portal: Participant interface" - echo "" - echo " Vim-Style Navigation:" - echo " hjkl - Navigate between panes" - echo " Ctrl+h/l - Navigate between windows" - echo " Ctrl+a [ - Enter copy mode (vim-style)" - echo "" - echo " Copy Mode (Ctrl+a [):" - echo " hjkl - Move cursor" - echo " w/b/e - Word movement" - echo " 0/$/^ - Line start/end" - echo " g/G - Buffer start/end" - echo " v/V/C-v - Visual/line/block selection" - echo " y/Y - Yank selection/line" - echo " /? 
- Search forward/backward" - echo " n/N - Next/previous search" - echo "" - echo " Window Management:" - echo " Ctrl+a c - Create new window" - echo " Ctrl+a x - Close current window" - echo " Ctrl+a r - Rename window" - echo " F1-F5 - Switch to windows 1-5" - echo " Alt+1-5 - Switch to windows 1-5" - echo "" - echo " Pane Management:" - echo " Ctrl+a | - Split pane horizontally" - echo " Ctrl+a - - Split pane vertically" - echo " Ctrl+a q - Close current pane" - echo " Ctrl+a z - Toggle pane zoom" - echo " H/J/K/L - Resize panes" - echo "" - echo " Utility:" - echo " Ctrl+a R - Reload config" - echo " Ctrl+a ? - Show key bindings" - echo " Ctrl+a y - Toggle pane sync" - echo " Ctrl+a b - Toggle status bar" - echo "" - echo " Mouse support is enabled" - echo " Use Ctrl+a d to detach from session" -} - -# Function to show help -show_help() { - echo "Echo Development Environment Tmux Setup Script" - echo "" - echo "Usage: $0 [OPTIONS]" - echo "" - echo "Options:" - echo " -h, --help Show this help message" - echo " -i, --install Install tmux if not present" - echo " -c, --config Create/update tmux configuration" - echo " -k, --kill Kill existing session" - echo " -a, --attach Attach to existing session" - echo " -n, --new Create new session (default)" - echo " -s, --status Show session status" - echo " -f, --force Force recreate session (kill + new)" - echo "" - echo "Examples:" - echo " $0 # Create new session and attach" - echo " $0 -i # Install tmux and create session" - echo " $0 -k # Kill existing session" - echo " $0 -a # Attach to existing session" - echo " $0 -f # Force recreate session" -} - -# Main function -main() { - local install_tmux_flag=false - local create_config_flag=false - local kill_session_flag=false - local attach_only_flag=false - local new_session_flag=false - local show_status_flag=false - local force_flag=false - - # Parse command line arguments - while [[ $# -gt 0 ]]; do - case $1 in - -h|--help) - show_help - exit 0 - ;; - -i|--install) - 
install_tmux_flag=true - shift - ;; - -c|--config) - create_config_flag=true - shift - ;; - -k|--kill) - kill_session_flag=true - shift - ;; - -a|--attach) - attach_only_flag=true - shift - ;; - -n|--new) - new_session_flag=true - shift - ;; - -s|--status) - show_status_flag=true - shift - ;; - -f|--force) - force_flag=true - shift - ;; - *) - print_error "Unknown option: $1" - show_help - exit 1 - ;; - esac - done - - # Default behavior if no flags specified - if [[ "$install_tmux_flag" == false && "$create_config_flag" == false && "$kill_session_flag" == false && "$attach_only_flag" == false && "$new_session_flag" == false && "$show_status_flag" == false && "$force_flag" == false ]]; then - new_session_flag=true - fi - - # Check if tmux is installed - if ! command_exists tmux; then - if [[ "$install_tmux_flag" == true ]]; then - install_tmux - else - print_error "Tmux is not installed. Use -i flag to install it automatically." - exit 1 - fi - fi - - # Create/update tmux configuration - if [[ "$create_config_flag" == true ]]; then - create_tmux_config - fi - - # Show session status - if [[ "$show_status_flag" == true ]]; then - if session_exists; then - print_status "Session '$SESSION_NAME' exists" - tmux list-sessions - else - print_warning "Session '$SESSION_NAME' does not exist" - fi - show_session_info - exit 0 - fi - - # Kill session if requested - if [[ "$kill_session_flag" == true || "$force_flag" == true ]]; then - kill_session - fi - - # Attach to existing session - if [[ "$attach_only_flag" == true ]]; then - if session_exists; then - attach_session - else - print_error "Session '$SESSION_NAME' does not exist. Create it first with -n flag." 
- exit 1 - fi - exit 0 - fi - - # Create new session - if [[ "$new_session_flag" == true || "$force_flag" == true ]]; then - create_session - show_session_info - attach_session - fi -} - -# Run main function with all arguments -main "$@" \ No newline at end of file diff --git a/echo/directus/.env.sample b/echo/directus/.env.sample index ba98b27b..723c8847 100644 --- a/echo/directus/.env.sample +++ b/echo/directus/.env.sample @@ -1,57 +1,16 @@ -# directus -PUBLIC_URL=http://localhost:8055 -PORT=8055 -SECRET=secret -TELEMETRY=false - -# credentials -ADMIN_EMAIL=admin@dembrane.com -ADMIN_PASSWORD=admin - -# database -DB_CLIENT=postgres -DB_HOST=postgres -DB_PORT=5432 -DB_USER=dembrane -DB_PASSWORD=dembrane -DB_DATABASE=dembrane - -# ws -WEBSOCKETS_ENABLED=true - -# cors -CORS_ENABLED=true -CORS_ORIGIN=http://localhost:5173 -CORS_CREDENTIALS=true - -# session -SESSION_COOKIE_SECURE=false -SESSION_COOKIE_SAME_SITE=lax - -# email -# EMAIL_TRANSPORT=smtp -# EMAIL_FROM=DoNotReply@c1f65324-81a8-41de-abe3-2c1cbf52ba1a.azurecomm.net -# EMAIL_SMTP_HOST=smtp.azurecomm.net -# EMAIL_SMTP_PORT=587 -# EMAIL_SMTP_USER=Communication-Services-Pilot.062b802b-0df6-407b-8bd4-ecb66ec67c32.0e14b809-b0c5-474b-81ed-6c06d141123a -# EMAIL_SMTP_PASSWORD= - -# auth -USER_INVITE_URL_ALLOW_LIST=http://localhost:5173/invite -USER_REGISTER_URL_ALLOW_LIST=http://localhost:5173/verify-email -PASSWORD_RESET_URL_ALLOW_LIST=http://localhost:5173/password-reset - -# AUTH_PROVIDERS=google - -# AUTH_GOOGLE_DRIVER=openid -# AUTH_GOOGLE_CLIENT_ID= -# AUTH_GOOGLE_CLIENT_SECRET= -# AUTH_GOOGLE_ISSUER_URL=https://accounts.google.com -# AUTH_GOOGLE_IDENTIFIER_KEY=email -# AUTH_GOOGLE_FIRST_NAME_KEY=given_name -# AUTH_GOOGLE_LAST_NAME_KEY=family_name -# AUTH_GOOGLE_ICON=google -# AUTH_GOOGLE_LABEL=Google -# AUTH_GOOGLE_ALLOW_PUBLIC_REGISTRATION=true -# AUTH_GOOGLE_DEFAULT_ROLE_ID= -# AUTH_GOOGLE_REDIRECT_ALLOW_LIST= \ No newline at end of file +# for s3 +STORAGE_LOCATIONS=local,s3 +STORAGE_S3_KEY=dembrane 
+STORAGE_S3_SECRET=dembrane +STORAGE_S3_BUCKET=dembrane +STORAGE_S3_ENDPOINT=http://minio:9000 + +# for email +EMAIL_TRANSPORT="smtp" +EMAIL_FROM="" +EMAIL_SMTP_HOST="" +EMAIL_SMTP_PORT=587 +EMAIL_SMTP_USER="" +EMAIL_SMTP_PASSWORD="" +EMAIL_SMTP_SECURE=false +EMAIL_SMTP_IGNORE_TLS=false \ No newline at end of file diff --git a/echo/directus/Dockerfile b/echo/directus/Dockerfile index 6425287a..3ff6ba57 100644 --- a/echo/directus/Dockerfile +++ b/echo/directus/Dockerfile @@ -1,29 +1,29 @@ -FROM node:22-alpine AS third-party-ext +# for installing custom extensions via npm -RUN apk add python3 g++ make +# FROM node:22-alpine AS third-party-ext -WORKDIR /extensions +# RUN apk add python3 g++ make -# Copy package files first for better layer caching -COPY extensions/package*.json ./ +# WORKDIR /extensions -RUN npm install +# # Copy package files first for better layer caching +# COPY extensions/package*.json ./ -# Copy the rest of the extensions after npm install -COPY extensions/. ./ -# Move all extensions the starts with 'directus-extension-', using find, to the /extensions/directus folder -RUN mkdir -p ./directus +# RUN npm install -RUN cd node_modules && find . -maxdepth 1 -type d -name "directus-extension-*" -exec mv {} ../directus \; +# # Copy the rest of the extensions after npm install +# COPY extensions/. ./ +# # Move all extensions the starts with 'directus-extension-', using find, to the /extensions/directus folder +# RUN mkdir -p ./directus +# RUN cd node_modules && find . 
-maxdepth 1 -type d -name "directus-extension-*" -exec mv {} ../directus \; -FROM directus/directus:11.5.1 - -# for extensions installed via npm +FROM tractr/directus-sync:11.13.0 COPY ./directus-sync.config.js ./directus-sync.config.js -COPY --from=third-party-ext /extensions/directus ./extensions +# COPY --from=third-party-ext /extensions/directus ./extensions + COPY ./templates ./templates/ # https://github.com/directus/directus/blob/main/Dockerfile diff --git a/echo/directus/directus-sync.config.js b/echo/directus/directus-sync.config.js index 9449f1db..2cde8f1c 100644 --- a/echo/directus/directus-sync.config.js +++ b/echo/directus/directus-sync.config.js @@ -3,6 +3,5 @@ module.exports = { // this NEEDS to be done after the build is complete // OR needs to be run in the same container as the directus server dumpPath: './sync', - preserveIds: ['roles', 'policies', 'dashboards', 'panels'], specs: false, }; diff --git a/echo/directus/extensions/package.json b/echo/directus/extensions/package.json deleted file mode 100644 index 782c3b4b..00000000 --- a/echo/directus/extensions/package.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "name": "directus-extensions", - "dependencies": { - "directus-extension-schema-management-module": "~2.0.1", - "directus-extension-sync": "~3.0.3" - } -} \ No newline at end of file diff --git a/echo/directus/sync/collections/flows.json b/echo/directus/sync/collections/flows.json index e81051ff..0a114cce 100644 --- a/echo/directus/sync/collections/flows.json +++ b/echo/directus/sync/collections/flows.json @@ -8,7 +8,7 @@ "trigger": "operation", "accountability": "all", "options": {}, - "operation": "5bea9e07-e6f0-41c6-821d-b426dec3427c", + "operation": null, "_syncId": "17703446-fef0-49e9-bdc4-385db1311137" }, { @@ -50,7 +50,7 @@ "project_report" ] }, - "operation": "239c6fea-242d-4b7d-b854-c99d2fe43cf3", + "operation": null, "_syncId": "ec4e7ea5-72de-4365-b66f-d8f11b549495" } ] diff --git a/echo/directus/sync/collections/operations.json 
b/echo/directus/sync/collections/operations.json index d0d8f621..5168a13b 100644 --- a/echo/directus/sync/collections/operations.json +++ b/echo/directus/sync/collections/operations.json @@ -1,96 +1,4 @@ [ - { - "name": "Check Language", - "key": "check_language", - "type": "condition", - "position_x": 20, - "position_y": 1, - "options": { - "filter": { - "$trigger": { - "language": { - "_eq": "en" - } - } - } - }, - "resolve": "3dbf2ea1-17f8-4bde-aa89-43278fe9a00f", - "reject": "eaeb2c39-32e0-428d-ad03-ff0e6052adcc", - "flow": "17703446-fef0-49e9-bdc4-385db1311137", - "_syncId": "5bea9e07-e6f0-41c6-821d-b426dec3427c" - }, - { - "name": "Check Language", - "key": "check_language", - "type": "condition", - "position_x": 20, - "position_y": 1, - "options": { - "filter": { - "$trigger": { - "language": { - "_eq": "en" - } - } - } - }, - "resolve": "9390ed2f-7dc6-4a6a-83da-2d87d478261d", - "reject": "4795cc24-3f3c-4be9-9844-24552da522fa", - "flow": "17703446-fef0-49e9-bdc4-385db1311137", - "_syncId": "af7a671d-78fa-4d07-adad-c3bfabc4617c" - }, - { - "name": "Check Report Language", - "key": "check_report_language", - "type": "item-read", - "position_x": 73, - "position_y": 1, - "options": { - "query": { - "filter": { - "project_id": { - "id": { - "_eq": "{{$trigger.payload.project_id.id}}" - } - } - }, - "fields": [ - "language" - ] - }, - "collection": "project_report" - }, - "resolve": "efb3982e-5703-4c07-8982-a6e1b5218e4a", - "reject": null, - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "d154ee34-0931-43f3-aaca-9168f88fa04b" - }, - { - "name": "Check Report Language", - "key": "check_report_language", - "type": "item-read", - "position_x": 73, - "position_y": 1, - "options": { - "query": { - "filter": { - "project_id": { - "id": { - "_eq": "{{$trigger.payload.project_id.id}}" - } - } - }, - "fields": [ - "language" - ] - }, - "collection": "project_report" - }, - "resolve": "ca1ffbc5-cfce-4fb4-8f15-c128ea407d41", - "reject": null, - "flow": 
"ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "c5d2aff3-1a1a-4ea2-8e39-5189409b7dc1" - }, { "name": "Dutch", "key": "dutch", @@ -171,7 +79,7 @@ "resolve": null, "reject": null, "flow": "17703446-fef0-49e9-bdc4-385db1311137", - "_syncId": "84c38ea6-5d15-429f-8c24-9485d54ba7be" + "_syncId": "615a54cd-a72e-41ad-9403-9577c80280d6" }, { "name": "Email Send Operation Failed Dutch", @@ -185,7 +93,7 @@ "resolve": null, "reject": null, "flow": "17703446-fef0-49e9-bdc4-385db1311137", - "_syncId": "615a54cd-a72e-41ad-9403-9577c80280d6" + "_syncId": "84c38ea6-5d15-429f-8c24-9485d54ba7be" }, { "name": "failed", @@ -224,10 +132,10 @@ "options": { "code": "module.exports = async function(data) {\n\n const submissions = data.get_all_participants;\n \n // Filter submissions to only include those where email_opt_in is true\n const filteredSubmissions = submissions.filter(sub => sub.email_opt_in === true);\n\n // Create an array with email, project_id and an email_opt_out token for each submission\n const result = filteredSubmissions.map(sub => ({\n project_name: data.project_data[0].name || '',\n\t\tdefault_conversation_title: data.project_data[0].default_conversation_title || '',\n\t\tconversation_name: sub.conversation_id.participant_name || '',\n email: sub.email,\n project_id: sub.project_id || '',\n token: sub.email_opt_out_token,\n language: data.check_report_language[0].language || 'empty',\n ADMIN_BASE_URL: \"{{ $env.ADMIN_BASE_URL }}\" || \"http://localhost:5173\",\n PARTICIPANT_BASE_URL: \"{{ $env.PARTICIPANT_BASE_URL }}\" || \"http://localhost:5174\", \n }));\n \n return result;\n};" }, - "resolve": "e101f00d-2fb8-4f40-9e0e-4d24da5bb1e9", + "resolve": "b8144cee-59f6-40d9-a849-dd0c639e4e31", "reject": null, "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "efb3982e-5703-4c07-8982-a6e1b5218e4a" + "_syncId": "ca1ffbc5-cfce-4fb4-8f15-c128ea407d41" }, { "name": "Filter Emails", @@ -238,64 +146,10 @@ "options": { "code": "module.exports = async function(data) 
{\n\n const submissions = data.get_all_participants;\n \n // Filter submissions to only include those where email_opt_in is true\n const filteredSubmissions = submissions.filter(sub => sub.email_opt_in === true);\n\n // Create an array with email, project_id and an email_opt_out token for each submission\n const result = filteredSubmissions.map(sub => ({\n project_name: data.project_data[0].name || '',\n\t\tdefault_conversation_title: data.project_data[0].default_conversation_title || '',\n\t\tconversation_name: sub.conversation_id.participant_name || '',\n email: sub.email,\n project_id: sub.project_id || '',\n token: sub.email_opt_out_token,\n language: data.check_report_language[0].language || 'empty',\n ADMIN_BASE_URL: \"{{ $env.ADMIN_BASE_URL }}\" || \"http://localhost:5173\",\n PARTICIPANT_BASE_URL: \"{{ $env.PARTICIPANT_BASE_URL }}\" || \"http://localhost:5174\", \n }));\n \n return result;\n};" }, - "resolve": "b8144cee-59f6-40d9-a849-dd0c639e4e31", - "reject": null, - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "ca1ffbc5-cfce-4fb4-8f15-c128ea407d41" - }, - { - "name": "Get All Participants", - "key": "get_all_participants", - "type": "item-read", - "position_x": 55, - "position_y": 1, - "options": { - "query": { - "filter": { - "project_id": { - "id": { - "_eq": "{{$trigger.payload.project_id}}" - } - } - }, - "fields": [ - "*", - "conversation_id.participant_name" - ] - }, - "collection": "project_report_notification_participants" - }, - "resolve": "c5d2aff3-1a1a-4ea2-8e39-5189409b7dc1", - "reject": null, - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "47f1463c-3cb6-4420-a50e-92938fed3197" - }, - { - "name": "Get All Participants", - "key": "get_all_participants", - "type": "item-read", - "position_x": 55, - "position_y": 1, - "options": { - "query": { - "filter": { - "project_id": { - "id": { - "_eq": "{{$trigger.payload.project_id}}" - } - } - }, - "fields": [ - "*", - "conversation_id.participant_name" - ] - }, - 
"collection": "project_report_notification_participants" - }, - "resolve": "d154ee34-0931-43f3-aaca-9168f88fa04b", + "resolve": "e101f00d-2fb8-4f40-9e0e-4d24da5bb1e9", "reject": null, "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "107ebc18-7d2d-4299-9d98-b7d1e7322b7c" + "_syncId": "efb3982e-5703-4c07-8982-a6e1b5218e4a" }, { "name": "log environment vars", @@ -325,96 +179,6 @@ "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", "_syncId": "da5f4cce-eff0-426d-a207-d61366899d1f" }, - { - "name": "PROCEED_ONLY_IF \"published\" in payload", - "key": "proceed_only_if_published_in_payload", - "type": "condition", - "position_x": 19, - "position_y": 1, - "options": { - "filter": { - "$trigger": { - "payload": { - "status": { - "_eq": "published" - } - } - } - } - }, - "resolve": "ea051ae4-776f-490b-8b02-eaf4099243ef", - "reject": "84852456-3f3a-4906-be94-8b750159883b", - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "239c6fea-242d-4b7d-b854-c99d2fe43cf3" - }, - { - "name": "PROCEED_ONLY_IF \"published\" in payload", - "key": "proceed_only_if_published_in_payload", - "type": "condition", - "position_x": 19, - "position_y": 1, - "options": { - "filter": { - "$trigger": { - "payload": { - "status": { - "_eq": "published" - } - } - } - } - }, - "resolve": "d8554457-95b6-474f-ba67-dfd5f936d575", - "reject": "e8274ad4-5844-42cd-8a6b-d40d08cf83d3", - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "2784a141-2ae3-48d1-aedf-57c0be3bcc74" - }, - { - "name": "Project Data", - "key": "project_data", - "type": "item-read", - "position_x": 37, - "position_y": 1, - "options": { - "collection": "project", - "query": { - "filter": { - "id": { - "id": { - "_eq": "{{$trigger.payload.project_id}}" - } - } - } - } - }, - "resolve": "107ebc18-7d2d-4299-9d98-b7d1e7322b7c", - "reject": null, - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "ea051ae4-776f-490b-8b02-eaf4099243ef" - }, - { - "name": "Project Data", - "key": "project_data", - 
"type": "item-read", - "position_x": 37, - "position_y": 1, - "options": { - "collection": "project", - "query": { - "filter": { - "id": { - "id": { - "_eq": "{{$trigger.payload.project_id}}" - } - } - } - } - }, - "resolve": "47f1463c-3cb6-4420-a50e-92938fed3197", - "reject": null, - "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "d8554457-95b6-474f-ba67-dfd5f936d575" - }, { "name": "Report Not Published", "key": "report_not_published", @@ -427,7 +191,7 @@ "resolve": null, "reject": null, "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "e8274ad4-5844-42cd-8a6b-d40d08cf83d3" + "_syncId": "84852456-3f3a-4906-be94-8b750159883b" }, { "name": "Report Not Published", @@ -441,7 +205,7 @@ "resolve": null, "reject": null, "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "84852456-3f3a-4906-be94-8b750159883b" + "_syncId": "e8274ad4-5844-42cd-8a6b-d40d08cf83d3" }, { "name": "Send Email Dutch", @@ -470,9 +234,9 @@ ] }, "resolve": null, - "reject": "615a54cd-a72e-41ad-9403-9577c80280d6", + "reject": "84c38ea6-5d15-429f-8c24-9485d54ba7be", "flow": "17703446-fef0-49e9-bdc4-385db1311137", - "_syncId": "ea78ec02-364d-4f18-80f8-ea5ac4c787ed" + "_syncId": "34fb6ee5-2813-484a-a1cc-f97de097509b" }, { "name": "Send Email Dutch", @@ -501,9 +265,9 @@ ] }, "resolve": null, - "reject": "84c38ea6-5d15-429f-8c24-9485d54ba7be", + "reject": "615a54cd-a72e-41ad-9403-9577c80280d6", "flow": "17703446-fef0-49e9-bdc4-385db1311137", - "_syncId": "34fb6ee5-2813-484a-a1cc-f97de097509b" + "_syncId": "ea78ec02-364d-4f18-80f8-ea5ac4c787ed" }, { "name": "Send Email English", @@ -581,7 +345,7 @@ "resolve": null, "reject": null, "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "b8144cee-59f6-40d9-a849-dd0c639e4e31" + "_syncId": "e101f00d-2fb8-4f40-9e0e-4d24da5bb1e9" }, { "name": "Trigger Email Flow", @@ -597,6 +361,6 @@ "resolve": null, "reject": null, "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", - "_syncId": "e101f00d-2fb8-4f40-9e0e-4d24da5bb1e9" + 
"_syncId": "b8144cee-59f6-40d9-a849-dd0c639e4e31" } ] diff --git a/echo/directus/sync/collections/permissions.json b/echo/directus/sync/collections/permissions.json index f9319c1e..35f853b6 100644 --- a/echo/directus/sync/collections/permissions.json +++ b/echo/directus/sync/collections/permissions.json @@ -179,1757 +179,6 @@ "policy": "_sync_default_public_policy", "_syncId": "67f6430d-23ac-493c-8a71-ffb358aa89ef" }, - { - "collection": "aspect", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "view_id": { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "d3181b55-841d-4f18-a3f7-888d9fbcc98c" - }, - { - "collection": "aspect", - "action": "delete", - "permissions": { - "_and": [ - { - "view_id": { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "1d595231-c999-46aa-9288-05ce55d79c7a" - }, - { - "collection": "aspect", - "action": "read", - "permissions": { - "_and": [ - { - "view_id": { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "82c540b7-e4ec-4824-a11d-49461a021d9e" - }, - { - "collection": "aspect", - "action": "update", - "permissions": { - "_and": [ - { - "view_id": { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": 
"502f358e-990e-4eef-a307-1296f8b88379" - }, - { - "collection": "conversation_chunk", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "0979c9a3-49f3-476a-b10b-337354ff4b41" - }, - { - "collection": "conversation_chunk", - "action": "delete", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "5d7714cc-7901-47a6-a0ec-3a57277b1b10" - }, - { - "collection": "conversation_chunk", - "action": "read", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "561257a4-cc68-44a1-b06c-d64d40b7993d" - }, - { - "collection": "conversation_chunk", - "action": "update", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "be557ac0-8d76-4cc9-9fbd-c505c277a95e" - }, - { - "collection": "conversation_project_tag", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "3306acba-b7f2-4952-bef5-5d2e91c300ed" - }, - { - 
"collection": "conversation_project_tag", - "action": "delete", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "f53c0f9d-9e07-479b-9d02-e265df8e5ac5" - }, - { - "collection": "conversation_project_tag", - "action": "read", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "68b70afc-b046-4a33-bea4-5c783dc4802e" - }, - { - "collection": "conversation_project_tag", - "action": "update", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "f616b182-4cf9-4d05-8ca6-630660333682" - }, - { - "collection": "conversation", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "98cab13a-0dd4-4c3e-b578-9f97547252b2" - }, - { - "collection": "conversation", - "action": "delete", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "dc8458da-0ebf-40f6-a80b-52904da1c652" - }, - { - "collection": "conversation", - "action": "read", - "permissions": { - "_and": [ - { - "project_id": { - 
"directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "8f80716a-9e99-474d-9087-291f5a348752" - }, - { - "collection": "conversation", - "action": "update", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "89da9677-652d-4aad-9647-94e321b7821c" - }, - { - "collection": "directus_dashboards", - "action": "create", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "219fde48-a38a-469f-a761-9226e76de708" - }, - { - "collection": "directus_dashboards", - "action": "delete", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "62051d7b-48e3-49ef-bf36-e6a5b713409a" - }, - { - "collection": "directus_dashboards", - "action": "read", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "7d476116-e036-4b5c-b880-3c667a4c8474" - }, - { - "collection": "directus_dashboards", - "action": "update", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "f0bd3a61-a2f5-41fe-876e-b7d024cc3a1f" - }, - { - "collection": "directus_files", - "action": "create", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "76ea5c57-976e-4853-a753-49326557bee2" - }, - { - "collection": "directus_files", - "action": "delete", - "permissions": {}, - "validation": null, - 
"presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "80ddbbbf-ebda-403f-a6ea-eaf9cac6b557" - }, - { - "collection": "directus_files", - "action": "read", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "af656ee2-20c3-4b80-9856-cf5e9df3a2a2" - }, - { - "collection": "directus_files", - "action": "update", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "bf8c205d-5919-47c2-b973-e7498d4b44fe" - }, - { - "collection": "directus_flows", - "action": "read", - "permissions": { - "trigger": { - "_eq": "manual" - } - }, - "validation": null, - "presets": null, - "fields": [ - "id", - "status", - "name", - "icon", - "color", - "options", - "trigger" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "05c696e0-1e52-4296-9ddb-0b5fbf3727a3" - }, - { - "collection": "directus_folders", - "action": "create", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "ec40cf3f-e10f-427f-aa03-008bcebe4290" - }, - { - "collection": "directus_folders", - "action": "delete", - "permissions": {}, - "validation": null, - "presets": null, - "fields": null, - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "346d8d04-2507-41fb-9385-7ad7ffaaa38b" - }, - { - "collection": "directus_folders", - "action": "read", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "0c5d2a33-54bf-472b-b20d-4f13d3f4bacb" - }, - { - "collection": "directus_folders", - "action": "update", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - 
"_syncId": "1f74523f-7137-4a08-9729-6c45e519befd" - }, - { - "collection": "directus_panels", - "action": "create", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "9bfa45f1-1b7c-4a06-bb93-82b51820643a" - }, - { - "collection": "directus_panels", - "action": "delete", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "1e5eaa1a-2aff-48f6-9d52-4ec158b40675" - }, - { - "collection": "directus_panels", - "action": "read", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "35d7e78a-3722-4630-9d66-e9f9190eaabc" - }, - { - "collection": "directus_panels", - "action": "update", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "12606583-0c3f-4c43-876d-824dbf5d3b84" - }, - { - "collection": "directus_roles", - "action": "read", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "2005c126-e1cb-4e9c-98ea-5631f35fd89b" - }, - { - "collection": "directus_shares", - "action": "create", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "e3c1c580-9339-45c1-9184-3462b9f09200" - }, - { - "collection": "directus_shares", - "action": "delete", - "permissions": { - "user_created": { - "_eq": "$CURRENT_USER" - } - }, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "aabea0d0-884e-44ce-a8be-e41f30795b48" - }, - { - "collection": "directus_shares", - "action": "read", - "permissions": { - "_or": [ - { - 
"role": { - "_eq": "$CURRENT_ROLE" - } - }, - { - "role": { - "_null": true - } - } - ] - }, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "13e5551b-c68d-436e-a187-e990e92670c1" - }, - { - "collection": "directus_shares", - "action": "update", - "permissions": { - "user_created": { - "_eq": "$CURRENT_USER" - } - }, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "a2adcd23-716d-4b33-8b98-17e78aafb529" - }, - { - "collection": "directus_users", - "action": "read", - "permissions": {}, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "a347e0fe-ff46-49e4-93a5-b46ed26025f6" - }, - { - "collection": "directus_users", - "action": "update", - "permissions": { - "id": { - "_eq": "$CURRENT_USER" - } - }, - "validation": null, - "presets": null, - "fields": [ - "first_name", - "last_name", - "email", - "password", - "location", - "title", - "description", - "avatar", - "language", - "appearance", - "theme_light", - "theme_dark", - "theme_light_overrides", - "theme_dark_overrides", - "tfa_secret" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "e44ca5a2-98f4-41bc-a341-a87e9b23f0ce" - }, - { - "collection": "insight", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "bb56a579-b647-4cf2-85ca-293433033ae1" - }, - { - "collection": "insight", - "action": "delete", - "permissions": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - 
"fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "ac36a5b8-f540-4acf-a138-a446c6bce0af" - }, - { - "collection": "insight", - "action": "read", - "permissions": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "8050d765-df53-4acb-a163-b99c420374f5" - }, - { - "collection": "insight", - "action": "update", - "permissions": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "5c792ad4-88a0-49d9-9a19-f23804c7ae19" - }, - { - "collection": "project_analysis_run", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "2c7d71fa-78ad-40a9-b389-098ced8638c2" - }, - { - "collection": "project_analysis_run", - "action": "delete", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "107e0b22-8bbb-42d1-9c68-d6e338808fe4" - }, - { - "collection": "project_analysis_run", - "action": "read", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "4d615874-ce65-4738-8c3d-50015cdf46af" - }, - { - 
"collection": "project_analysis_run", - "action": "update", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "8973d452-68c9-4514-9175-fcb62641c098" - }, - { - "collection": "project_chat_conversation", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "8816c364-a043-4c53-bd99-0d04e460eb15" - }, - { - "collection": "project_chat_conversation", - "action": "delete", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "dc1d70d5-33c7-41dd-996c-5e27063ed223" - }, - { - "collection": "project_chat_conversation", - "action": "read", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "f4ea4dfd-8669-40b7-a258-e7e6a8925138" - }, - { - "collection": "project_chat_conversation", - "action": "update", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "24196645-f2c6-42fa-af09-4c53cbc88113" - }, - { - "collection": "project_chat_message_conversation_1", - 
"action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "d65343cd-2a96-409d-b00a-19b1d64f459a" - }, - { - "collection": "project_chat_message_conversation_1", - "action": "delete", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "03e833ac-a64b-4425-ad42-195bba948706" - }, - { - "collection": "project_chat_message_conversation_1", - "action": "read", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "3b5e18e8-0b92-4b0f-a01a-220a08b82756" - }, - { - "collection": "project_chat_message_conversation_1", - "action": "update", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "112bc5b8-9ca4-4030-a83d-230a77c726db" - }, - { - "collection": "project_chat_message_conversation", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "03e63e4f-c005-4812-99fd-142b2b70008a" - }, - { - "collection": 
"project_chat_message_conversation", - "action": "delete", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "83b44d27-ea1c-4985-bf44-3961b5a7fef8" - }, - { - "collection": "project_chat_message_conversation", - "action": "read", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "9b494d9c-5639-4d64-b8ba-8be34e44ce1e" - }, - { - "collection": "project_chat_message_conversation", - "action": "update", - "permissions": { - "_and": [ - { - "conversation_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "737d52cf-e611-4c1c-8aa4-4e5809258adb" - }, - { - "collection": "project_chat_message", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_chat_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "fff65e80-e2a6-4814-a70d-796a913f1220" - }, - { - "collection": "project_chat_message", - "action": "delete", - "permissions": { - "_and": [ - { - "project_chat_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "4bb00e32-2513-44f6-8ff9-34439e1b74ce" - }, - { - "collection": 
"project_chat_message", - "action": "read", - "permissions": { - "_and": [ - { - "project_chat_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "c9a89174-b2e5-49e6-9a29-da129bb033aa" - }, - { - "collection": "project_chat_message", - "action": "update", - "permissions": { - "_and": [ - { - "project_chat_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "e3b280d6-e7b9-4480-b08d-06b82067917d" - }, - { - "collection": "project_chat", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "921db554-02c0-4369-9e90-74e0615687ea" - }, - { - "collection": "project_chat", - "action": "delete", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "f732c1e3-7f7d-45ae-933a-123e6dfd2df4" - }, - { - "collection": "project_chat", - "action": "read", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "e57af380-ed66-4a9a-b493-da2f49b0cf29" - }, - { - "collection": "project_chat", - "action": "update", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - 
"validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "eccf7229-64cd-4b0d-a72d-0558a6a1a379" - }, - { - "collection": "project_report_metric", - "action": "read", - "permissions": { - "_and": [ - { - "project_report_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "259f27a4-09ac-465a-81c2-40fb8d247b8f" - }, - { - "collection": "project_report", - "action": "create", - "permissions": null, - "validation": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "bb04e87d-ae0b-4078-a5ee-0ae46988953f" - }, - { - "collection": "project_report", - "action": "delete", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "1f808cf9-cc20-4d17-b9b6-851600a74324" - }, - { - "collection": "project_report", - "action": "read", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "c7044f74-d5ba-4e97-86a6-39e64349695c" - }, - { - "collection": "project_report", - "action": "update", - "permissions": null, - "validation": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "9228b33b-4cbd-4b53-acab-05b6bc1228dd" - }, - { - 
"collection": "project_tag", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "d507896c-f16f-4454-9ff7-567f9b629779" - }, - { - "collection": "project_tag", - "action": "delete", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "221e4bf8-7c95-49b5-b310-63f8d6976a50" - }, - { - "collection": "project_tag", - "action": "read", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "ca02e222-6c31-464f-8f7b-7fc3da61deb1" - }, - { - "collection": "project_tag", - "action": "update", - "permissions": { - "_and": [ - { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "50637da6-0bf0-4aec-b69b-ce167b282fc1" - }, - { - "collection": "project", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "efc10442-8cd5-4f6a-a88a-af6c4e01c488" - }, - { - "collection": "project", - "action": "delete", - "permissions": { - "_and": [ - { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": 
"2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "58c6fca7-0c84-42ec-85bd-a5402dea3b2a" - }, - { - "collection": "project", - "action": "read", - "permissions": { - "_and": [ - { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "12681845-d362-493f-b77d-09bda73229e8" - }, - { - "collection": "project", - "action": "update", - "permissions": { - "_and": [ - { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "fe537eb3-cd3b-4ae3-bfc9-d1559d01861e" - }, - { - "collection": "view", - "action": "create", - "permissions": {}, - "validation": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "b22b2817-6730-45c4-9c01-57bebaaa68b4" - }, - { - "collection": "view", - "action": "delete", - "permissions": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "24a33775-2dc7-409d-8e50-f6f2fb8dc67b" - }, - { - "collection": "view", - "action": "read", - "permissions": { - "_and": [ - { - "project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "4be377bf-acab-44af-8be2-9d10471492ef" - }, - { - "collection": "view", - "action": "update", - "permissions": { - "_and": [ - { - 
"project_analysis_run_id": { - "project_id": { - "directus_user_id": { - "_eq": "$CURRENT_USER" - } - } - } - } - ] - }, - "validation": {}, - "presets": null, - "fields": [ - "*" - ], - "policy": "2446660a-ab6c-4801-ad69-5711030cba83", - "_syncId": "a16457ec-f3ad-44e5-acb3-67a96026a479" - }, { "collection": "announcement_activity", "action": "create", @@ -1958,6 +207,24 @@ "policy": "37a60e48-dd00-4867-af07-1fb22ac89078", "_syncId": "3787eedd-e90b-4091-bb50-e85da768a0e4" }, + { + "collection": "announcement_activity", + "action": "delete", + "permissions": { + "_and": [ + { + "user_id": { + "_eq": "$CURRENT_USER" + } + } + ] + }, + "validation": null, + "presets": null, + "fields": null, + "policy": "37a60e48-dd00-4867-af07-1fb22ac89078", + "_syncId": "1f8d508c-8c8c-441b-9d8b-880ed22ac5e9" + }, { "collection": "announcement_activity", "action": "read", @@ -2033,7 +300,15 @@ { "collection": "announcement", "action": "read", - "permissions": null, + "permissions": { + "_and": [ + { + "expires_at": { + "_gte": "$NOW" + } + } + ] + }, "validation": null, "presets": null, "fields": [ @@ -2870,6 +1145,30 @@ "policy": "37a60e48-dd00-4867-af07-1fb22ac89078", "_syncId": "dcd6b757-bbbb-49ec-b5b5-be1cb252e45b" }, + { + "collection": "project_report_metric", + "action": "read", + "permissions": { + "_and": [ + { + "project_report_id": { + "project_id": { + "directus_user_id": { + "_eq": "$CURRENT_USER" + } + } + } + } + ] + }, + "validation": null, + "presets": null, + "fields": [ + "*" + ], + "policy": "37a60e48-dd00-4867-af07-1fb22ac89078", + "_syncId": "88ab1b92-f40f-49fd-aa8a-5acaeccfac54" + }, { "collection": "project_report_notification_participants", "action": "create", @@ -3203,6 +1502,30 @@ "policy": "37a60e48-dd00-4867-af07-1fb22ac89078", "_syncId": "b9e92ecb-d92d-44bb-b7c2-802453104893" }, + { + "collection": "view", + "action": "update", + "permissions": { + "_and": [ + { + "project_analysis_run_id": { + "project_id": { + "directus_user_id": { + "_eq": 
"$CURRENT_USER" + } + } + } + } + ] + }, + "validation": null, + "presets": null, + "fields": [ + "*" + ], + "policy": "37a60e48-dd00-4867-af07-1fb22ac89078", + "_syncId": "dc261b86-dfee-42fb-9b3c-2d0b6fd51c56" + }, { "collection": "conversation_reply", "action": "read", @@ -3377,173 +1700,5 @@ ], "policy": "abf8a154-5b1c-4a46-ac9c-7300570f4f17", "_syncId": "8a8e0d56-e394-47af-8473-9c77b6a0870f" - }, - { - "collection": "aspect", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "a3670e9b-f6cf-4660-b301-de003c296e46" - }, - { - "collection": "conversation_chunk", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "d4bff189-18c9-4137-9d14-aa5603b96bce" - }, - { - "collection": "conversation_project_tag", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "2201bfe1-8b3e-49f6-a556-a5f7ebe4c36a" - }, - { - "collection": "conversation", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "c62bb609-c413-4190-a328-9b5c771a768f" - }, - { - "collection": "insight", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "9bb5e28d-a028-4728-b12c-28868838d248" - }, - { - "collection": "project_analysis_run", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "2722b19d-122d-47db-8831-522d24fc3848" - }, - { - "collection": "project_chat_conversation", - 
"action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "530d06f2-f941-443c-9cbb-8740f5881639" - }, - { - "collection": "project_chat_message_conversation_1", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "9376c26f-c076-4de9-a48f-b5776ea2ce94" - }, - { - "collection": "project_chat_message_conversation", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "aead95f6-ee77-40e1-a6cd-719fb81d47a5" - }, - { - "collection": "project_chat_message", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "b61bf3f0-d988-46e1-9d0f-c74e7050c81e" - }, - { - "collection": "project_chat", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "4c3540bf-c137-4bfa-8996-e352ebec6298" - }, - { - "collection": "project_tag", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "9ef2d9c9-0c64-4e12-b118-13540e00707d" - }, - { - "collection": "project", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": "ba31bcc7-d4d1-4007-8e88-c80032a70d40" - }, - { - "collection": "view", - "action": "read", - "permissions": null, - "validation": null, - "presets": null, - "fields": [ - "*" - ], - "policy": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0", - "_syncId": 
"454b5fe7-aa48-4398-b85d-18c90c29576f" } ] diff --git a/echo/directus/sync/collections/policies.json b/echo/directus/sync/collections/policies.json index c7d03e4d..737f848c 100644 --- a/echo/directus/sync/collections/policies.json +++ b/echo/directus/sync/collections/policies.json @@ -23,45 +23,40 @@ "enforce_tfa": false, "admin_access": false, "app_access": false, - "roles": [ - { - "role": null, - "sort": 1 - } - ], + "roles": [], "_syncId": "_sync_default_public_policy" }, { - "name": "Application User", - "icon": "supervised_user_circle", - "description": "MASTER POLICY", + "name": "Basic User Policy", + "icon": "account_box", + "description": null, "ip_access": null, "enforce_tfa": false, "admin_access": false, "app_access": false, "roles": [ { - "role": "2446660a-ab6c-4801-ad69-5711030cba83", + "role": "feebe863-90b1-41d1-a7ef-9694ddee3844", "sort": 1 } ], - "_syncId": "2446660a-ab6c-4801-ad69-5711030cba83" + "_syncId": "37a60e48-dd00-4867-af07-1fb22ac89078" }, { - "name": "Basic User", - "icon": "sports_baseball", - "description": "THIS CONTAINS EVERYTHING - ASSIGN THIS TO A USER", + "name": "2FA", + "icon": "badge", + "description": null, "ip_access": null, - "enforce_tfa": false, + "enforce_tfa": true, "admin_access": false, "app_access": false, "roles": [ { - "role": "feebe863-90b1-41d1-a7ef-9694ddee3844", + "role": "502f00a4-fa44-4cdf-9650-1d15f3ea23dd", "sort": 1 } ], - "_syncId": "37a60e48-dd00-4867-af07-1fb22ac89078" + "_syncId": "60564979-8a3b-46d4-8bd2-a04a2a2dff9f" }, { "name": "$t:public_label", @@ -78,21 +73,5 @@ } ], "_syncId": "abf8a154-5b1c-4a46-ac9c-7300570f4f17" - }, - { - "name": "Read Only - everything", - "icon": "badge", - "description": null, - "ip_access": null, - "enforce_tfa": false, - "admin_access": false, - "app_access": true, - "roles": [ - { - "role": "d49f0a90-9ea8-429f-aa56-6f3e6ae0dd87", - "sort": 1 - } - ], - "_syncId": "d3f32e42-f4a0-4a1e-acd9-e1a010aa47e0" } ] diff --git a/echo/directus/sync/collections/roles.json 
b/echo/directus/sync/collections/roles.json index 4bc7d9f0..a404cdfe 100644 --- a/echo/directus/sync/collections/roles.json +++ b/echo/directus/sync/collections/roles.json @@ -7,11 +7,11 @@ "_syncId": "_sync_default_admin_role" }, { - "name": "Application User", + "name": "Basic User With 2FA", "icon": "supervised_user_circle", "description": null, - "parent": null, - "_syncId": "2446660a-ab6c-4801-ad69-5711030cba83" + "parent": "feebe863-90b1-41d1-a7ef-9694ddee3844", + "_syncId": "502f00a4-fa44-4cdf-9650-1d15f3ea23dd" }, { "name": "Read-Only", @@ -21,10 +21,10 @@ "_syncId": "d49f0a90-9ea8-429f-aa56-6f3e6ae0dd87" }, { - "name": "Project User", + "name": "Basic User", "icon": "supervised_user_circle", "description": null, - "parent": "2446660a-ab6c-4801-ad69-5711030cba83", + "parent": null, "_syncId": "feebe863-90b1-41d1-a7ef-9694ddee3844" } ] diff --git a/echo/directus/sync/collections/settings.json b/echo/directus/sync/collections/settings.json index 293f5bc7..82e6ba6e 100644 --- a/echo/directus/sync/collections/settings.json +++ b/echo/directus/sync/collections/settings.json @@ -1,6 +1,6 @@ [ { - "project_name": "Directus", + "project_name": "Dembrane", "project_color": "#6644FF", "public_note": null, "auth_login_attempts": 25, @@ -64,6 +64,17 @@ "public_registration_verify_email": true, "public_registration_role": "feebe863-90b1-41d1-a7ef-9694ddee3844", "public_registration_email_filter": null, - "_syncId": "b6e2bbfa-b958-4062-b100-186514c3e197" + "visual_editor_urls": null, + "mcp_enabled": false, + "mcp_allow_deletes": false, + "mcp_prompts_collection": null, + "mcp_system_prompt_enabled": true, + "mcp_system_prompt": null, + "project_owner": null, + "project_usage": null, + "org_name": null, + "product_updates": null, + "project_status": null, + "_syncId": "_sync_default_settings" } ] diff --git a/echo/directus/sync/snapshot/collections/directus_sync_id_map.json b/echo/directus/sync/snapshot/collections/directus_sync_id_map.json deleted file mode 100644 index 
b73d7da4..00000000 --- a/echo/directus/sync/snapshot/collections/directus_sync_id_map.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "collection": "directus_sync_id_map", - "meta": { - "accountability": "all", - "archive_app_filter": true, - "archive_field": null, - "archive_value": null, - "collapse": "open", - "collection": "directus_sync_id_map", - "color": null, - "display_template": null, - "group": null, - "hidden": true, - "icon": null, - "item_duplication_fields": null, - "note": null, - "preview_url": null, - "singleton": false, - "sort": 11, - "sort_field": null, - "translations": null, - "unarchive_value": null, - "versioning": false - }, - "schema": { - "name": "directus_sync_id_map" - } -} diff --git a/echo/directus/sync/snapshot/fields/announcement/activity.json b/echo/directus/sync/snapshot/fields/announcement/activity.json index 1566edf3..55bc9fda 100644 --- a/echo/directus/sync/snapshot/fields/announcement/activity.json +++ b/echo/directus/sync/snapshot/fields/announcement/activity.json @@ -17,6 +17,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/announcement/created_at.json b/echo/directus/sync/snapshot/fields/announcement/created_at.json index e454ea0a..48e02085 100644 --- a/echo/directus/sync/snapshot/fields/announcement/created_at.json +++ b/echo/directus/sync/snapshot/fields/announcement/created_at.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 4, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/announcement/expires_at.json b/echo/directus/sync/snapshot/fields/announcement/expires_at.json index c9f72bee..14fbeee4 100644 --- a/echo/directus/sync/snapshot/fields/announcement/expires_at.json +++ b/echo/directus/sync/snapshot/fields/announcement/expires_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, 
"special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement/id.json b/echo/directus/sync/snapshot/fields/announcement/id.json index ec332836..2f3d9eb5 100644 --- a/echo/directus/sync/snapshot/fields/announcement/id.json +++ b/echo/directus/sync/snapshot/fields/announcement/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/announcement/level.json b/echo/directus/sync/snapshot/fields/announcement/level.json index 13c47c7f..eea2251d 100644 --- a/echo/directus/sync/snapshot/fields/announcement/level.json +++ b/echo/directus/sync/snapshot/fields/announcement/level.json @@ -45,6 +45,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement/sort.json b/echo/directus/sync/snapshot/fields/announcement/sort.json index 2b7a99f7..03dae1a9 100644 --- a/echo/directus/sync/snapshot/fields/announcement/sort.json +++ b/echo/directus/sync/snapshot/fields/announcement/sort.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement/translations.json b/echo/directus/sync/snapshot/fields/announcement/translations.json index 07423137..2eb29cda 100644 --- a/echo/directus/sync/snapshot/fields/announcement/translations.json +++ b/echo/directus/sync/snapshot/fields/announcement/translations.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": [ "translations" diff --git a/echo/directus/sync/snapshot/fields/announcement/updated_at.json b/echo/directus/sync/snapshot/fields/announcement/updated_at.json index cb7f4a81..bcf9b92c 100644 --- a/echo/directus/sync/snapshot/fields/announcement/updated_at.json 
+++ b/echo/directus/sync/snapshot/fields/announcement/updated_at.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 6, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/announcement/user_created.json b/echo/directus/sync/snapshot/fields/announcement/user_created.json index 1724e8ca..7a9fd4ce 100644 --- a/echo/directus/sync/snapshot/fields/announcement/user_created.json +++ b/echo/directus/sync/snapshot/fields/announcement/user_created.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "user-created" diff --git a/echo/directus/sync/snapshot/fields/announcement/user_updated.json b/echo/directus/sync/snapshot/fields/announcement/user_updated.json index 41eb161d..847cf15c 100644 --- a/echo/directus/sync/snapshot/fields/announcement/user_updated.json +++ b/echo/directus/sync/snapshot/fields/announcement/user_updated.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 5, "special": [ "user-updated" diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/announcement_activity.json b/echo/directus/sync/snapshot/fields/announcement_activity/announcement_activity.json index d3e876ef..e9959ade 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/announcement_activity.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/announcement_activity.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/created_at.json b/echo/directus/sync/snapshot/fields/announcement_activity/created_at.json index 905e82a5..a47820e1 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/created_at.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/created_at.json @@ -17,6 +17,7 @@ 
"options": null, "readonly": true, "required": false, + "searchable": true, "sort": 4, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/id.json b/echo/directus/sync/snapshot/fields/announcement_activity/id.json index a2b99aea..3f3836dc 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/id.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/read.json b/echo/directus/sync/snapshot/fields/announcement_activity/read.json index 9a686e55..ea0547af 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/read.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/read.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/sort.json b/echo/directus/sync/snapshot/fields/announcement_activity/sort.json index c7d433d8..c6bc4f7c 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/sort.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/sort.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/updated_at.json b/echo/directus/sync/snapshot/fields/announcement_activity/updated_at.json index 25655ad3..de0877a9 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/updated_at.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/updated_at.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 6, "special": [ "date-updated" diff --git 
a/echo/directus/sync/snapshot/fields/announcement_activity/user_created.json b/echo/directus/sync/snapshot/fields/announcement_activity/user_created.json index d82fc3c9..f284396a 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/user_created.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/user_created.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "user-created" diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/user_id.json b/echo/directus/sync/snapshot/fields/announcement_activity/user_id.json index bfc5aef8..ae585993 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/user_id.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/user_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": [ "user-created" diff --git a/echo/directus/sync/snapshot/fields/announcement_activity/user_updated.json b/echo/directus/sync/snapshot/fields/announcement_activity/user_updated.json index ac748cc2..7616720f 100644 --- a/echo/directus/sync/snapshot/fields/announcement_activity/user_updated.json +++ b/echo/directus/sync/snapshot/fields/announcement_activity/user_updated.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 5, "special": [ "user-updated" diff --git a/echo/directus/sync/snapshot/fields/announcement_translations/announcement_id.json b/echo/directus/sync/snapshot/fields/announcement_translations/announcement_id.json index a11f6e62..5ce4438c 100644 --- a/echo/directus/sync/snapshot/fields/announcement_translations/announcement_id.json +++ b/echo/directus/sync/snapshot/fields/announcement_translations/announcement_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git 
a/echo/directus/sync/snapshot/fields/announcement_translations/id.json b/echo/directus/sync/snapshot/fields/announcement_translations/id.json index 20550a02..df7f29ed 100644 --- a/echo/directus/sync/snapshot/fields/announcement_translations/id.json +++ b/echo/directus/sync/snapshot/fields/announcement_translations/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement_translations/languages_code.json b/echo/directus/sync/snapshot/fields/announcement_translations/languages_code.json index e88997db..05c6bec4 100644 --- a/echo/directus/sync/snapshot/fields/announcement_translations/languages_code.json +++ b/echo/directus/sync/snapshot/fields/announcement_translations/languages_code.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement_translations/message.json b/echo/directus/sync/snapshot/fields/announcement_translations/message.json index 17f14ced..ea107f0c 100644 --- a/echo/directus/sync/snapshot/fields/announcement_translations/message.json +++ b/echo/directus/sync/snapshot/fields/announcement_translations/message.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/announcement_translations/title.json b/echo/directus/sync/snapshot/fields/announcement_translations/title.json index aaf375aa..4980454c 100644 --- a/echo/directus/sync/snapshot/fields/announcement_translations/title.json +++ b/echo/directus/sync/snapshot/fields/announcement_translations/title.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git 
a/echo/directus/sync/snapshot/fields/aspect/aspect_segment.json b/echo/directus/sync/snapshot/fields/aspect/aspect_segment.json index fb693576..3ecd7ce8 100644 --- a/echo/directus/sync/snapshot/fields/aspect/aspect_segment.json +++ b/echo/directus/sync/snapshot/fields/aspect/aspect_segment.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/aspect/created_at.json b/echo/directus/sync/snapshot/fields/aspect/created_at.json index 231856f0..b3925789 100644 --- a/echo/directus/sync/snapshot/fields/aspect/created_at.json +++ b/echo/directus/sync/snapshot/fields/aspect/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/aspect/description.json b/echo/directus/sync/snapshot/fields/aspect/description.json index 192b2956..c4bec5f9 100644 --- a/echo/directus/sync/snapshot/fields/aspect/description.json +++ b/echo/directus/sync/snapshot/fields/aspect/description.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect/id.json b/echo/directus/sync/snapshot/fields/aspect/id.json index f5599d06..2e6f818b 100644 --- a/echo/directus/sync/snapshot/fields/aspect/id.json +++ b/echo/directus/sync/snapshot/fields/aspect/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/aspect/image_url.json b/echo/directus/sync/snapshot/fields/aspect/image_url.json index 6e0b22da..e8c06e87 100644 --- a/echo/directus/sync/snapshot/fields/aspect/image_url.json +++ b/echo/directus/sync/snapshot/fields/aspect/image_url.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, 
"required": false, + "searchable": true, "sort": 9, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect/long_summary.json b/echo/directus/sync/snapshot/fields/aspect/long_summary.json index cd033608..7b75b724 100644 --- a/echo/directus/sync/snapshot/fields/aspect/long_summary.json +++ b/echo/directus/sync/snapshot/fields/aspect/long_summary.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect/name.json b/echo/directus/sync/snapshot/fields/aspect/name.json index f8cf14d2..0b50a7e0 100644 --- a/echo/directus/sync/snapshot/fields/aspect/name.json +++ b/echo/directus/sync/snapshot/fields/aspect/name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect/short_summary.json b/echo/directus/sync/snapshot/fields/aspect/short_summary.json index f119b3b3..3a3d67e6 100644 --- a/echo/directus/sync/snapshot/fields/aspect/short_summary.json +++ b/echo/directus/sync/snapshot/fields/aspect/short_summary.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect/updated_at.json b/echo/directus/sync/snapshot/fields/aspect/updated_at.json index a59bb6c8..c0d727ce 100644 --- a/echo/directus/sync/snapshot/fields/aspect/updated_at.json +++ b/echo/directus/sync/snapshot/fields/aspect/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/aspect/view_id.json b/echo/directus/sync/snapshot/fields/aspect/view_id.json index 4730cf24..999d5eaa 100644 --- 
a/echo/directus/sync/snapshot/fields/aspect/view_id.json +++ b/echo/directus/sync/snapshot/fields/aspect/view_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect_segment/aspect.json b/echo/directus/sync/snapshot/fields/aspect_segment/aspect.json index 44e2bafc..844ceeb7 100644 --- a/echo/directus/sync/snapshot/fields/aspect_segment/aspect.json +++ b/echo/directus/sync/snapshot/fields/aspect_segment/aspect.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect_segment/description.json b/echo/directus/sync/snapshot/fields/aspect_segment/description.json index b22cca23..e340c5f2 100644 --- a/echo/directus/sync/snapshot/fields/aspect_segment/description.json +++ b/echo/directus/sync/snapshot/fields/aspect_segment/description.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect_segment/id.json b/echo/directus/sync/snapshot/fields/aspect_segment/id.json index fad53d4b..851649fd 100644 --- a/echo/directus/sync/snapshot/fields/aspect_segment/id.json +++ b/echo/directus/sync/snapshot/fields/aspect_segment/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/aspect_segment/relevant_index.json b/echo/directus/sync/snapshot/fields/aspect_segment/relevant_index.json index c44b3dfd..cd6d7796 100644 --- a/echo/directus/sync/snapshot/fields/aspect_segment/relevant_index.json +++ b/echo/directus/sync/snapshot/fields/aspect_segment/relevant_index.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, 
"required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/aspect_segment/segment.json b/echo/directus/sync/snapshot/fields/aspect_segment/segment.json index ac9cfc23..a516d5f2 100644 --- a/echo/directus/sync/snapshot/fields/aspect_segment/segment.json +++ b/echo/directus/sync/snapshot/fields/aspect_segment/segment.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/aspect_segment/verbatim_transcript.json b/echo/directus/sync/snapshot/fields/aspect_segment/verbatim_transcript.json index 19ab3208..90f07de9 100644 --- a/echo/directus/sync/snapshot/fields/aspect_segment/verbatim_transcript.json +++ b/echo/directus/sync/snapshot/fields/aspect_segment/verbatim_transcript.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/chunks.json b/echo/directus/sync/snapshot/fields/conversation/chunks.json index 69670b8a..e0b45493 100644 --- a/echo/directus/sync/snapshot/fields/conversation/chunks.json +++ b/echo/directus/sync/snapshot/fields/conversation/chunks.json @@ -23,6 +23,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 12, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/conversation_segments.json b/echo/directus/sync/snapshot/fields/conversation/conversation_segments.json index 383cc449..695a9411 100644 --- a/echo/directus/sync/snapshot/fields/conversation/conversation_segments.json +++ b/echo/directus/sync/snapshot/fields/conversation/conversation_segments.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 16, "special": [ "o2m" diff --git 
a/echo/directus/sync/snapshot/fields/conversation/created_at.json b/echo/directus/sync/snapshot/fields/conversation/created_at.json index ed5b62d5..ee8d1a54 100644 --- a/echo/directus/sync/snapshot/fields/conversation/created_at.json +++ b/echo/directus/sync/snapshot/fields/conversation/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/conversation/duration.json b/echo/directus/sync/snapshot/fields/conversation/duration.json index 0f9af9ea..fc9d53bf 100644 --- a/echo/directus/sync/snapshot/fields/conversation/duration.json +++ b/echo/directus/sync/snapshot/fields/conversation/duration.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 17, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/id.json b/echo/directus/sync/snapshot/fields/conversation/id.json index a2a3e9ff..b0390b5d 100644 --- a/echo/directus/sync/snapshot/fields/conversation/id.json +++ b/echo/directus/sync/snapshot/fields/conversation/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/conversation/is_all_chunks_transcribed.json b/echo/directus/sync/snapshot/fields/conversation/is_all_chunks_transcribed.json index 5ed325e6..d0584b36 100644 --- a/echo/directus/sync/snapshot/fields/conversation/is_all_chunks_transcribed.json +++ b/echo/directus/sync/snapshot/fields/conversation/is_all_chunks_transcribed.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 22, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/conversation/is_audio_processing_finished.json b/echo/directus/sync/snapshot/fields/conversation/is_audio_processing_finished.json index 
9d433773..2f7a3bef 100644 --- a/echo/directus/sync/snapshot/fields/conversation/is_audio_processing_finished.json +++ b/echo/directus/sync/snapshot/fields/conversation/is_audio_processing_finished.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 21, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/conversation/is_finished.json b/echo/directus/sync/snapshot/fields/conversation/is_finished.json index 98a35542..7e6352bb 100644 --- a/echo/directus/sync/snapshot/fields/conversation/is_finished.json +++ b/echo/directus/sync/snapshot/fields/conversation/is_finished.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 20, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/conversation/linked_conversations.json b/echo/directus/sync/snapshot/fields/conversation/linked_conversations.json index 6836725f..38198ad2 100644 --- a/echo/directus/sync/snapshot/fields/conversation/linked_conversations.json +++ b/echo/directus/sync/snapshot/fields/conversation/linked_conversations.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 23, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/linking_conversations.json b/echo/directus/sync/snapshot/fields/conversation/linking_conversations.json index fc69aaf3..6449e4d3 100644 --- a/echo/directus/sync/snapshot/fields/conversation/linking_conversations.json +++ b/echo/directus/sync/snapshot/fields/conversation/linking_conversations.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 24, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/merged_audio_path.json b/echo/directus/sync/snapshot/fields/conversation/merged_audio_path.json index 2e090086..b29a3865 100644 --- a/echo/directus/sync/snapshot/fields/conversation/merged_audio_path.json +++ 
b/echo/directus/sync/snapshot/fields/conversation/merged_audio_path.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 19, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/merged_transcript.json b/echo/directus/sync/snapshot/fields/conversation/merged_transcript.json index 940070c5..3657c7b2 100644 --- a/echo/directus/sync/snapshot/fields/conversation/merged_transcript.json +++ b/echo/directus/sync/snapshot/fields/conversation/merged_transcript.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 18, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/participant_email.json b/echo/directus/sync/snapshot/fields/conversation/participant_email.json index 8d313ce0..51f29ebe 100644 --- a/echo/directus/sync/snapshot/fields/conversation/participant_email.json +++ b/echo/directus/sync/snapshot/fields/conversation/participant_email.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/participant_name.json b/echo/directus/sync/snapshot/fields/conversation/participant_name.json index e7dd10ff..23f25715 100644 --- a/echo/directus/sync/snapshot/fields/conversation/participant_name.json +++ b/echo/directus/sync/snapshot/fields/conversation/participant_name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/participant_user_agent.json b/echo/directus/sync/snapshot/fields/conversation/participant_user_agent.json index 2183c2ac..dbe094af 100644 --- a/echo/directus/sync/snapshot/fields/conversation/participant_user_agent.json +++ 
b/echo/directus/sync/snapshot/fields/conversation/participant_user_agent.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/processing_status.json b/echo/directus/sync/snapshot/fields/conversation/processing_status.json index 600fbe85..473bb5c7 100644 --- a/echo/directus/sync/snapshot/fields/conversation/processing_status.json +++ b/echo/directus/sync/snapshot/fields/conversation/processing_status.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/project_chat_messages.json b/echo/directus/sync/snapshot/fields/conversation/project_chat_messages.json index fb8ca754..c9e5837c 100644 --- a/echo/directus/sync/snapshot/fields/conversation/project_chat_messages.json +++ b/echo/directus/sync/snapshot/fields/conversation/project_chat_messages.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 14, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/project_chats.json b/echo/directus/sync/snapshot/fields/conversation/project_chats.json index 0bf97d33..cd1d5847 100644 --- a/echo/directus/sync/snapshot/fields/conversation/project_chats.json +++ b/echo/directus/sync/snapshot/fields/conversation/project_chats.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 13, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/project_id.json b/echo/directus/sync/snapshot/fields/conversation/project_id.json index ff6a1e04..14bd16d1 100644 --- a/echo/directus/sync/snapshot/fields/conversation/project_id.json +++ b/echo/directus/sync/snapshot/fields/conversation/project_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, 
"required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/replies.json b/echo/directus/sync/snapshot/fields/conversation/replies.json index 06deabf8..cf040c13 100644 --- a/echo/directus/sync/snapshot/fields/conversation/replies.json +++ b/echo/directus/sync/snapshot/fields/conversation/replies.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 15, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/source.json b/echo/directus/sync/snapshot/fields/conversation/source.json index cdb0a2d0..5aacc6f5 100644 --- a/echo/directus/sync/snapshot/fields/conversation/source.json +++ b/echo/directus/sync/snapshot/fields/conversation/source.json @@ -28,6 +28,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/summary.json b/echo/directus/sync/snapshot/fields/conversation/summary.json index d8dbea1c..c54e0805 100644 --- a/echo/directus/sync/snapshot/fields/conversation/summary.json +++ b/echo/directus/sync/snapshot/fields/conversation/summary.json @@ -30,6 +30,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation/tags.json b/echo/directus/sync/snapshot/fields/conversation/tags.json index 4c95a868..ba140876 100644 --- a/echo/directus/sync/snapshot/fields/conversation/tags.json +++ b/echo/directus/sync/snapshot/fields/conversation/tags.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/conversation/updated_at.json b/echo/directus/sync/snapshot/fields/conversation/updated_at.json index 4caac63d..a37408da 100644 --- 
a/echo/directus/sync/snapshot/fields/conversation/updated_at.json +++ b/echo/directus/sync/snapshot/fields/conversation/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_id.json index 4b0e1a53..845b411f 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_id.json @@ -18,6 +18,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_segments.json b/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_segments.json index 24db509b..4bfe1297 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_segments.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/conversation_segments.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/created_at.json b/echo/directus/sync/snapshot/fields/conversation_chunk/created_at.json index b91cebd9..dad00a54 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/created_at.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/cross_talk_instances.json b/echo/directus/sync/snapshot/fields/conversation_chunk/cross_talk_instances.json index a6027926..9d3f5165 100644 --- 
a/echo/directus/sync/snapshot/fields/conversation_chunk/cross_talk_instances.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/cross_talk_instances.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 15, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/desired_language.json b/echo/directus/sync/snapshot/fields/conversation_chunk/desired_language.json index 764e638b..4c979634 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/desired_language.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/desired_language.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 20, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language.json b/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language.json index 0e1dab51..048f8af5 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 21, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language_confidence.json b/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language_confidence.json index dbf48c48..94ec5939 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language_confidence.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/detected_language_confidence.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 22, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/diarization.json 
b/echo/directus/sync/snapshot/fields/conversation_chunk/diarization.json index 46141b0f..e8b7cb17 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/diarization.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/diarization.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 17, "special": [ "cast-json" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/error.json b/echo/directus/sync/snapshot/fields/conversation_chunk/error.json index b978c94c..6f151012 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/error.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/error.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_reason.json b/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_reason.json index 4392186f..617a5740 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_reason.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_reason.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 18, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_score.json b/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_score.json index 2265ebd5..1bf55205 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_score.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/hallucination_score.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 19, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/id.json 
b/echo/directus/sync/snapshot/fields/conversation_chunk/id.json index 90b7abd8..9ee3be9c 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/id.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/noise_ratio.json b/echo/directus/sync/snapshot/fields/conversation_chunk/noise_ratio.json index 2184931f..5c294581 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/noise_ratio.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/noise_ratio.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 14, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/path.json b/echo/directus/sync/snapshot/fields/conversation_chunk/path.json index 2f76a955..10f1ded2 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/path.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/path.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/processing_status.json b/echo/directus/sync/snapshot/fields/conversation_chunk/processing_status.json index 76a60cdf..f3456584 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/processing_status.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/processing_status.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/raw_transcript.json b/echo/directus/sync/snapshot/fields/conversation_chunk/raw_transcript.json index e3e50053..4dd39cc5 100644 --- 
a/echo/directus/sync/snapshot/fields/conversation_chunk/raw_transcript.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/raw_transcript.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 23, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_job_status_link.json b/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_job_status_link.json index 71525afa..6f308a7b 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_job_status_link.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_job_status_link.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 13, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_request_count.json b/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_request_count.json index 4892a215..e6188ff6 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_request_count.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/runpod_request_count.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 12, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/silence_ratio.json b/echo/directus/sync/snapshot/fields/conversation_chunk/silence_ratio.json index d117a0cc..f65f5ebb 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/silence_ratio.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/silence_ratio.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 16, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/source.json 
b/echo/directus/sync/snapshot/fields/conversation_chunk/source.json index c0f0ff71..30f5d3eb 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/source.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/source.json @@ -34,6 +34,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/timestamp.json b/echo/directus/sync/snapshot/fields/conversation_chunk/timestamp.json index 223fe490..4ebd7fcc 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/timestamp.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/timestamp.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/transcript.json b/echo/directus/sync/snapshot/fields/conversation_chunk/transcript.json index e509687b..1578f225 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/transcript.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/transcript.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/translation_error.json b/echo/directus/sync/snapshot/fields/conversation_chunk/translation_error.json index ca8e3d69..851b2251 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/translation_error.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/translation_error.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 24, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_chunk/updated_at.json 
b/echo/directus/sync/snapshot/fields/conversation_chunk/updated_at.json index 3a376674..e1dcf1cd 100644 --- a/echo/directus/sync/snapshot/fields/conversation_chunk/updated_at.json +++ b/echo/directus/sync/snapshot/fields/conversation_chunk/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/conversation_link/date_created.json b/echo/directus/sync/snapshot/fields/conversation_link/date_created.json index da729a61..f7096de8 100644 --- a/echo/directus/sync/snapshot/fields/conversation_link/date_created.json +++ b/echo/directus/sync/snapshot/fields/conversation_link/date_created.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/conversation_link/date_updated.json b/echo/directus/sync/snapshot/fields/conversation_link/date_updated.json index 3f256088..0ae350d2 100644 --- a/echo/directus/sync/snapshot/fields/conversation_link/date_updated.json +++ b/echo/directus/sync/snapshot/fields/conversation_link/date_updated.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 4, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/conversation_link/id.json b/echo/directus/sync/snapshot/fields/conversation_link/id.json index f26d83a9..1e8d8f5f 100644 --- a/echo/directus/sync/snapshot/fields/conversation_link/id.json +++ b/echo/directus/sync/snapshot/fields/conversation_link/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_link/link_type.json b/echo/directus/sync/snapshot/fields/conversation_link/link_type.json index 5766cd87..ef4c78dd 100644 --- 
a/echo/directus/sync/snapshot/fields/conversation_link/link_type.json +++ b/echo/directus/sync/snapshot/fields/conversation_link/link_type.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_link/source_conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_link/source_conversation_id.json index 1a05eec5..afccda0c 100644 --- a/echo/directus/sync/snapshot/fields/conversation_link/source_conversation_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_link/source_conversation_id.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/conversation_link/target_conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_link/target_conversation_id.json index 5e5364f1..779b912f 100644 --- a/echo/directus/sync/snapshot/fields/conversation_link/target_conversation_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_link/target_conversation_id.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/conversation_project_tag/conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_project_tag/conversation_id.json index 974b4db0..acba5319 100644 --- a/echo/directus/sync/snapshot/fields/conversation_project_tag/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_project_tag/conversation_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_project_tag/id.json b/echo/directus/sync/snapshot/fields/conversation_project_tag/id.json index 6cf0c889..405404e4 100644 --- 
a/echo/directus/sync/snapshot/fields/conversation_project_tag/id.json +++ b/echo/directus/sync/snapshot/fields/conversation_project_tag/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_project_tag/project_tag_id.json b/echo/directus/sync/snapshot/fields/conversation_project_tag/project_tag_id.json index c9f60505..b7dada03 100644 --- a/echo/directus/sync/snapshot/fields/conversation_project_tag/project_tag_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_project_tag/project_tag_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/content_text.json b/echo/directus/sync/snapshot/fields/conversation_reply/content_text.json index 26cfbc9b..3976dfdd 100644 --- a/echo/directus/sync/snapshot/fields/conversation_reply/content_text.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/content_text.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_reply/conversation_id.json index 5d4af35d..ae83abb1 100644 --- a/echo/directus/sync/snapshot/fields/conversation_reply/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/conversation_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/date_created.json b/echo/directus/sync/snapshot/fields/conversation_reply/date_created.json index dc4ffe25..8b5ea21c 100644 
--- a/echo/directus/sync/snapshot/fields/conversation_reply/date_created.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/date_created.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/id.json b/echo/directus/sync/snapshot/fields/conversation_reply/id.json index 21b8565c..3f9ad90f 100644 --- a/echo/directus/sync/snapshot/fields/conversation_reply/id.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/reply.json b/echo/directus/sync/snapshot/fields/conversation_reply/reply.json index 924de587..6bef3339 100644 --- a/echo/directus/sync/snapshot/fields/conversation_reply/reply.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/reply.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/sort.json b/echo/directus/sync/snapshot/fields/conversation_reply/sort.json index c50b9107..06c54157 100644 --- a/echo/directus/sync/snapshot/fields/conversation_reply/sort.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/sort.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_reply/type.json b/echo/directus/sync/snapshot/fields/conversation_reply/type.json index 562eaea8..b55f2545 100644 --- a/echo/directus/sync/snapshot/fields/conversation_reply/type.json +++ b/echo/directus/sync/snapshot/fields/conversation_reply/type.json @@ -15,6 +15,7 @@ "options": null, 
"readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/chunks.json b/echo/directus/sync/snapshot/fields/conversation_segment/chunks.json index 7355fb3f..39f3637f 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/chunks.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/chunks.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/config_id.json b/echo/directus/sync/snapshot/fields/conversation_segment/config_id.json index 700f02e2..36006288 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/config_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/config_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/contextual_transcript.json b/echo/directus/sync/snapshot/fields/conversation_segment/contextual_transcript.json index 2458cc96..58b999b6 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/contextual_transcript.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/contextual_transcript.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_segment/conversation_id.json index 16912c44..889dd089 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/conversation_id.json @@ -19,6 +19,7 @@ }, "readonly": false, 
"required": false, + "searchable": true, "sort": 12, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/counter.json b/echo/directus/sync/snapshot/fields/conversation_segment/counter.json index 0a6bc902..ff60e483 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/counter.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/counter.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/id.json b/echo/directus/sync/snapshot/fields/conversation_segment/id.json index b51a44fb..04bb7fe3 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/id.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/lightrag_flag.json b/echo/directus/sync/snapshot/fields/conversation_segment/lightrag_flag.json index 4d5cf9e8..f7c29709 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/lightrag_flag.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/lightrag_flag.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/conversation_segment/path.json b/echo/directus/sync/snapshot/fields/conversation_segment/path.json index 2344f65d..b7e5b644 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/path.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/path.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": null, "translations": null, diff --git 
a/echo/directus/sync/snapshot/fields/conversation_segment/transcript.json b/echo/directus/sync/snapshot/fields/conversation_segment/transcript.json index d8b4a5d8..c11c8cdd 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment/transcript.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment/transcript.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_chunk_id.json b/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_chunk_id.json index a2f30b04..84a3b53d 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_chunk_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_chunk_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_segment_id.json b/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_segment_id.json index 489b15c8..5fc803e7 100644 --- a/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_segment_id.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/conversation_segment_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/id.json b/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/id.json index 7b05bf8d..d94e7ee2 100644 --- 
a/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/id.json +++ b/echo/directus/sync/snapshot/fields/conversation_segment_conversation_chunk/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/directus_sync_id_map/created_at.json b/echo/directus/sync/snapshot/fields/directus_sync_id_map/created_at.json deleted file mode 100644 index 09d4108f..00000000 --- a/echo/directus/sync/snapshot/fields/directus_sync_id_map/created_at.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "collection": "directus_sync_id_map", - "field": "created_at", - "type": "timestamp", - "meta": { - "collection": "directus_sync_id_map", - "conditions": null, - "display": null, - "display_options": null, - "field": "created_at", - "group": null, - "hidden": false, - "interface": null, - "note": null, - "options": null, - "readonly": false, - "required": false, - "sort": null, - "special": null, - "translations": null, - "validation": null, - "validation_message": null, - "width": "full" - }, - "schema": { - "name": "created_at", - "table": "directus_sync_id_map", - "data_type": "timestamp with time zone", - "default_value": "CURRENT_TIMESTAMP", - "max_length": null, - "numeric_precision": null, - "numeric_scale": null, - "is_nullable": true, - "is_unique": false, - "is_indexed": true, - "is_primary_key": false, - "is_generated": false, - "generation_expression": null, - "has_auto_increment": false, - "foreign_key_table": null, - "foreign_key_column": null - } -} diff --git a/echo/directus/sync/snapshot/fields/directus_sync_id_map/id.json b/echo/directus/sync/snapshot/fields/directus_sync_id_map/id.json deleted file mode 100644 index 9d142ac9..00000000 --- a/echo/directus/sync/snapshot/fields/directus_sync_id_map/id.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "collection": "directus_sync_id_map", - "field": "id", - "type": "integer", - 
"meta": { - "collection": "directus_sync_id_map", - "conditions": null, - "display": null, - "display_options": null, - "field": "id", - "group": null, - "hidden": false, - "interface": null, - "note": null, - "options": null, - "readonly": false, - "required": false, - "sort": null, - "special": null, - "translations": null, - "validation": null, - "validation_message": null, - "width": "full" - }, - "schema": { - "name": "id", - "table": "directus_sync_id_map", - "data_type": "integer", - "default_value": "nextval('directus_sync_id_map_id_seq'::regclass)", - "max_length": null, - "numeric_precision": 32, - "numeric_scale": 0, - "is_nullable": false, - "is_unique": true, - "is_indexed": false, - "is_primary_key": true, - "is_generated": false, - "generation_expression": null, - "has_auto_increment": true, - "foreign_key_table": null, - "foreign_key_column": null - } -} diff --git a/echo/directus/sync/snapshot/fields/directus_sync_id_map/local_id.json b/echo/directus/sync/snapshot/fields/directus_sync_id_map/local_id.json deleted file mode 100644 index 50e1fc3c..00000000 --- a/echo/directus/sync/snapshot/fields/directus_sync_id_map/local_id.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "collection": "directus_sync_id_map", - "field": "local_id", - "type": "string", - "meta": { - "collection": "directus_sync_id_map", - "conditions": null, - "display": null, - "display_options": null, - "field": "local_id", - "group": null, - "hidden": false, - "interface": null, - "note": null, - "options": null, - "readonly": false, - "required": false, - "sort": null, - "special": null, - "translations": null, - "validation": null, - "validation_message": null, - "width": "full" - }, - "schema": { - "name": "local_id", - "table": "directus_sync_id_map", - "data_type": "character varying", - "default_value": null, - "max_length": 255, - "numeric_precision": null, - "numeric_scale": null, - "is_nullable": false, - "is_unique": false, - "is_indexed": false, - "is_primary_key": false, - 
"is_generated": false, - "generation_expression": null, - "has_auto_increment": false, - "foreign_key_table": null, - "foreign_key_column": null - } -} diff --git a/echo/directus/sync/snapshot/fields/directus_sync_id_map/sync_id.json b/echo/directus/sync/snapshot/fields/directus_sync_id_map/sync_id.json deleted file mode 100644 index 2e239f21..00000000 --- a/echo/directus/sync/snapshot/fields/directus_sync_id_map/sync_id.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "collection": "directus_sync_id_map", - "field": "sync_id", - "type": "string", - "meta": { - "collection": "directus_sync_id_map", - "conditions": null, - "display": null, - "display_options": null, - "field": "sync_id", - "group": null, - "hidden": false, - "interface": null, - "note": null, - "options": null, - "readonly": false, - "required": false, - "sort": null, - "special": null, - "translations": null, - "validation": null, - "validation_message": null, - "width": "full" - }, - "schema": { - "name": "sync_id", - "table": "directus_sync_id_map", - "data_type": "character varying", - "default_value": null, - "max_length": 255, - "numeric_precision": null, - "numeric_scale": null, - "is_nullable": false, - "is_unique": false, - "is_indexed": false, - "is_primary_key": false, - "is_generated": false, - "generation_expression": null, - "has_auto_increment": false, - "foreign_key_table": null, - "foreign_key_column": null - } -} diff --git a/echo/directus/sync/snapshot/fields/directus_sync_id_map/table.json b/echo/directus/sync/snapshot/fields/directus_sync_id_map/table.json deleted file mode 100644 index 550fe969..00000000 --- a/echo/directus/sync/snapshot/fields/directus_sync_id_map/table.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "collection": "directus_sync_id_map", - "field": "table", - "type": "string", - "meta": { - "collection": "directus_sync_id_map", - "conditions": null, - "display": null, - "display_options": null, - "field": "table", - "group": null, - "hidden": false, - "interface": null, - 
"note": null, - "options": null, - "readonly": false, - "required": false, - "sort": null, - "special": null, - "translations": null, - "validation": null, - "validation_message": null, - "width": "full" - }, - "schema": { - "name": "table", - "table": "directus_sync_id_map", - "data_type": "character varying", - "default_value": null, - "max_length": 255, - "numeric_precision": null, - "numeric_scale": null, - "is_nullable": false, - "is_unique": false, - "is_indexed": false, - "is_primary_key": false, - "is_generated": false, - "generation_expression": null, - "has_auto_increment": false, - "foreign_key_table": null, - "foreign_key_column": null - } -} diff --git a/echo/directus/sync/snapshot/fields/directus_users/disable_create_project.json b/echo/directus/sync/snapshot/fields/directus_users/disable_create_project.json index 74f96d68..baab55b9 100644 --- a/echo/directus/sync/snapshot/fields/directus_users/disable_create_project.json +++ b/echo/directus/sync/snapshot/fields/directus_users/disable_create_project.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/directus_users/projects.json b/echo/directus/sync/snapshot/fields/directus_users/projects.json index 5cac5e99..f683bbd0 100644 --- a/echo/directus/sync/snapshot/fields/directus_users/projects.json +++ b/echo/directus/sync/snapshot/fields/directus_users/projects.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/insight/created_at.json b/echo/directus/sync/snapshot/fields/insight/created_at.json index 834047eb..0076b08b 100644 --- a/echo/directus/sync/snapshot/fields/insight/created_at.json +++ b/echo/directus/sync/snapshot/fields/insight/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, 
"sort": 2, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/insight/id.json b/echo/directus/sync/snapshot/fields/insight/id.json index aff47a5a..d914f272 100644 --- a/echo/directus/sync/snapshot/fields/insight/id.json +++ b/echo/directus/sync/snapshot/fields/insight/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/insight/project_analysis_run_id.json b/echo/directus/sync/snapshot/fields/insight/project_analysis_run_id.json index 25311a82..0b229643 100644 --- a/echo/directus/sync/snapshot/fields/insight/project_analysis_run_id.json +++ b/echo/directus/sync/snapshot/fields/insight/project_analysis_run_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/insight/summary.json b/echo/directus/sync/snapshot/fields/insight/summary.json index 70182ac1..10d42f1c 100644 --- a/echo/directus/sync/snapshot/fields/insight/summary.json +++ b/echo/directus/sync/snapshot/fields/insight/summary.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/insight/title.json b/echo/directus/sync/snapshot/fields/insight/title.json index d81babf6..2e040f91 100644 --- a/echo/directus/sync/snapshot/fields/insight/title.json +++ b/echo/directus/sync/snapshot/fields/insight/title.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/insight/updated_at.json b/echo/directus/sync/snapshot/fields/insight/updated_at.json index 468a7840..daf806eb 100644 --- a/echo/directus/sync/snapshot/fields/insight/updated_at.json 
+++ b/echo/directus/sync/snapshot/fields/insight/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/languages/code.json b/echo/directus/sync/snapshot/fields/languages/code.json index fca9bd7b..577b7536 100644 --- a/echo/directus/sync/snapshot/fields/languages/code.json +++ b/echo/directus/sync/snapshot/fields/languages/code.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/languages/direction.json b/echo/directus/sync/snapshot/fields/languages/direction.json index 2eb9e18c..12331681 100644 --- a/echo/directus/sync/snapshot/fields/languages/direction.json +++ b/echo/directus/sync/snapshot/fields/languages/direction.json @@ -38,6 +38,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/languages/name.json b/echo/directus/sync/snapshot/fields/languages/name.json index 40bb6eb5..35aceafb 100644 --- a/echo/directus/sync/snapshot/fields/languages/name.json +++ b/echo/directus/sync/snapshot/fields/languages/name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/processing_status/conversation_chunk_id.json b/echo/directus/sync/snapshot/fields/processing_status/conversation_chunk_id.json index af96e138..17fe280f 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/conversation_chunk_id.json +++ b/echo/directus/sync/snapshot/fields/processing_status/conversation_chunk_id.json @@ -18,6 +18,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "m2o" diff --git 
a/echo/directus/sync/snapshot/fields/processing_status/conversation_id.json b/echo/directus/sync/snapshot/fields/processing_status/conversation_id.json index a7c5e8d9..3becaa82 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/processing_status/conversation_id.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/processing_status/duration_ms.json b/echo/directus/sync/snapshot/fields/processing_status/duration_ms.json index 12f7a2c0..40347c01 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/duration_ms.json +++ b/echo/directus/sync/snapshot/fields/processing_status/duration_ms.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/processing_status/event.json b/echo/directus/sync/snapshot/fields/processing_status/event.json index 4729aacf..f0aeee11 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/event.json +++ b/echo/directus/sync/snapshot/fields/processing_status/event.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/processing_status/id.json b/echo/directus/sync/snapshot/fields/processing_status/id.json index e2f67efd..15a5d170 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/id.json +++ b/echo/directus/sync/snapshot/fields/processing_status/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/processing_status/message.json b/echo/directus/sync/snapshot/fields/processing_status/message.json index 
61c6d962..46ee0eca 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/message.json +++ b/echo/directus/sync/snapshot/fields/processing_status/message.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/processing_status/parent.json b/echo/directus/sync/snapshot/fields/processing_status/parent.json index 1a59010e..71de80c0 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/parent.json +++ b/echo/directus/sync/snapshot/fields/processing_status/parent.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/processing_status/project_analysis_run_id.json b/echo/directus/sync/snapshot/fields/processing_status/project_analysis_run_id.json index f2b5418a..2596c962 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/project_analysis_run_id.json +++ b/echo/directus/sync/snapshot/fields/processing_status/project_analysis_run_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/processing_status/project_id.json b/echo/directus/sync/snapshot/fields/processing_status/project_id.json index 7d0b8c99..01f4a4cc 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/project_id.json +++ b/echo/directus/sync/snapshot/fields/processing_status/project_id.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/processing_status/timestamp.json b/echo/directus/sync/snapshot/fields/processing_status/timestamp.json index 4d7b1d09..fe419a85 100644 --- a/echo/directus/sync/snapshot/fields/processing_status/timestamp.json +++ 
b/echo/directus/sync/snapshot/fields/processing_status/timestamp.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project/context.json b/echo/directus/sync/snapshot/fields/project/context.json index 8e576ed1..09f14e2e 100644 --- a/echo/directus/sync/snapshot/fields/project/context.json +++ b/echo/directus/sync/snapshot/fields/project/context.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json b/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json index 13773b4e..56862612 100644 --- a/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json +++ b/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/conversations.json b/echo/directus/sync/snapshot/fields/project/conversations.json index 3e771ebc..ac8e81fa 100644 --- a/echo/directus/sync/snapshot/fields/project/conversations.json +++ b/echo/directus/sync/snapshot/fields/project/conversations.json @@ -25,6 +25,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 17, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project/created_at.json b/echo/directus/sync/snapshot/fields/project/created_at.json index a34e5c9d..897c37b6 100644 --- a/echo/directus/sync/snapshot/fields/project/created_at.json +++ b/echo/directus/sync/snapshot/fields/project/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + 
"searchable": true, "sort": 2, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json b/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json index cdc2c42b..e8ffd19c 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_description.json b/echo/directus/sync/snapshot/fields/project/default_conversation_description.json index 56c1bc12..3762f13a 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_description.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_description.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 13, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json b/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json index d2de6940..8742fffb 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 15, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_title.json b/echo/directus/sync/snapshot/fields/project/default_conversation_title.json index dbf3dfb0..01dca870 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_title.json +++ 
b/echo/directus/sync/snapshot/fields/project/default_conversation_title.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 12, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json b/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json index 538cbb17..942fdba7 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 14, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json b/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json index 8d5e49ce..521971a8 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json @@ -30,6 +30,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/directus_user_id.json b/echo/directus/sync/snapshot/fields/project/directus_user_id.json index 8798debe..785d847f 100644 --- a/echo/directus/sync/snapshot/fields/project/directus_user_id.json +++ b/echo/directus/sync/snapshot/fields/project/directus_user_id.json @@ -17,6 +17,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 19, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json b/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json index adeaa998..5378881d 100644 --- a/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json +++ 
b/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json @@ -18,6 +18,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 22, "special": [ "alias", diff --git a/echo/directus/sync/snapshot/fields/project/get_reply_mode.json b/echo/directus/sync/snapshot/fields/project/get_reply_mode.json index 03a8eac9..79ad0ce2 100644 --- a/echo/directus/sync/snapshot/fields/project/get_reply_mode.json +++ b/echo/directus/sync/snapshot/fields/project/get_reply_mode.json @@ -57,6 +57,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 26, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json b/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json index 2cc84d25..09f02b0e 100644 --- a/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json +++ b/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 25, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/id.json b/echo/directus/sync/snapshot/fields/project/id.json index 7435b2a2..312c6e5f 100644 --- a/echo/directus/sync/snapshot/fields/project/id.json +++ b/echo/directus/sync/snapshot/fields/project/id.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project/image_generation_model.json b/echo/directus/sync/snapshot/fields/project/image_generation_model.json index 2af75507..42934681 100644 --- a/echo/directus/sync/snapshot/fields/project/image_generation_model.json +++ b/echo/directus/sync/snapshot/fields/project/image_generation_model.json @@ -30,6 +30,7 @@ }, "readonly": false, "required": true, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git 
a/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json b/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json index c084f33e..cb028847 100644 --- a/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json +++ b/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 23, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json b/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json index 9522a8c8..22ee522e 100644 --- a/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json +++ b/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 27, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json b/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json index c4bbb7ed..fedbf32c 100644 --- a/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json +++ b/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 24, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json b/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json index 07422686..3f2caba4 100644 --- a/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json +++ b/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": 
true, "sort": 28, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project/language.json b/echo/directus/sync/snapshot/fields/project/language.json index 5cb160db..7e3404b0 100644 --- a/echo/directus/sync/snapshot/fields/project/language.json +++ b/echo/directus/sync/snapshot/fields/project/language.json @@ -30,6 +30,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/name.json b/echo/directus/sync/snapshot/fields/project/name.json index d93032db..6888b57f 100644 --- a/echo/directus/sync/snapshot/fields/project/name.json +++ b/echo/directus/sync/snapshot/fields/project/name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project/processing_status.json b/echo/directus/sync/snapshot/fields/project/processing_status.json index b79ffb82..c4b81d17 100644 --- a/echo/directus/sync/snapshot/fields/project/processing_status.json +++ b/echo/directus/sync/snapshot/fields/project/processing_status.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 29, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json b/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json index cec8602d..bba605b0 100644 --- a/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json +++ b/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json @@ -23,6 +23,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 18, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project/project_chats.json b/echo/directus/sync/snapshot/fields/project/project_chats.json index 63e8ae6a..5f398a49 100644 --- 
a/echo/directus/sync/snapshot/fields/project/project_chats.json +++ b/echo/directus/sync/snapshot/fields/project/project_chats.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 20, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project/project_reports.json b/echo/directus/sync/snapshot/fields/project/project_reports.json index 6599e17b..d629d15c 100644 --- a/echo/directus/sync/snapshot/fields/project/project_reports.json +++ b/echo/directus/sync/snapshot/fields/project/project_reports.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 21, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project/tags.json b/echo/directus/sync/snapshot/fields/project/tags.json index 0aebfc16..fbfeb4ae 100644 --- a/echo/directus/sync/snapshot/fields/project/tags.json +++ b/echo/directus/sync/snapshot/fields/project/tags.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 16, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project/updated_at.json b/echo/directus/sync/snapshot/fields/project/updated_at.json index 138c6e7f..49fa2937 100644 --- a/echo/directus/sync/snapshot/fields/project/updated_at.json +++ b/echo/directus/sync/snapshot/fields/project/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_analysis_run/created_at.json b/echo/directus/sync/snapshot/fields/project_analysis_run/created_at.json index 42993414..e5312b5c 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/created_at.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": [ "date-created" diff --git 
a/echo/directus/sync/snapshot/fields/project_analysis_run/id.json b/echo/directus/sync/snapshot/fields/project_analysis_run/id.json index c4038a77..339b9f19 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/id.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_analysis_run/insights.json b/echo/directus/sync/snapshot/fields/project_analysis_run/insights.json index d51be437..65687528 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/insights.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/insights.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 12, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project_analysis_run/processing_status.json b/echo/directus/sync/snapshot/fields/project_analysis_run/processing_status.json index 992b24b8..13793fab 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/processing_status.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/processing_status.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 14, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project_analysis_run/project_id.json b/echo/directus/sync/snapshot/fields/project_analysis_run/project_id.json index 2d640080..78639c7e 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/project_id.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/project_id.json @@ -17,6 +17,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_analysis_run/updated_at.json b/echo/directus/sync/snapshot/fields/project_analysis_run/updated_at.json 
index 2b04c5d5..c678553e 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/updated_at.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_analysis_run/views.json b/echo/directus/sync/snapshot/fields/project_analysis_run/views.json index 74559564..cd11c3d5 100644 --- a/echo/directus/sync/snapshot/fields/project_analysis_run/views.json +++ b/echo/directus/sync/snapshot/fields/project_analysis_run/views.json @@ -20,6 +20,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 13, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project_chat/auto_select.json b/echo/directus/sync/snapshot/fields/project_chat/auto_select.json index 0f5c2d33..a0b46651 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/auto_select.json +++ b/echo/directus/sync/snapshot/fields/project_chat/auto_select.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project_chat/date_created.json b/echo/directus/sync/snapshot/fields/project_chat/date_created.json index a2497372..99d01712 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/date_created.json +++ b/echo/directus/sync/snapshot/fields/project_chat/date_created.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project_chat/date_updated.json b/echo/directus/sync/snapshot/fields/project_chat/date_updated.json index 39875250..bd30cd60 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/date_updated.json +++ b/echo/directus/sync/snapshot/fields/project_chat/date_updated.json @@ 
-17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 5, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_chat/id.json b/echo/directus/sync/snapshot/fields/project_chat/id.json index c4f23b2e..ebba7016 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/id.json +++ b/echo/directus/sync/snapshot/fields/project_chat/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_chat/name.json b/echo/directus/sync/snapshot/fields/project_chat/name.json index e56bf556..d1ca361f 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/name.json +++ b/echo/directus/sync/snapshot/fields/project_chat/name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat/project_chat_messages.json b/echo/directus/sync/snapshot/fields/project_chat/project_chat_messages.json index 0d02f042..9e07b0df 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/project_chat_messages.json +++ b/echo/directus/sync/snapshot/fields/project_chat/project_chat_messages.json @@ -23,6 +23,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project_chat/project_id.json b/echo/directus/sync/snapshot/fields/project_chat/project_id.json index 71861ad3..d1edb06e 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/project_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat/project_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project_chat/used_conversations.json 
b/echo/directus/sync/snapshot/fields/project_chat/used_conversations.json index 94a502cd..ea824f8f 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/used_conversations.json +++ b/echo/directus/sync/snapshot/fields/project_chat/used_conversations.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/project_chat/user_created.json b/echo/directus/sync/snapshot/fields/project_chat/user_created.json index b79e5705..acc728b7 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/user_created.json +++ b/echo/directus/sync/snapshot/fields/project_chat/user_created.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 2, "special": [ "user-created" diff --git a/echo/directus/sync/snapshot/fields/project_chat/user_updated.json b/echo/directus/sync/snapshot/fields/project_chat/user_updated.json index e03b9c61..328d4c80 100644 --- a/echo/directus/sync/snapshot/fields/project_chat/user_updated.json +++ b/echo/directus/sync/snapshot/fields/project_chat/user_updated.json @@ -17,6 +17,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 4, "special": [ "user-updated" diff --git a/echo/directus/sync/snapshot/fields/project_chat_conversation/conversation_id.json b/echo/directus/sync/snapshot/fields/project_chat_conversation/conversation_id.json index 2ff1bcfc..0b7d75f3 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_conversation/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_conversation/conversation_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_conversation/id.json b/echo/directus/sync/snapshot/fields/project_chat_conversation/id.json index e3ba7d57..6dfbb095 100644 --- 
a/echo/directus/sync/snapshot/fields/project_chat_conversation/id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_conversation/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_conversation/project_chat_id.json b/echo/directus/sync/snapshot/fields/project_chat_conversation/project_chat_id.json index 72f7740e..c1576d55 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_conversation/project_chat_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_conversation/project_chat_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/added_conversations.json b/echo/directus/sync/snapshot/fields/project_chat_message/added_conversations.json index b17a896f..546d2656 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/added_conversations.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/added_conversations.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/chat_message_metadata.json b/echo/directus/sync/snapshot/fields/project_chat_message/chat_message_metadata.json index b42f855f..968c0259 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/chat_message_metadata.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/chat_message_metadata.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/date_created.json 
b/echo/directus/sync/snapshot/fields/project_chat_message/date_created.json index 616bbd32..0ce48ed9 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/date_created.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/date_created.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 2, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/date_updated.json b/echo/directus/sync/snapshot/fields/project_chat_message/date_updated.json index 861a0c2f..78afc732 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/date_updated.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/date_updated.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/id.json b/echo/directus/sync/snapshot/fields/project_chat_message/id.json index e491d678..cd26233e 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/message_from.json b/echo/directus/sync/snapshot/fields/project_chat_message/message_from.json index 19ae26a9..81fe6146 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/message_from.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/message_from.json @@ -33,6 +33,7 @@ }, "readonly": false, "required": true, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/project_chat_id.json b/echo/directus/sync/snapshot/fields/project_chat_message/project_chat_id.json index 
dd4bfa3b..62d77054 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/project_chat_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/project_chat_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/template_key.json b/echo/directus/sync/snapshot/fields/project_chat_message/template_key.json index b1bd0764..55c06a4c 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/template_key.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/template_key.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 12, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/text.json b/echo/directus/sync/snapshot/fields/project_chat_message/text.json index 9f43395b..95ccd4d2 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/text.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/text.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/tokens_count.json b/echo/directus/sync/snapshot/fields/project_chat_message/tokens_count.json index 088812f1..c1172ac5 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message/tokens_count.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/tokens_count.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message/used_conversations.json b/echo/directus/sync/snapshot/fields/project_chat_message/used_conversations.json index 7e10b17e..62173e44 100644 --- 
a/echo/directus/sync/snapshot/fields/project_chat_message/used_conversations.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message/used_conversations.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_conversation/conversation_id.json b/echo/directus/sync/snapshot/fields/project_chat_message_conversation/conversation_id.json index 614e9053..51b19690 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_conversation/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_conversation/conversation_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_conversation/id.json b/echo/directus/sync/snapshot/fields/project_chat_message_conversation/id.json index 41c64510..9238fd1c 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_conversation/id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_conversation/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_conversation/project_chat_message_id.json b/echo/directus/sync/snapshot/fields/project_chat_message_conversation/project_chat_message_id.json index 5722670a..0276bb4c 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_conversation/project_chat_message_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_conversation/project_chat_message_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git 
a/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/conversation_id.json b/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/conversation_id.json index 818d51a0..663b93ce 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/conversation_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/id.json b/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/id.json index 9d60051d..3180cf56 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/project_chat_message_id.json b/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/project_chat_message_id.json index b89008e8..2addb24d 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/project_chat_message_id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_conversation_1/project_chat_message_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/conversation.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/conversation.json index bd581e21..e65405db 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/conversation.json +++ 
b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/conversation.json @@ -17,6 +17,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/date_created.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/date_created.json index cc528a01..8d44b600 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/date_created.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/date_created.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 2, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/id.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/id.json index d0cbfbfe..889f1333 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/id.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/message_metadata.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/message_metadata.json index 19dec785..1e1fc6f4 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/message_metadata.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/message_metadata.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/ratio.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/ratio.json index 8a6f65ae..2c959518 100644 --- 
a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/ratio.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/ratio.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/reference_text.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/reference_text.json index 4851160c..d742b4d9 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/reference_text.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/reference_text.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/type.json b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/type.json index b5fc2f8b..106fac34 100644 --- a/echo/directus/sync/snapshot/fields/project_chat_message_metadata/type.json +++ b/echo/directus/sync/snapshot/fields/project_chat_message_metadata/type.json @@ -26,6 +26,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report/content.json b/echo/directus/sync/snapshot/fields/project_report/content.json index 8f2d6eff..69a82d8e 100644 --- a/echo/directus/sync/snapshot/fields/project_report/content.json +++ b/echo/directus/sync/snapshot/fields/project_report/content.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report/date_created.json b/echo/directus/sync/snapshot/fields/project_report/date_created.json index 00c21534..8fa5af91 100644 --- 
a/echo/directus/sync/snapshot/fields/project_report/date_created.json +++ b/echo/directus/sync/snapshot/fields/project_report/date_created.json @@ -17,6 +17,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project_report/date_updated.json b/echo/directus/sync/snapshot/fields/project_report/date_updated.json index e63f47fd..c189f18f 100644 --- a/echo/directus/sync/snapshot/fields/project_report/date_updated.json +++ b/echo/directus/sync/snapshot/fields/project_report/date_updated.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 6, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_report/error_code.json b/echo/directus/sync/snapshot/fields/project_report/error_code.json index f94835f5..b6403406 100644 --- a/echo/directus/sync/snapshot/fields/project_report/error_code.json +++ b/echo/directus/sync/snapshot/fields/project_report/error_code.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report/id.json b/echo/directus/sync/snapshot/fields/project_report/id.json index 774a2218..a99cb339 100644 --- a/echo/directus/sync/snapshot/fields/project_report/id.json +++ b/echo/directus/sync/snapshot/fields/project_report/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report/language.json b/echo/directus/sync/snapshot/fields/project_report/language.json index feb3b8da..6a15b083 100644 --- a/echo/directus/sync/snapshot/fields/project_report/language.json +++ b/echo/directus/sync/snapshot/fields/project_report/language.json @@ -15,6 +15,7 @@ "options": null, 
"readonly": false, "required": false, + "searchable": true, "sort": 12, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report/links-v1gyg0.json b/echo/directus/sync/snapshot/fields/project_report/links-v1gyg0.json index c1e00d47..ddf50e4d 100644 --- a/echo/directus/sync/snapshot/fields/project_report/links-v1gyg0.json +++ b/echo/directus/sync/snapshot/fields/project_report/links-v1gyg0.json @@ -23,6 +23,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "alias", diff --git a/echo/directus/sync/snapshot/fields/project_report/project_id.json b/echo/directus/sync/snapshot/fields/project_report/project_id.json index e8acdda6..36c26bea 100644 --- a/echo/directus/sync/snapshot/fields/project_report/project_id.json +++ b/echo/directus/sync/snapshot/fields/project_report/project_id.json @@ -19,6 +19,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 10, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project_report/show_portal_link.json b/echo/directus/sync/snapshot/fields/project_report/show_portal_link.json index 461b1e45..166c78b2 100644 --- a/echo/directus/sync/snapshot/fields/project_report/show_portal_link.json +++ b/echo/directus/sync/snapshot/fields/project_report/show_portal_link.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project_report/status.json b/echo/directus/sync/snapshot/fields/project_report/status.json index 3737534b..595a1cfd 100644 --- a/echo/directus/sync/snapshot/fields/project_report/status.json +++ b/echo/directus/sync/snapshot/fields/project_report/status.json @@ -58,6 +58,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report_metric/date_created.json 
b/echo/directus/sync/snapshot/fields/project_report_metric/date_created.json index 799d9f22..a4e345ac 100644 --- a/echo/directus/sync/snapshot/fields/project_report_metric/date_created.json +++ b/echo/directus/sync/snapshot/fields/project_report_metric/date_created.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project_report_metric/date_updated.json b/echo/directus/sync/snapshot/fields/project_report_metric/date_updated.json index 8ca0731e..8164cca6 100644 --- a/echo/directus/sync/snapshot/fields/project_report_metric/date_updated.json +++ b/echo/directus/sync/snapshot/fields/project_report_metric/date_updated.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 5, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_report_metric/id.json b/echo/directus/sync/snapshot/fields/project_report_metric/id.json index 0fc1a151..847fa033 100644 --- a/echo/directus/sync/snapshot/fields/project_report_metric/id.json +++ b/echo/directus/sync/snapshot/fields/project_report_metric/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report_metric/ip.json b/echo/directus/sync/snapshot/fields/project_report_metric/ip.json index 20053cac..591e7404 100644 --- a/echo/directus/sync/snapshot/fields/project_report_metric/ip.json +++ b/echo/directus/sync/snapshot/fields/project_report_metric/ip.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report_metric/project_report_id.json b/echo/directus/sync/snapshot/fields/project_report_metric/project_report_id.json 
index 34609ee1..7d114fdc 100644 --- a/echo/directus/sync/snapshot/fields/project_report_metric/project_report_id.json +++ b/echo/directus/sync/snapshot/fields/project_report_metric/project_report_id.json @@ -19,6 +19,7 @@ }, "readonly": true, "required": false, + "searchable": true, "sort": 6, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project_report_metric/type.json b/echo/directus/sync/snapshot/fields/project_report_metric/type.json index 6913283f..36d3a108 100644 --- a/echo/directus/sync/snapshot/fields/project_report_metric/type.json +++ b/echo/directus/sync/snapshot/fields/project_report_metric/type.json @@ -23,6 +23,7 @@ }, "readonly": false, "required": true, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/conversation_id.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/conversation_id.json index 00327c35..ad7a0fe8 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/conversation_id.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/conversation_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": [ "m2o" diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_submitted.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_submitted.json index 5bbe12a5..5b80e34e 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_submitted.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_submitted.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git 
a/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_updated.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_updated.json index dc485ba2..02392964 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_updated.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/date_updated.json @@ -17,6 +17,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 4, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/email.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/email.json index 1db9fb32..83f7bacb 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/email.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/email.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_in.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_in.json index 1febd72f..e897ae24 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_in.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_in.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "cast-boolean" diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_out_token.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_out_token.json index 7ed13bb1..f5219a95 100644 --- 
a/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_out_token.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/email_opt_out_token.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 9, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/id.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/id.json index 4e10e297..05c2178d 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/id.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": true, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/project_id.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/project_id.json index c43aa800..fadd35c3 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/project_id.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/project_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_report_notification_participants/sort.json b/echo/directus/sync/snapshot/fields/project_report_notification_participants/sort.json index cb7b1797..15f3d1e3 100644 --- a/echo/directus/sync/snapshot/fields/project_report_notification_participants/sort.json +++ b/echo/directus/sync/snapshot/fields/project_report_notification_participants/sort.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": null, "translations": null, diff --git 
a/echo/directus/sync/snapshot/fields/project_tag/conversations.json b/echo/directus/sync/snapshot/fields/project_tag/conversations.json index 333468ea..698f9dab 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/conversations.json +++ b/echo/directus/sync/snapshot/fields/project_tag/conversations.json @@ -24,6 +24,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": [ "m2m" diff --git a/echo/directus/sync/snapshot/fields/project_tag/created_at.json b/echo/directus/sync/snapshot/fields/project_tag/created_at.json index f15128ba..7ca27a1b 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/created_at.json +++ b/echo/directus/sync/snapshot/fields/project_tag/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/project_tag/id.json b/echo/directus/sync/snapshot/fields/project_tag/id.json index 042cc59e..e3a19b5e 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/id.json +++ b/echo/directus/sync/snapshot/fields/project_tag/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 1, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/project_tag/project_id.json b/echo/directus/sync/snapshot/fields/project_tag/project_id.json index 5f3950bc..85809963 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/project_id.json +++ b/echo/directus/sync/snapshot/fields/project_tag/project_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_tag/sort.json b/echo/directus/sync/snapshot/fields/project_tag/sort.json index 18b52ace..678748c3 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/sort.json +++ 
b/echo/directus/sync/snapshot/fields/project_tag/sort.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_tag/text.json b/echo/directus/sync/snapshot/fields/project_tag/text.json index 939bda28..450c65ca 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/text.json +++ b/echo/directus/sync/snapshot/fields/project_tag/text.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/project_tag/updated_at.json b/echo/directus/sync/snapshot/fields/project_tag/updated_at.json index 07ebe977..37598c5c 100644 --- a/echo/directus/sync/snapshot/fields/project_tag/updated_at.json +++ b/echo/directus/sync/snapshot/fields/project_tag/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/view/aspects.json b/echo/directus/sync/snapshot/fields/view/aspects.json index 02f68739..92d25905 100644 --- a/echo/directus/sync/snapshot/fields/view/aspects.json +++ b/echo/directus/sync/snapshot/fields/view/aspects.json @@ -24,6 +24,7 @@ }, "readonly": false, "required": false, + "searchable": true, "sort": 8, "special": [ "o2m" diff --git a/echo/directus/sync/snapshot/fields/view/created_at.json b/echo/directus/sync/snapshot/fields/view/created_at.json index 69399340..0627f0ce 100644 --- a/echo/directus/sync/snapshot/fields/view/created_at.json +++ b/echo/directus/sync/snapshot/fields/view/created_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 3, "special": [ "date-created" diff --git a/echo/directus/sync/snapshot/fields/view/description.json 
b/echo/directus/sync/snapshot/fields/view/description.json index 730411d6..4cfd5d04 100644 --- a/echo/directus/sync/snapshot/fields/view/description.json +++ b/echo/directus/sync/snapshot/fields/view/description.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 6, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/view/id.json b/echo/directus/sync/snapshot/fields/view/id.json index efe6ac1c..f813c72f 100644 --- a/echo/directus/sync/snapshot/fields/view/id.json +++ b/echo/directus/sync/snapshot/fields/view/id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 2, "special": [ "uuid" diff --git a/echo/directus/sync/snapshot/fields/view/language.json b/echo/directus/sync/snapshot/fields/view/language.json index c24be293..85401ac1 100644 --- a/echo/directus/sync/snapshot/fields/view/language.json +++ b/echo/directus/sync/snapshot/fields/view/language.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 11, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/view/name.json b/echo/directus/sync/snapshot/fields/view/name.json index 77004d27..119e2754 100644 --- a/echo/directus/sync/snapshot/fields/view/name.json +++ b/echo/directus/sync/snapshot/fields/view/name.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 5, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/view/project_analysis_run_id.json b/echo/directus/sync/snapshot/fields/view/project_analysis_run_id.json index 5c172e14..9fc2614c 100644 --- a/echo/directus/sync/snapshot/fields/view/project_analysis_run_id.json +++ b/echo/directus/sync/snapshot/fields/view/project_analysis_run_id.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, 
"sort": 1, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/view/summary.json b/echo/directus/sync/snapshot/fields/view/summary.json index c04d336f..cb17ade1 100644 --- a/echo/directus/sync/snapshot/fields/view/summary.json +++ b/echo/directus/sync/snapshot/fields/view/summary.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 7, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/view/updated_at.json b/echo/directus/sync/snapshot/fields/view/updated_at.json index 81715ca9..b3148220 100644 --- a/echo/directus/sync/snapshot/fields/view/updated_at.json +++ b/echo/directus/sync/snapshot/fields/view/updated_at.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 4, "special": [ "date-updated" diff --git a/echo/directus/sync/snapshot/fields/view/user_input.json b/echo/directus/sync/snapshot/fields/view/user_input.json index 6866e2e7..cd84c834 100644 --- a/echo/directus/sync/snapshot/fields/view/user_input.json +++ b/echo/directus/sync/snapshot/fields/view/user_input.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 12, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/fields/view/user_input_description.json b/echo/directus/sync/snapshot/fields/view/user_input_description.json index 3391e6d5..9c216524 100644 --- a/echo/directus/sync/snapshot/fields/view/user_input_description.json +++ b/echo/directus/sync/snapshot/fields/view/user_input_description.json @@ -15,6 +15,7 @@ "options": null, "readonly": false, "required": false, + "searchable": true, "sort": 13, "special": null, "translations": null, diff --git a/echo/directus/sync/snapshot/info.json b/echo/directus/sync/snapshot/info.json index 2f58e029..8241ace0 100644 --- a/echo/directus/sync/snapshot/info.json +++ b/echo/directus/sync/snapshot/info.json 
@@ -1,5 +1,6 @@ { "version": 1, - "directus": "11.5.1", - "vendor": "postgres" + "directus": "11.13.0", + "vendor": "postgres", + "systemFields": [] } diff --git a/echo/docs/server_adding_dependencies.md b/echo/docs/server_adding_dependencies.md index 65adcc86..6e4c7de9 100644 --- a/echo/docs/server_adding_dependencies.md +++ b/echo/docs/server_adding_dependencies.md @@ -1,25 +1,23 @@ # Server: Adding Dependencies -We use `rye` to manage our dependencies. It uses `uv` under the hood. +We use `uv` to manage our dependencies. -> [How will rye and uv coexist in the future?](https://github.com/astral-sh/rye/discussions/1164) - -From [Basics of rye](https://rye.astral.sh/guide/basics/): +From [Basics of uv](https://docs.astral.sh/uv/): Example Add the latest version of a dependency that is compatible with the configured Python version: -$ rye add flask +$ uv add flask Added flask>=3.0.1 as regular dependency Add a dependency but add an optional extra feature: -$ rye add flask --features dotenv +$ uv add flask[dotenv] Added flask[dotenv]>=3.0.1 as regular dependency Add a git dependency: -$ rye add flask --git https://github.com/pallets/flask +$ uv add flask --git https://github.com/pallets/flask Added flask @ git+https://github.com/pallets/flask as regular dependency Add a local dependency: -$ rye add packagename --path path/to/packagename +$ uv add packagename --path path/to/packagename Added packagename @ file:///path/to/packagename as regular dependency diff --git a/echo/docs/troubleshooting-tips.md b/echo/docs/troubleshooting-tips.md new file mode 100644 index 00000000..a0ba7b6d --- /dev/null +++ b/echo/docs/troubleshooting-tips.md @@ -0,0 +1,100 @@ +### How do I add python dependencies? + +See [server_adding_dependencies.md](./server_adding_dependencies.md) + +### How do I use the `style-guides`? + +Attach @ to the cursor chat. See [../meta.md](../meta.md) for more context. + +### Can I develop/run only the frontend?
+ +See [frontend_getting_started.md](./frontend_getting_started.md) + +### How do I add translations for the frontend? + +See [frontend_translations.md](./frontend_translations.md) + + +## Troubleshooting + +### Directus not starting (Docker Desktop) + +If the Directus container does not start, this could be due to the database not being ready yet. + +1. **Open Docker Desktop** → **Containers**. +2. **Restart** the Directus container. +3. Ensure you have run the [Database Migrations](./database_migrations.md) + +### Directus invalid password? + +If you try logging into directus and it doesn't work with what you have in the .env file. + +Solution: You need to reset the DB. (delete ".devcontainer/postgres_data" and rebuild / migrate the DB again / etc) + +### Redis not starting (Docker Desktop) + +`Can't open the append-only file: Permission denied` + +If your Redis container fails to start and you see a “Permission denied” error about the append-only file, you may need to change permissions on the Redis data folder. + +1. **Open a local WSL terminal** (outside of the container). +2. **Run**: + ```bash + sudo chown -R 1001:1001 ./echo/.devcontainer/redis_data + ``` +3. **Restart** the redis container from Docker Desktop.
+ +### Minio not starting + +- Go to minio-ui at http://localhost:9001/ +- Login with credentials from [.devcontainer/docker-compose.yml](../.devcontainer/docker-compose.yml) +- Create a bucket called "dembrane" + +### Frontends stuck on reloading + +`The file does not exist at "node_modules/.vite/deps/chunk\*" which is in the optimize deps directory.` + +- https://github.com/vitejs/vite/discussions/17738 +- fix is to disable cache in the network tab in the browser + +### Fix for mypy extension hung up (devcontainer hang/lag) + +```bash +ps -aux | grep "mypy" +# grab all the process ids +kill -9 +``` + +### (Windows Specific) Issues with the default WSL distribution that comes with Docker Desktop + +**Enable WSL Integration in Docker Desktop** + - Open Docker Desktop. + - Click the cog/settings icon, then go to **Resources** → **WSL Integration**. + - Toggle on the distribution (e.g., “Ubuntu-22.04”) that you want Docker to use. + +### Docker Desktop Container Crashing + +In case docker desktop crashes/ runs out of memory/ IDE freezes, try these steps: +- Increase allocated RAM for WSL [https://fizzylogic.nl/2023/01/05/how-to-configure-memory-limits-in-wsl2] +- Reduce mypy load by excluding files [https://github.com/python/mypy/issues/17105] +- Uninstall mypy + +## Additional Tips + +1. **Check Docker Resources** + + - Make sure Docker has enough memory/CPU allocated under **Docker Desktop** → **Settings** → **Resources**. + +2. **Handling Port Conflicts** + + - If ports like `8055` are in use, either stop the conflicting service or update the Directus port in your Docker Compose file. + +3. **Persistence** + + - Docker volumes or the `.devcontainer/redis_data` folder store data. If you remove them, you may lose data. Make backups if necessary. + +4. **Running Commands Outside vs. Inside Dev Container** + - Typically, build/test/development commands run inside the dev container.
+ - Docker-level commands (like `docker compose` or `sudo chown` for folder permissions) sometimes must be run in your **local WSL terminal**, depending on how your dev container is configured. + + diff --git a/echo/readme.md b/echo/readme.md index 5c0dc0e2..e8f664a3 100644 --- a/echo/readme.md +++ b/echo/readme.md @@ -1,4 +1,4 @@ -# Dembrane ECHO +# Dembrane ![CodeRabbit Pull Request Reviews](https://img.shields.io/coderabbit/prs/github/Dembrane/echo?utm_source=oss&utm_medium=github&utm_campaign=Dembrane%2Fecho&labelColor=171717&color=FF570A&link=https%3A%2F%2Fcoderabbit.ai&label=CodeRabbit+Reviews) @@ -21,11 +21,11 @@ Clients: ## Getting Started -# How do I run Dembrane ECHO locally? +# How do I run Dembrane locally? -Dembrane ECHO is a application with multiple services and dependencies. +Dembrane is an application with multiple services and dependencies. -The following guide is to run the whole application locally. it is recommended to use [dev containers](https://containers.dev/) for development to properly configure and manage these services and dependencies. +The following guide is to run the whole application locally. it is HIGHLY recommended to use [dev containers](https://containers.dev/) for development to properly configure and manage these services and dependencies. > TIP: If you only want to run the frontend, you can use the [frontend_getting_started.md](./docs/frontend_getting_started.md). @@ -35,7 +35,7 @@ The following guide is to run the whole application locally. it is recommended t - VS Code (or Cursor) with "Dev Containers" Extension installed - Docker Desktop -- WSL (strongly recommended if you are on Windows) +- WSL (recommended if you are on Windows) ## Steps: @@ -48,19 +48,25 @@ The following guide is to run the whole application locally. it is recommended t 1.
This installs the following: - - Devcontainer with `pnpm`, `rye` installed and configured (see [devcontainer.json](.devcontainer/devcontainer.json) for more context) + - Devcontainer with `pnpm`, `uv` installed and configured (see [devcontainer.json](.devcontainer/devcontainer.json) for more context) - Postgres database running and exposed on port 5432 - Redis instance - - Minio server running and exposed on port 9001 - Directus server running and exposed on port 8055 +1. For your S3-compatible storage you can either + - bring your own S3 backend + - or use the `.devcontainer/docker-compose-s3.yml` file to spin up a Minio server exposed on port 9001. + + In both cases you need to configure this in `server/.env` and `directus/.env`. We are working on providing a configurable alternative to store files via local storage instead of S3. PRs are welcome to make this flow better. + + 1. Configure `.env` files - Most .env variables are already setup through the devcontainer. - You can override any of them by setting the corresponding environment variable in the `.env` file, you can see what variables are needed in the `.env.sample` files. - For the server: update `server/.env` - For the frontends: update `frontend/.env` - - For directus, it is not straight-forward to update the env (PRs are welcomed). Preferred way would be to add a .env file to `directus/` and have it load inside the container using `.devcontainer/docker-compose.yml` + - For directus, update `directus/.env` (For directus, you might need to restart your docker container to load the keys) 1. Run the [database migrations](./docs//database_migrations.md) @@ -70,106 +76,6 @@ The following guide is to run the whole application locally. it is recommended t - Type **"Active session"**. - Click **"Terminal Keeper: Active session"**. -## FAQ - -### How do I add python dependencies? - -See [server_adding_dependencies.md](./docs/server_adding_dependencies.md) - -### How do I use the `style-guides`?
- -Attach @ to the cursor chat. See [./meta.md](./meta.md) for more context. - -### Can I develop/run only the frontend? - -See [frontend_getting_started.md](./docs/frontend_getting_started.md) - -### How do I add translations for the frontend? - -See [frontend_translations.md](./docs/frontend_translations.md) - - -## Troubleshooting - -### Directus not starting (Docker Desktop) - -If the Directus container does not start, this could be due to the database not being ready yet. - -1. **Open Docker Desktop** → **Containers**. -2. **Restart** the Directus container. -3. Ensure you have run the [Database Migrations](./docs/database_migrations.md) - -### Directus invalid password? - -If you try logging into directus and it doesn't work with what you have in the .env file. - -Solution: You need to reset the DB. (delete ".devcontainer/postgres_data" and rebuild / migrate the DB again / etc) - -### Redis not starting (Docker Desktop) - -`Can't open the append-only file: Permission denied` - -If your Redis container fails to start and you see a “Permission denied” error about the append-only file, you may need to change permissions on the Redis data folder. - -1. **Open a local WSL terminal** (outside of the container). -2. **Run**: - ```bash - sudo chown -R 1001:1001 ./echo/.devcontainer/redis_data - ``` -3. **Restart** the redis container from Docker Desktop. 
- -### Minio not starting - -- Go to minio-ui at http://localhost:9001/ -- Login with credentials from [.devcontainer/docker-compose.yml](.devcontainer/docker-compose.yml) -- Create a bucket called "dembrane" - -### Frontends stuck on reloading - -`The file does not exist at "node_modules/.vite/deps/chunk\*" which is in the optimize deps directory.` - -- https://github.com/vitejs/vite/discussions/17738 -- fix is to disable cache in the network tab in the browser - -### Fix for mypy extension hung up (devcontainer hang/lag) - -```bash -ps -aux | grep "mypy" -# grab all the process ids -kill -9 -``` - -### (Windows Specific) Issues with the default WSL distribution that comes with Docker Desktop - -**Enable WSL Integration in Docker Desktop** - - Open Docker Desktop. - - Click the cog/settings icon, then go to **Resources** → **WSL Integration**. - - Toggle on the distribution (e.g., “Ubuntu-22.04”) that you want Docker to use. - -### Docker Desktop Container Crashing - -In case docker desktop crashes/ runs out of memory/ IDE freezes, try these steps: -- Increase allocates RAM to WSL[https://fizzylogic.nl/2023/01/05/how-to-configure-memory-limits-in-wsl2] -- Reduce mypy load by excluding files[https://github.com/python/mypy/issues/17105] -- Uninstall mypy - -## Additional Tips - -1. **Check Docker Resources** - - - Make sure Docker has enough memory/CPU allocated under **Docker Desktop** → **Settings** → **Resources**. - -2. **Handling Port Conflicts** - - - If ports like `8055` are in use, either stop the conflicting service or update the Directus port in your Docker Compose file. - -3. **Persistence** - - - Docker volumes or the `.devcontainer/redis_data` folder store data. If you remove them, you may lose data. Make backups if necessary. - -4. **Running Commands Outside vs. Inside Dev Container** - - Typically, build/test/development commands run inside the dev container. 
- - Docker-level commands (like `docker compose` or `sudo chown` for folder permissions) sometimes must be run in your **local WSL terminal**, depending on how your dev container is configured. - +## FAQ [./docs/troubleshooting-tips.md](./docs/troubleshooting-tips.md) -Enjoy building with Dembrane Echo! +Enjoy building with Dembrane! \ No newline at end of file diff --git a/echo/server/.python-version b/echo/server/.python-version new file mode 100644 index 00000000..641602f4 --- /dev/null +++ b/echo/server/.python-version @@ -0,0 +1 @@ +3.11.14 diff --git a/echo/server/Dockerfile b/echo/server/Dockerfile index f7e6f38e..b398f8f9 100644 --- a/echo/server/Dockerfile +++ b/echo/server/Dockerfile @@ -18,13 +18,16 @@ RUN \ apt-get install -y --no-install-recommends git curl ca-certificates pkg-config zip build-essential && \ chmod +x /usr/local/bin/ffmpeg /usr/local/bin/ffprobe +# Install uv +COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv + WORKDIR /code/server # Install dependencies first (better layer caching) -COPY requirements.lock ./requirements.lock -RUN pip install --no-cache-dir -r requirements.lock +COPY pyproject.toml uv.lock* ./ +RUN uv sync --frozen -# Copy everything else after pip install +# Copy everything else after dependency installation # This ensures dependency layer is cached separately from application code COPY . . 
diff --git a/echo/server/pyproject.toml b/echo/server/pyproject.toml index 78b02311..fdf69005 100644 --- a/echo/server/pyproject.toml +++ b/echo/server/pyproject.toml @@ -2,7 +2,7 @@ name = "Dembrane" version = "0.0.1" description = "Python project for the Dembrane API" -requires-python = "== 3.11" +requires-python = ">= 3.11, < 3.12" dependencies = [ # API server "fastapi==0.109.*", @@ -94,42 +94,24 @@ dependencies = [ [tool.setuptools] packages = ["dembrane"] -[tool.rye] -virtual = true -managed = true +[tool.uv] +package = false -[tool.rye.scripts] -"format" = { chain = ["format:isort", "format:ruff", "fix:ruff"] } -"format:ruff" = "ruff format" -"format:isort" = "isort ." -"lint" = { chain = ["check:ruff", "typecheck"] } -"check:ruff" = "ruff ." -"fix:ruff" = "ruff --fix ." -typecheck = { chain = ["typecheck:mypy"] } -"typecheck:mypy" = "mypy ." -"test" = "pytest" -"test:cov" = "pytest --cov=dembrane --cov-report=term --cov-report=html" +# [tool.mypy] +# plugins = 'pydantic.mypy' +# exclude = ['scripts', 'tests'] +# warn_redundant_casts = true +# warn_unused_ignores = true +# check_untyped_defs = true +# no_implicit_reexport = true +# disallow_untyped_defs = true +# ignore_missing_imports = true -[tool.mypy] -plugins = 'pydantic.mypy' -exclude = ['scripts', 'tests'] -warn_redundant_casts = true -warn_unused_ignores = true -check_untyped_defs = true -no_implicit_reexport = true -disallow_untyped_defs = true -ignore_missing_imports = true - -# Removed trankit mypy override since trankit is no longer a dependency -# [[tool.mypy.overrides]] -# follow_imports = "skip" -# module = ["trankit.*"] - -[tool.pydantic-mypy] -init_forbid_extra = true -init_typed = true -warn_required_dynamic_aliases = true -warn_untyped_fields = true +# [tool.pydantic-mypy] +# init_forbid_extra = true +# init_typed = true +# warn_required_dynamic_aliases = true +# warn_untyped_fields = true [tool.pytest.ini_options] testpaths = ["tests", "dembrane"] @@ -232,6 +214,13 @@ fixable = ["ALL"] 
unfixable = [] dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" +[tool.pyrefly] +project-excludes = [ + "**/scripts*", + "**/tests*", +] +ignore-missing-imports = ["*"] + [tool.ruff.format] quote-style = "double" indent-style = "space" diff --git a/echo/server/run-scheduler.sh b/echo/server/run-scheduler.sh index 9d364108..2673e73d 100755 --- a/echo/server/run-scheduler.sh +++ b/echo/server/run-scheduler.sh @@ -1 +1 @@ -python -m dembrane.scheduler \ No newline at end of file +uv run python -m dembrane.scheduler \ No newline at end of file diff --git a/echo/server/run-worker-cpu.sh b/echo/server/run-worker-cpu.sh index 3f076e3f..d0f6c3c6 100755 --- a/echo/server/run-worker-cpu.sh +++ b/echo/server/run-worker-cpu.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -dramatiq --watch ./dembrane --queues cpu --processes 1 --threads 2 dembrane.tasks +uv run dramatiq --watch ./dembrane --queues cpu --processes 1 --threads 2 dembrane.tasks diff --git a/echo/server/run-worker.sh b/echo/server/run-worker.sh index 7b2f981e..82981b99 100755 --- a/echo/server/run-worker.sh +++ b/echo/server/run-worker.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -dramatiq-gevent --watch ./dembrane --queues network --processes 2 --threads 1 dembrane.tasks +uv run dramatiq-gevent --watch ./dembrane --queues network --processes 2 --threads 1 dembrane.tasks diff --git a/echo/server/run.sh b/echo/server/run.sh index bf81bec1..8f75a247 100755 --- a/echo/server/run.sh +++ b/echo/server/run.sh @@ -1,3 +1,3 @@ #!/bin/sh -uvicorn dembrane.main:app --port 8000 --reload --loop asyncio \ No newline at end of file +uv run uvicorn dembrane.main:app --port 8000 --reload --loop asyncio \ No newline at end of file diff --git a/echo/server/uv.lock b/echo/server/uv.lock new file mode 100644 index 00000000..b7047a91 --- /dev/null +++ b/echo/server/uv.lock @@ -0,0 +1,2965 @@ +version = 1 +revision = 3 +requires-python = "==3.11.*" + +[[package]] +name = "aiofiles" +version = "23.2.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/41/cfed10bc64d774f497a86e5ede9248e1d062db675504b41c320954d99641/aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a", size = 32072, upload-time = "2023-08-09T15:23:11.564Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/19/5af6804c4cc0fed83f47bff6e413a98a36618e7d40185cd36e69737f3b0e/aiofiles-23.2.1-py3-none-any.whl", hash = "sha256:19297512c647d4b27a2cf7c34caa7e405c0d60b5560618a29a9fe027b18b0107", size = 15727, upload-time = "2023-08-09T15:23:09.774Z" }, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.11.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/96/91e93ae5fd04d428c101cdbabce6c820d284d61d2614d00518f4fa52ea24/aiohttp-3.11.14.tar.gz", hash = "sha256:d6edc538c7480fa0a3b2bdd705f8010062d74700198da55d16498e1b49549b9c", size = 7676994, upload-time = "2025-03-17T02:45:10.69Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/b3/f5/5e2ae82822b1781f828bb9285fb585a4ac028cfd329788caf073bde45706/aiohttp-3.11.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f296d637a50bb15fb6a229fbb0eb053080e703b53dbfe55b1e4bb1c5ed25d325", size = 709382, upload-time = "2025-03-17T02:43:09.977Z" }, + { url = "https://files.pythonhosted.org/packages/2f/eb/a0e118c54eb9f897e13e7a357b2ef9b8d0ca438060a9db8ad4af4561aab4/aiohttp-3.11.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6cd1954ca2bbf0970f531a628da1b1338f594bf5da7e361e19ba163ecc4f3b", size = 469254, upload-time = "2025-03-17T02:43:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/ea/3f/03c2f177536ad6ab4d3052e21fb67ce430d0257b3c61a0ef6b91b7b12cb4/aiohttp-3.11.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:572def4aad0a4775af66d5a2b5923c7de0820ecaeeb7987dcbccda2a735a993f", size = 456342, upload-time = "2025-03-17T02:43:13.534Z" }, + { url = "https://files.pythonhosted.org/packages/d8/fe/849c000be857f60e36d2ce0a8c3d1ad34f8ea64b0ff119ecdafbc94cddfb/aiohttp-3.11.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c68e41c4d576cd6aa6c6d2eddfb32b2acfb07ebfbb4f9da991da26633a3db1a", size = 1686573, upload-time = "2025-03-17T02:43:14.944Z" }, + { url = "https://files.pythonhosted.org/packages/a8/e9/737aef162bf618f3b3e0f4a6ed03b5baca5e2a9ffabdab4be1b756ca1061/aiohttp-3.11.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b8bbfc8111826aa8363442c0fc1f5751456b008737ff053570f06a151650b3", size = 1747903, upload-time = "2025-03-17T02:43:16.498Z" }, + { url = "https://files.pythonhosted.org/packages/15/19/a510c51e5a383ad804e51040819898d074106dc297adf0e2c78dccc8ab47/aiohttp-3.11.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b0a200e85da5c966277a402736a96457b882360aa15416bf104ca81e6f5807b", size = 1788922, upload-time = "2025-03-17T02:43:18.063Z" }, + { url = 
"https://files.pythonhosted.org/packages/51/66/30b217d0de5584650340025a285f1d0abf2039e5a683342891e84f250da9/aiohttp-3.11.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d173c0ac508a2175f7c9a115a50db5fd3e35190d96fdd1a17f9cb10a6ab09aa1", size = 1676062, upload-time = "2025-03-17T02:43:19.627Z" }, + { url = "https://files.pythonhosted.org/packages/27/90/9f61d0c7b185e5a413ae7a3e206e7759ea1b208fff420b380ab205ab82b5/aiohttp-3.11.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:413fe39fd929329f697f41ad67936f379cba06fcd4c462b62e5b0f8061ee4a77", size = 1620750, upload-time = "2025-03-17T02:43:21.617Z" }, + { url = "https://files.pythonhosted.org/packages/c9/5a/455a6b8aea18ec8590f0a5642caf6d0494152de09579a4fd4f9530a4a111/aiohttp-3.11.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65c75b14ee74e8eeff2886321e76188cbe938d18c85cff349d948430179ad02c", size = 1655093, upload-time = "2025-03-17T02:43:23.246Z" }, + { url = "https://files.pythonhosted.org/packages/f5/4b/b369e5e809bdb46a306df7b22e611dc8622ebb5313498c11f6e1cb986408/aiohttp-3.11.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:321238a42ed463848f06e291c4bbfb3d15ba5a79221a82c502da3e23d7525d06", size = 1661318, upload-time = "2025-03-17T02:43:24.797Z" }, + { url = "https://files.pythonhosted.org/packages/25/ac/a211dd149485e7c518481b08d7c13e7acd32090daf1e396aaea6b9f2eea9/aiohttp-3.11.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:59a05cdc636431f7ce843c7c2f04772437dd816a5289f16440b19441be6511f1", size = 1650991, upload-time = "2025-03-17T02:43:26.398Z" }, + { url = "https://files.pythonhosted.org/packages/74/c4/8b1d41853f1ccd4cb66edc909ccc2a95b332081661f04324f7064cc200d8/aiohttp-3.11.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:daf20d9c3b12ae0fdf15ed92235e190f8284945563c4b8ad95b2d7a31f331cd3", size = 1734371, upload-time = "2025-03-17T02:43:28.244Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/e2/e244684266722d819f41d7e798ce8bbee3b72420eb684193a076ea1bf18f/aiohttp-3.11.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:05582cb2d156ac7506e68b5eac83179faedad74522ed88f88e5861b78740dc0e", size = 1756128, upload-time = "2025-03-17T02:43:30.43Z" }, + { url = "https://files.pythonhosted.org/packages/e9/59/79d37f2badafbe229c7654dbf631b38419fcaa979a45c04941397ad7251c/aiohttp-3.11.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:12c5869e7ddf6b4b1f2109702b3cd7515667b437da90a5a4a50ba1354fe41881", size = 1694370, upload-time = "2025-03-17T02:43:32.024Z" }, + { url = "https://files.pythonhosted.org/packages/04/0f/aaaf3fc8533f65eba4572a79a935b9033e663f67f763b10db16f1c40a067/aiohttp-3.11.14-cp311-cp311-win32.whl", hash = "sha256:92868f6512714efd4a6d6cb2bfc4903b997b36b97baea85f744229f18d12755e", size = 417192, upload-time = "2025-03-17T02:43:33.562Z" }, + { url = "https://files.pythonhosted.org/packages/07/3c/aa468550b7fcd0c634d4aa8192f33ce32a179ecba08b908a0ed272194f87/aiohttp-3.11.14-cp311-cp311-win_amd64.whl", hash = "sha256:bccd2cb7aa5a3bfada72681bdb91637094d81639e116eac368f8b3874620a654", size = 443590, upload-time = "2025-03-17T02:43:35.376Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" 
}, +] + +[[package]] +name = "alembic" +version = "1.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/94/a2/840c3b84382dce8624bc2f0ee67567fc74c32478d0c5a5aea981518c91c3/alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2", size = 1921223, upload-time = "2024-09-23T14:52:14.593Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/12/58f4f11385fddafef5d6f7bfaaf2f42899c8da6b4f95c04b7c3b744851a8/alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e", size = 233217, upload-time = "2024-09-23T14:52:18.183Z" }, +] + +[[package]] +name = "amqp" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013, upload-time = "2024-11-12T19:55:44.051Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944, upload-time = "2024-11-12T19:55:41.782Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anthropic" +version = "0.43.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ca/d8/238c2bc59e41a787e7b62460adfc7b2edd88f28b0a14e292801a72725369/anthropic-0.43.1.tar.gz", hash = "sha256:c7f13e4b7b515ac4a3111142310b214527c0fc561485e5bc9b582e49fe3adba2", size = 195298, upload-time = "2025-01-17T19:49:18.635Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/2b/63b167d76401f759c8c4ff0266042e60aac6fd3cc0685b27437ceaaf95eb/anthropic-0.43.1-py3-none-any.whl", hash = "sha256:20759c25cd0f4072eb966b0180a41c061c156473bbb674da6a3f1e92e1ad78f8", size = 208170, upload-time = "2025-01-17T19:49:17.102Z" }, +] + +[[package]] +name = "anyio" +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, +] + +[[package]] 
+name = "apscheduler" +version = "3.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzlocal" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/00/6d6814ddc19be2df62c8c898c4df6b5b1914f3bd024b780028caa392d186/apscheduler-3.11.0.tar.gz", hash = "sha256:4c622d250b0955a65d5d0eb91c33e6d43fd879834bf541e0a18661ae60460133", size = 107347, upload-time = "2024-11-24T19:39:26.463Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/ae/9a053dd9229c0fde6b1f1f33f609ccff1ee79ddda364c756a924c6d8563b/APScheduler-3.11.0-py3-none-any.whl", hash = "sha256:fc134ca32e50f5eadcc4938e3a4545ab19131435e851abb40b34d63d5141c6da", size = 64004, upload-time = "2024-11-24T19:39:24.442Z" }, +] + +[[package]] +name = "ascii-colors" +version = "0.11.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/2d/399ec9c4bd76aa62321b6d024003fce3209d0dfcb3676f85cc7e2d4c57fc/ascii_colors-0.11.4.tar.gz", hash = "sha256:b308949c2ada24b6cda89aa9c3c1dba3ed324ab45dc516ea5f6dc77fd3aa4220", size = 106608, upload-time = "2025-05-19T16:07:32.977Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/b6/6b1dc0962d3210256e5da600a60939ce447eb9f0788273d8642a0c2f7498/ascii_colors-0.11.4-py3-none-any.whl", hash = "sha256:1ffd62a0bfb2d51a8ab942f0844fe6b7c11aaa04d19bd6e50ff149b38c9738a6", size = 71383, upload-time = "2025-05-19T16:07:31.199Z" }, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + +[[package]] +name = "asyncpg" +version = "0.30.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, + { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, + { url = 
"https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, + { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, + { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, +] + +[[package]] +name = "backoff" +version = "2.2.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001, upload-time = "2022-10-05T19:19:32.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, +] + +[[package]] +name = "billiard" +version = "4.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/6a/1405343016bce8354b29d90aad6b0bf6485b5e60404516e4b9a3a9646cf0/billiard-4.2.2.tar.gz", hash = "sha256:e815017a062b714958463e07ba15981d802dc53d41c5b69d28c5a7c238f8ecf3", size = 155592, upload-time = "2025-09-20T14:44:40.456Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/80/ef8dff49aae0e4430f81842f7403e14e0ca59db7bbaf7af41245b67c6b25/billiard-4.2.2-py3-none-any.whl", hash = "sha256:4bc05dcf0d1cc6addef470723aac2a6232f3c7ed7475b0b580473a9145829457", size = 86896, upload-time = "2025-09-20T14:44:39.157Z" }, +] + +[[package]] +name = "boto3" +version = "1.37.38" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0d/b5/d1c2e8c484cea43891629bbab6ca90ce9ca932586750bc0e786c8f096ccf/boto3-1.37.38.tar.gz", hash = "sha256:88c02910933ab7777597d1ca7c62375f52822e0aa1a8e0c51b2598a547af42b2", size = 111623, upload-time = "2025-04-21T19:27:18.06Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d3/87/8189f22ee798177bc7b40afd13f046442c5f91b699e70a950b42ff447e80/boto3-1.37.38-py3-none-any.whl", hash = 
"sha256:b6d42803607148804dff82389757827a24ce9271f0583748853934c86310999f", size = 139922, upload-time = "2025-04-21T19:27:16.107Z" }, +] + +[[package]] +name = "botocore" +version = "1.37.38" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/34/79/4e072e614339727f79afef704e5993b5b4d2667c1671c757cc4deb954744/botocore-1.37.38.tar.gz", hash = "sha256:c3ea386177171f2259b284db6afc971c959ec103fa2115911c4368bea7cbbc5d", size = 13832365, upload-time = "2025-04-21T19:27:05.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/1b/93f3504afc7c523dcaa8a8147cfc75421983e30b08d9f93a533929589630/botocore-1.37.38-py3-none-any.whl", hash = "sha256:23b4097780e156a4dcaadfc1ed156ce25cb95b6087d010c4bb7f7f5d9bc9d219", size = 13499391, upload-time = "2025-04-21T19:27:00.869Z" }, +] + +[[package]] +name = "cachetools" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cc/7e/b975b5814bd36faf009faebe22c1072a1fa1168db34d285ef0ba071ad78c/cachetools-6.2.1.tar.gz", hash = "sha256:3f391e4bd8f8bf0931169baf7456cc822705f4e2a31f840d218f445b9a854201", size = 31325, upload-time = "2025-10-12T14:55:30.139Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/c5/1e741d26306c42e2bf6ab740b2202872727e0f606033c9dd713f8b93f5a8/cachetools-6.2.1-py3-none-any.whl", hash = "sha256:09868944b6dde876dfd44e1d47e18484541eaf12f26f29b7af91b26cc892d701", size = 11280, upload-time = "2025-10-12T14:55:28.382Z" }, +] + +[[package]] +name = "celery" +version = "5.5.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "billiard" }, + { name = "click" }, + { name = "click-didyoumean" }, + { name = "click-plugins" }, + { name = "click-repl" }, + { name = "kombu" }, + { name = "python-dateutil" }, + { name = "vine" }, +] +sdist = 
{ url = "https://files.pythonhosted.org/packages/bb/7d/6c289f407d219ba36d8b384b42489ebdd0c84ce9c413875a8aae0c85f35b/celery-5.5.3.tar.gz", hash = "sha256:6c972ae7968c2b5281227f01c3a3f984037d21c5129d07bf3550cc2afc6b10a5", size = 1667144, upload-time = "2025-06-01T11:08:12.563Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/af/0dcccc7fdcdf170f9a1585e5e96b6fb0ba1749ef6be8c89a6202284759bd/celery-5.5.3-py3-none-any.whl", hash = "sha256:0b5761a07057acee94694464ca482416b959568904c9dfa41ce8413a7d65d525", size = 438775, upload-time = "2025-06-01T11:08:09.94Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, +] + +[[package]] +name = "cffi" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = 
"sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, + { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, + { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, + { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, + { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, + { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = 
"sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, + { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, + { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, + { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, + { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, +] + +[[package]] +name = "charset-normalizer" 
+version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/27/c6491ff4954e58a10f69ad90aca8a1b6fe9c5d3c6f380907af3c37435b59/charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8", size = 206988, upload-time = "2025-10-14T04:40:33.79Z" }, + { url = "https://files.pythonhosted.org/packages/94/59/2e87300fe67ab820b5428580a53cad894272dbb97f38a7a814a2a1ac1011/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0", size = 147324, upload-time = "2025-10-14T04:40:34.961Z" }, + { url = "https://files.pythonhosted.org/packages/07/fb/0cf61dc84b2b088391830f6274cb57c82e4da8bbc2efeac8c025edb88772/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3", size = 142742, upload-time = "2025-10-14T04:40:36.105Z" }, + { url = "https://files.pythonhosted.org/packages/62/8b/171935adf2312cd745d290ed93cf16cf0dfe320863ab7cbeeae1dcd6535f/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc", size = 160863, upload-time = "2025-10-14T04:40:37.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/ad875b192bda14f2173bfc1bc9a55e009808484a4b256748d931b6948442/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897", size = 157837, upload-time = "2025-10-14T04:40:38.435Z" }, + { url = "https://files.pythonhosted.org/packages/6d/fc/de9cce525b2c5b94b47c70a4b4fb19f871b24995c728e957ee68ab1671ea/charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381", size = 151550, upload-time = "2025-10-14T04:40:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/55/c2/43edd615fdfba8c6f2dfbd459b25a6b3b551f24ea21981e23fb768503ce1/charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815", size = 149162, upload-time = "2025-10-14T04:40:41.163Z" }, + { url = "https://files.pythonhosted.org/packages/03/86/bde4ad8b4d0e9429a4e82c1e8f5c659993a9a863ad62c7df05cf7b678d75/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0", size = 150019, upload-time = "2025-10-14T04:40:42.276Z" }, + { url = "https://files.pythonhosted.org/packages/1f/86/a151eb2af293a7e7bac3a739b81072585ce36ccfb4493039f49f1d3cae8c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161", size = 143310, upload-time = "2025-10-14T04:40:43.439Z" }, + { url = "https://files.pythonhosted.org/packages/b5/fe/43dae6144a7e07b87478fdfc4dbe9efd5defb0e7ec29f5f58a55aeef7bf7/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4", size = 162022, 
upload-time = "2025-10-14T04:40:44.547Z" }, + { url = "https://files.pythonhosted.org/packages/80/e6/7aab83774f5d2bca81f42ac58d04caf44f0cc2b65fc6db2b3b2e8a05f3b3/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89", size = 149383, upload-time = "2025-10-14T04:40:46.018Z" }, + { url = "https://files.pythonhosted.org/packages/4f/e8/b289173b4edae05c0dde07f69f8db476a0b511eac556dfe0d6bda3c43384/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569", size = 159098, upload-time = "2025-10-14T04:40:47.081Z" }, + { url = "https://files.pythonhosted.org/packages/d8/df/fe699727754cae3f8478493c7f45f777b17c3ef0600e28abfec8619eb49c/charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224", size = 152991, upload-time = "2025-10-14T04:40:48.246Z" }, + { url = "https://files.pythonhosted.org/packages/1a/86/584869fe4ddb6ffa3bd9f491b87a01568797fb9bd8933f557dba9771beaf/charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a", size = 99456, upload-time = "2025-10-14T04:40:49.376Z" }, + { url = "https://files.pythonhosted.org/packages/65/f6/62fdd5feb60530f50f7e38b4f6a1d5203f4d16ff4f9f0952962c044e919a/charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016", size = 106978, upload-time = "2025-10-14T04:40:50.844Z" }, + { url = "https://files.pythonhosted.org/packages/7a/9d/0710916e6c82948b3be62d9d398cb4fcf4e97b56d6a6aeccd66c4b2f2bd5/charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1", size = 99969, upload-time = "2025-10-14T04:40:52.272Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, +] + +[[package]] +name = "click" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, +] + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089, upload-time = "2024-03-24T08:22:07.499Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631, upload-time = "2024-03-24T08:22:06.356Z" }, +] + +[[package]] +name = "click-plugins" +version = "1.1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url 
= "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343, upload-time = "2025-06-25T00:47:37.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051, upload-time = "2025-06-25T00:47:36.731Z" }, +] + +[[package]] +name = "click-repl" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "prompt-toolkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449, upload-time = "2023-06-15T12:43:51.141Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289, upload-time = "2023-06-15T12:43:48.626Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "colorlog" +version = "6.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a2/61/f083b5ac52e505dfc1c624eafbf8c7589a0d7f32daa398d2e7590efa5fda/colorlog-6.10.1.tar.gz", hash = "sha256:eb4ae5cb65fe7fec7773c2306061a8e63e02efc2c72eba9d27b0fa23c94f1321", size = 17162, upload-time = "2025-10-16T16:14:11.978Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6d/c1/e419ef3723a074172b68aaa89c9f3de486ed4c2399e2dbd8113a4fdcaf9e/colorlog-6.10.1-py3-none-any.whl", hash = "sha256:2d7e8348291948af66122cff006c9f8da6255d224e7cf8e37d8de2df3bad8c9c", size = 11743, upload-time = "2025-10-16T16:14:10.512Z" }, +] + +[[package]] +name = "configparser" +version = "7.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/ac/ea19242153b5e8be412a726a70e82c7b5c1537c83f61b20995b2eda3dcd7/configparser-7.2.0.tar.gz", hash = "sha256:b629cc8ae916e3afbd36d1b3d093f34193d851e11998920fdcfc4552218b7b70", size = 51273, upload-time = "2025-03-08T16:04:09.339Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/09/fe/f61e7129e9e689d9e40bbf8a36fb90f04eceb477f4617c02c6a18463e81f/configparser-7.2.0-py3-none-any.whl", hash = "sha256:fee5e1f3db4156dcd0ed95bc4edfa3580475537711f67a819c966b389d09ce62", size = 17232, upload-time = "2025-03-08T16:04:07.743Z" }, +] + +[[package]] +name = "coverage" +version = "7.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1c/38/ee22495420457259d2f3390309505ea98f98a5eed40901cf62196abad006/coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050", size = 811905, upload-time = 
"2025-10-15T15:15:08.542Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/3a/ee1074c15c408ddddddb1db7dd904f6b81bc524e01f5a1c5920e13dbde23/coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847", size = 215912, upload-time = "2025-10-15T15:12:40.665Z" }, + { url = "https://files.pythonhosted.org/packages/70/c4/9f44bebe5cb15f31608597b037d78799cc5f450044465bcd1ae8cb222fe1/coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc", size = 216310, upload-time = "2025-10-15T15:12:42.461Z" }, + { url = "https://files.pythonhosted.org/packages/42/01/5e06077cfef92d8af926bdd86b84fb28bf9bc6ad27343d68be9b501d89f2/coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0", size = 246706, upload-time = "2025-10-15T15:12:44.001Z" }, + { url = "https://files.pythonhosted.org/packages/40/b8/7a3f1f33b35cc4a6c37e759137533119560d06c0cc14753d1a803be0cd4a/coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7", size = 248634, upload-time = "2025-10-15T15:12:45.768Z" }, + { url = "https://files.pythonhosted.org/packages/7a/41/7f987eb33de386bc4c665ab0bf98d15fcf203369d6aacae74f5dd8ec489a/coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623", size = 250741, upload-time = "2025-10-15T15:12:47.222Z" }, + { url = "https://files.pythonhosted.org/packages/23/c1/a4e0ca6a4e83069fb8216b49b30a7352061ca0cb38654bd2dc96b7b3b7da/coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287", size = 246837, upload-time = "2025-10-15T15:12:48.904Z" }, + { url = "https://files.pythonhosted.org/packages/5d/03/ced062a17f7c38b4728ff76c3acb40d8465634b20b4833cdb3cc3a74e115/coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552", size = 248429, upload-time = "2025-10-15T15:12:50.73Z" }, + { url = "https://files.pythonhosted.org/packages/97/af/a7c6f194bb8c5a2705ae019036b8fe7f49ea818d638eedb15fdb7bed227c/coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de", size = 246490, upload-time = "2025-10-15T15:12:52.646Z" }, + { url = "https://files.pythonhosted.org/packages/ab/c3/aab4df02b04a8fde79068c3c41ad7a622b0ef2b12e1ed154da986a727c3f/coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601", size = 246208, upload-time = "2025-10-15T15:12:54.586Z" }, + { url = "https://files.pythonhosted.org/packages/30/d8/e282ec19cd658238d60ed404f99ef2e45eed52e81b866ab1518c0d4163cf/coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e", size = 247126, upload-time = "2025-10-15T15:12:56.485Z" }, + { url = "https://files.pythonhosted.org/packages/d1/17/a635fa07fac23adb1a5451ec756216768c2767efaed2e4331710342a3399/coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c", size = 218314, upload-time = "2025-10-15T15:12:58.365Z" }, + { url = "https://files.pythonhosted.org/packages/2a/29/2ac1dfcdd4ab9a70026edc8d715ece9b4be9a1653075c658ee6f271f394d/coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9", size = 219203, upload-time = "2025-10-15T15:12:59.902Z" }, + { url 
= "https://files.pythonhosted.org/packages/03/21/5ce8b3a0133179115af4c041abf2ee652395837cb896614beb8ce8ddcfd9/coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745", size = 217879, upload-time = "2025-10-15T15:13:01.35Z" }, + { url = "https://files.pythonhosted.org/packages/5f/04/642c1d8a448ae5ea1369eac8495740a79eb4e581a9fb0cbdce56bbf56da1/coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68", size = 207761, upload-time = "2025-10-15T15:15:06.439Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "cryptography" +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = 
"https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = 
"https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, + { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, + { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, +] + +[[package]] +name = "dataclasses-json" +version = "0.6.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "marshmallow" }, + { name = "typing-inspect" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/64/a4/f71d9cf3a5ac257c993b5ca3f93df5f7fb395c725e7f1e6479d2514173c3/dataclasses_json-0.6.7.tar.gz", hash = "sha256:b6b3e528266ea45b9535223bc53ca645f5208833c29229e847b3f26a1cc55fc0", size = 32227, upload-time = "2024-06-09T16:20:19.103Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/be/d0d44e092656fe7a06b55e6103cbce807cdbdee17884a5367c68c9860853/dataclasses_json-0.6.7-py3-none-any.whl", hash = 
"sha256:0dbf33f26c8d5305befd61b39d2b3414e8a407bedc2834dea9b8d642666fb40a", size = 28686, upload-time = "2024-06-09T16:20:16.715Z" }, +] + +[[package]] +name = "dembrane" +version = "0.0.1" +source = { virtual = "." } +dependencies = [ + { name = "aiofiles" }, + { name = "aiohttp" }, + { name = "alembic" }, + { name = "anthropic" }, + { name = "apscheduler" }, + { name = "asyncpg" }, + { name = "backoff" }, + { name = "boto3" }, + { name = "colorlog" }, + { name = "configparser" }, + { name = "directus-py-sdk" }, + { name = "dramatiq", extra = ["redis", "watch"] }, + { name = "dramatiq-workflow" }, + { name = "fastapi" }, + { name = "ffmpeg-python" }, + { name = "flower" }, + { name = "gevent" }, + { name = "google-cloud-aiplatform" }, + { name = "gunicorn" }, + { name = "isort" }, + { name = "jinja2" }, + { name = "langchain" }, + { name = "langchain-community" }, + { name = "langchain-experimental" }, + { name = "langchain-openai" }, + { name = "lightrag-dembrane" }, + { name = "litellm" }, + { name = "lz4" }, + { name = "mypy" }, + { name = "neo4j" }, + { name = "nest-asyncio" }, + { name = "networkx" }, + { name = "numpy" }, + { name = "openai" }, + { name = "pandas" }, + { name = "pandas-stubs" }, + { name = "pgvector" }, + { name = "pipmaster" }, + { name = "psycopg", extra = ["binary", "pool"] }, + { name = "pydantic" }, + { name = "pydub" }, + { name = "pylance" }, + { name = "pypdf" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "pytest-xdist" }, + { name = "python-dotenv" }, + { name = "python-jose", extra = ["cryptography"] }, + { name = "python-multipart" }, + { name = "pyyaml" }, + { name = "ruff" }, + { name = "scikit-learn" }, + { name = "sentry-dramatiq" }, + { name = "sentry-sdk" }, + { name = "setuptools" }, + { name = "sqlalchemy" }, + { name = "tenacity" }, + { name = "tiktoken" }, + { name = "types-aiofiles" }, + { name = "types-python-jose" }, + { name = 
"types-requests" }, + { name = "types-tqdm" }, + { name = "uvicorn", extra = ["standard"] }, +] + +[package.metadata] +requires-dist = [ + { name = "aiofiles", specifier = "==23.2.*" }, + { name = "aiohttp", specifier = "==3.11.14" }, + { name = "alembic", specifier = "==1.13.*" }, + { name = "anthropic", specifier = "==0.43.*" }, + { name = "apscheduler", specifier = "==3.11.*" }, + { name = "asyncpg", specifier = "==0.30.0" }, + { name = "backoff", specifier = "==2.2.*" }, + { name = "boto3", specifier = "==1.37.*" }, + { name = "colorlog", specifier = ">=6.9.0" }, + { name = "configparser", specifier = "==7.2.0" }, + { name = "directus-py-sdk", specifier = "==1.1.1" }, + { name = "dramatiq", extras = ["redis", "watch"], specifier = "==1.17.*" }, + { name = "dramatiq-workflow", specifier = "==0.2.*" }, + { name = "fastapi", specifier = "==0.109.*" }, + { name = "ffmpeg-python", specifier = ">=0.2.0" }, + { name = "flower", specifier = ">=2.0.1" }, + { name = "gevent", specifier = ">=25.4.2" }, + { name = "google-cloud-aiplatform", specifier = "==1.120.*" }, + { name = "gunicorn", specifier = "==21.2.*" }, + { name = "isort", specifier = "==5.13.*" }, + { name = "jinja2", specifier = "==3.1.*" }, + { name = "langchain", specifier = "==0.1.*" }, + { name = "langchain-community", specifier = "==0.0.*" }, + { name = "langchain-experimental", specifier = "==0.0.*" }, + { name = "langchain-openai", specifier = "==0.0.*" }, + { name = "lightrag-dembrane", specifier = "==1.2.7.8" }, + { name = "litellm", specifier = "==1.76.*" }, + { name = "lz4", specifier = "==4.4.*" }, + { name = "mypy", specifier = ">=1.16.0" }, + { name = "neo4j", specifier = "==5.28.1" }, + { name = "nest-asyncio", specifier = "==1.6.0" }, + { name = "networkx", specifier = "==3.4.*" }, + { name = "numpy", specifier = "==1.26.*" }, + { name = "openai", specifier = "==1.99.*" }, + { name = "pandas", specifier = "==2.2.*" }, + { name = "pandas-stubs", specifier = ">=2.2.2.240514" }, + { name = 
"pgvector", specifier = "==0.2.*" }, + { name = "pipmaster", specifier = "==0.5.1" }, + { name = "psycopg", extras = ["binary", "pool"], specifier = "==3.1.*" }, + { name = "pydantic", specifier = "==2.10.6" }, + { name = "pydub", specifier = "==0.25.1" }, + { name = "pylance", specifier = ">=0.30.0" }, + { name = "pypdf", specifier = "==4.0.*" }, + { name = "pytest" }, + { name = "pytest", specifier = "==8.3.4" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "pytest-xdist", specifier = ">=3.6.1" }, + { name = "python-dotenv", specifier = "==1.0.*" }, + { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" }, + { name = "python-multipart", specifier = "==0.0.*" }, + { name = "pyyaml", specifier = "==6.0.2" }, + { name = "ruff", specifier = ">=0.11.12" }, + { name = "scikit-learn", specifier = "==1.4.*" }, + { name = "sentry-dramatiq", specifier = "==0.3.*" }, + { name = "sentry-sdk", specifier = "==2.2.1" }, + { name = "setuptools", specifier = "==75.8.0" }, + { name = "sqlalchemy", specifier = "==2.0.*" }, + { name = "tenacity", specifier = "==8.3.*" }, + { name = "tiktoken", specifier = "==0.9.0" }, + { name = "types-aiofiles", specifier = "==23.2.*" }, + { name = "types-python-jose", specifier = ">=3.3.4.20240106" }, + { name = "types-requests", specifier = ">=2.32.0.20240602" }, + { name = "types-tqdm", specifier = ">4" }, + { name = "uvicorn", extras = ["standard"], specifier = "==0.27.*" }, +] + +[[package]] +name = "directus-py-sdk" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "sqlparse" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/2b/5a8c3722d9947a26bf9af6ae2f1ca25d8c2199cc2e0038aa348f9b8aec62/directus_py_sdk-1.1.1.tar.gz", hash = "sha256:d20d6503c01fcba09d342a5348e2039399085ae0212dd9b5c634ca60628fe542", size = 16689, upload-time = "2025-07-29T13:54:47.441Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/1c/e210bd60fca009a18ec296955f7634af45fc10eaba3fd2a899e4ed2d1f82/directus_py_sdk-1.1.1-py3-none-any.whl", hash = "sha256:a199e2aba6bac3d46801cb014bc78c73a63d59da2dd54ba65d9aed54fc0687cb", size = 14717, upload-time = "2025-07-29T13:54:46.004Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, +] + +[[package]] +name = "dramatiq" +version = "1.17.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "prometheus-client" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/c6/7a/6792ddc64a77d22bfd97261b751a7a76cf2f9d62edc59aafb679ac48b77d/dramatiq-1.17.1.tar.gz", hash = "sha256:2675d2f57e0d82db3a7d2a60f1f9c536365349db78c7f8d80a63e4c54697647a", size = 99071, upload-time = "2024-10-26T05:09:28.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/36/925c7afd5db4f1a3f00676b9c3c58f31ff7ae29a347282d86c8d429280a5/dramatiq-1.17.1-py3-none-any.whl", hash = "sha256:951cdc334478dff8e5150bb02a6f7a947d215ee24b5aedaf738eff20e17913df", size = 120382, upload-time = "2024-10-26T05:09:26.436Z" }, +] + +[package.optional-dependencies] +redis = [ + { name = "redis" }, +] +watch = [ + { name = "watchdog" }, + { name = "watchdog-gevent" }, +] + +[[package]] +name = "dramatiq-workflow" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dramatiq" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/76/37aac2a78b554f26c5433316f8c1372eaa04ee1ec7cca88211b58566dd17/dramatiq_workflow-0.2.0.tar.gz", hash = "sha256:ae5b9774510abe59f52517b01b46e4c1c3f87f39a24baf64171669cfaf466566", size = 14058, upload-time = "2024-12-20T21:21:26.386Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/84/9c6fadd9645e773909c05ea9727535563f66c8505d6bc487196584283505/dramatiq_workflow-0.2.0-py3-none-any.whl", hash = "sha256:48e3c39cf6fc797dec4ad8360b15c82e222b25ea55450c8cc6a659b4709c55f1", size = 15126, upload-time = "2024-12-20T21:21:21.926Z" }, +] + +[[package]] +name = "ecdsa" +version = "0.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/1f/924e3caae75f471eae4b26bd13b698f6af2c44279f67af317439c2f4c46a/ecdsa-0.19.1.tar.gz", hash = "sha256:478cba7b62555866fcb3bb3fe985e06decbdb68ef55713c4e5ab98c57d508e61", size = 201793, upload-time = "2025-03-13T11:52:43.25Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/cb/a3/460c57f094a4a165c84a1341c373b0a4f5ec6ac244b998d5021aade89b77/ecdsa-0.19.1-py2.py3-none-any.whl", hash = "sha256:30638e27cf77b7e15c4c4cc1973720149e1033827cfd00661ca5c8cc0cdb24c3", size = 150607, upload-time = "2025-03-13T11:52:41.757Z" }, +] + +[[package]] +name = "execnet" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bb/ff/b4c0dc78fbe20c3e59c0c7334de0c27eb4001a2b2017999af398bf730817/execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3", size = 166524, upload-time = "2024-04-08T09:04:19.245Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/09/2aea36ff60d16dd8879bdb2f5b3ee0ba8d08cbbdcdfe870e695ce3784385/execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc", size = 40612, upload-time = "2024-04-08T09:04:17.414Z" }, +] + +[[package]] +name = "fastapi" +version = "0.109.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/d5/33a8992fe0e811211cd1cbc219cefa4732f9fb0555921346a59d1fec0040/fastapi-0.109.2.tar.gz", hash = "sha256:f3817eac96fe4f65a2ebb4baa000f394e55f5fccdaf7f75250804bc58f354f73", size = 11720963, upload-time = "2024-02-04T21:26:10.672Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/97/60351307ab4502908d29f64f2801a36709a3f1888447bb328bc373d6ca0e/fastapi-0.109.2-py3-none-any.whl", hash = "sha256:2c9bab24667293b501cad8dd388c05240c850b58ec5876ee3283c47d6e1e3a4d", size = 92071, upload-time = "2024-02-04T21:26:07.478Z" }, +] + +[[package]] +name = "fastuuid" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c3/7d/d9daedf0f2ebcacd20d599928f8913e9d2aea1d56d2d355a93bfa2b611d7/fastuuid-0.14.0.tar.gz", hash = "sha256:178947fc2f995b38497a74172adee64fdeb8b7ec18f2a5934d037641ba265d26", size = 18232, upload-time = "2025-10-19T22:19:22.402Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/f3/12481bda4e5b6d3e698fbf525df4443cc7dce746f246b86b6fcb2fba1844/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:73946cb950c8caf65127d4e9a325e2b6be0442a224fd51ba3b6ac44e1912ce34", size = 516386, upload-time = "2025-10-19T22:42:40.176Z" }, + { url = "https://files.pythonhosted.org/packages/59/19/2fc58a1446e4d72b655648eb0879b04e88ed6fa70d474efcf550f640f6ec/fastuuid-0.14.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:12ac85024637586a5b69645e7ed986f7535106ed3013640a393a03e461740cb7", size = 264569, upload-time = "2025-10-19T22:25:50.977Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/3c74756e5b02c40cfcc8b1d8b5bac4edbd532b55917a6bcc9113550e99d1/fastuuid-0.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:05a8dde1f395e0c9b4be515b7a521403d1e8349443e7641761af07c7ad1624b1", size = 254366, upload-time = "2025-10-19T22:29:49.166Z" }, + { url = "https://files.pythonhosted.org/packages/52/96/d761da3fccfa84f0f353ce6e3eb8b7f76b3aa21fd25e1b00a19f9c80a063/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09378a05020e3e4883dfdab438926f31fea15fd17604908f3d39cbeb22a0b4dc", size = 278978, upload-time = "2025-10-19T22:35:41.306Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c2/f84c90167cc7765cb82b3ff7808057608b21c14a38531845d933a4637307/fastuuid-0.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbb0c4b15d66b435d2538f3827f05e44e2baafcc003dd7d8472dc67807ab8fd8", size = 279692, upload-time = "2025-10-19T22:25:36.997Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/7b/4bacd03897b88c12348e7bd77943bac32ccf80ff98100598fcff74f75f2e/fastuuid-0.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cd5a7f648d4365b41dbf0e38fe8da4884e57bed4e77c83598e076ac0c93995e7", size = 303384, upload-time = "2025-10-19T22:29:46.578Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a2/584f2c29641df8bd810d00c1f21d408c12e9ad0c0dafdb8b7b29e5ddf787/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c0a94245afae4d7af8c43b3159d5e3934c53f47140be0be624b96acd672ceb73", size = 460921, upload-time = "2025-10-19T22:36:42.006Z" }, + { url = "https://files.pythonhosted.org/packages/24/68/c6b77443bb7764c760e211002c8638c0c7cce11cb584927e723215ba1398/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2b29e23c97e77c3a9514d70ce343571e469098ac7f5a269320a0f0b3e193ab36", size = 480575, upload-time = "2025-10-19T22:28:18.975Z" }, + { url = "https://files.pythonhosted.org/packages/5a/87/93f553111b33f9bb83145be12868c3c475bf8ea87c107063d01377cc0e8e/fastuuid-0.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1e690d48f923c253f28151b3a6b4e335f2b06bf669c68a02665bc150b7839e94", size = 452317, upload-time = "2025-10-19T22:25:32.75Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8c/a04d486ca55b5abb7eaa65b39df8d891b7b1635b22db2163734dc273579a/fastuuid-0.14.0-cp311-cp311-win32.whl", hash = "sha256:a6f46790d59ab38c6aa0e35c681c0484b50dc0acf9e2679c005d61e019313c24", size = 154804, upload-time = "2025-10-19T22:24:15.615Z" }, + { url = "https://files.pythonhosted.org/packages/9c/b2/2d40bf00820de94b9280366a122cbaa60090c8cf59e89ac3938cf5d75895/fastuuid-0.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:e150eab56c95dc9e3fefc234a0eedb342fac433dacc273cd4d150a5b0871e1fa", size = 156099, upload-time = "2025-10-19T22:24:31.646Z" }, +] + +[[package]] +name = "ffmpeg-python" +version = "0.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name 
= "future" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/5e/d5f9105d59c1325759d838af4e973695081fbbc97182baf73afc78dec266/ffmpeg-python-0.2.0.tar.gz", hash = "sha256:65225db34627c578ef0e11c8b1eb528bb35e024752f6f10b78c011f6f64c4127", size = 21543, upload-time = "2019-07-06T00:19:08.989Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/0c/56be52741f75bad4dc6555991fabd2e07b432d333da82c11ad701123888a/ffmpeg_python-0.2.0-py3-none-any.whl", hash = "sha256:ac441a0404e053f8b6a1113a77c0f452f1cfc62f6344a769475ffdc0f56c23c5", size = 25024, upload-time = "2019-07-06T00:19:07.215Z" }, +] + +[[package]] +name = "filelock" +version = "3.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/46/0028a82567109b5ef6e4d2a1f04a583fb513e6cf9527fcdd09afd817deeb/filelock-3.20.0.tar.gz", hash = "sha256:711e943b4ec6be42e1d4e6690b48dc175c822967466bb31c0c293f34334c13f4", size = 18922, upload-time = "2025-10-08T18:03:50.056Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/91/7216b27286936c16f5b4d0c530087e4a54eead683e6b0b73dd0c64844af6/filelock-3.20.0-py3-none-any.whl", hash = "sha256:339b4732ffda5cd79b13f4e2711a31b0365ce445d95d243bb996273d072546a2", size = 16054, upload-time = "2025-10-08T18:03:48.35Z" }, +] + +[[package]] +name = "flower" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "celery" }, + { name = "humanize" }, + { name = "prometheus-client" }, + { name = "pytz" }, + { name = "tornado" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/a1/357f1b5d8946deafdcfdd604f51baae9de10aafa2908d0b7322597155f92/flower-2.0.1.tar.gz", hash = "sha256:5ab717b979530770c16afb48b50d2a98d23c3e9fe39851dcf6bc4d01845a02a0", size = 3220408, upload-time = "2023-08-13T14:37:46.073Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a6/ff/ee2f67c0ff146ec98b5df1df637b2bc2d17beeb05df9f427a67bd7a7d79c/flower-2.0.1-py2.py3-none-any.whl", hash = "sha256:9db2c621eeefbc844c8dd88be64aef61e84e2deb29b271e02ab2b5b9f01068e2", size = 383553, upload-time = "2023-08-13T14:37:41.552Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, + { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, + { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, + { url = "https://files.pythonhosted.org/packages/11/b1/71a477adc7c36e5fb628245dfbdea2166feae310757dea848d02bd0689fd/frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f", size = 231067, upload-time = "2025-10-06T05:35:49.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/45/7e/afe40eca3a2dc19b9904c0f5d7edfe82b5304cb831391edec0ac04af94c2/frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695", size = 233160, upload-time = "2025-10-06T05:35:51.729Z" }, + { url = "https://files.pythonhosted.org/packages/a6/aa/7416eac95603ce428679d273255ffc7c998d4132cfae200103f164b108aa/frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52", size = 228544, upload-time = "2025-10-06T05:35:53.246Z" }, + { url = "https://files.pythonhosted.org/packages/8b/3d/2a2d1f683d55ac7e3875e4263d28410063e738384d3adc294f5ff3d7105e/frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581", size = 243797, upload-time = "2025-10-06T05:35:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/78/1e/2d5565b589e580c296d3bb54da08d206e797d941a83a6fdea42af23be79c/frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567", size = 247923, upload-time = "2025-10-06T05:35:55.861Z" }, + { url = "https://files.pythonhosted.org/packages/aa/c3/65872fcf1d326a7f101ad4d86285c403c87be7d832b7470b77f6d2ed5ddc/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b", size = 230886, upload-time = "2025-10-06T05:35:57.399Z" }, + { url = "https://files.pythonhosted.org/packages/a0/76/ac9ced601d62f6956f03cc794f9e04c81719509f85255abf96e2510f4265/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92", size = 245731, upload-time = "2025-10-06T05:35:58.563Z" }, + { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, + { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, + { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, + { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, + { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "fsspec" +version = "2025.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/7f/2747c0d332b9acfa75dc84447a066fdf812b5a6b8d30472b74d309bfe8cb/fsspec-2025.10.0.tar.gz", hash = "sha256:b6789427626f068f9a83ca4e8a3cc050850b6c0f71f99ddb4f542b8266a26a59", size = 309285, upload-time = "2025-10-30T14:58:44.036Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/02/a6b21098b1d5d6249b7c5ab69dde30108a71e4e819d4a9778f1de1d5b70d/fsspec-2025.10.0-py3-none-any.whl", hash = "sha256:7c7712353ae7d875407f97715f0e1ffcc21e33d5b24556cb1e090ae9409ec61d", size = 200966, upload-time = "2025-10-30T14:58:42.53Z" }, +] + +[[package]] +name = "future" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/b2/4140c69c6a66432916b26158687e821ba631a4c9273c474343badf84d3ba/future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05", size = 1228490, upload-time = "2024-02-21T11:52:38.461Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/71/ae30dadffc90b9006d77af76b393cb9dfbfc9629f339fc1574a1c52e6806/future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216", size = 491326, upload-time = "2024-02-21T11:52:35.956Z" }, +] + +[[package]] +name = "gevent" +version = "25.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation == 'CPython' and sys_platform == 'win32'" }, + { name = "greenlet", marker = "platform_python_implementation == 'CPython'" }, + { 
name = "zope-event" }, + { name = "zope-interface" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/48/b3ef2673ffb940f980966694e40d6d32560f3ffa284ecaeb5ea3a90a6d3f/gevent-25.9.1.tar.gz", hash = "sha256:adf9cd552de44a4e6754c51ff2e78d9193b7fa6eab123db9578a210e657235dd", size = 5059025, upload-time = "2025-09-17T16:15:34.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/86/03f8db0704fed41b0fa830425845f1eb4e20c92efa3f18751ee17809e9c6/gevent-25.9.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:18e5aff9e8342dc954adb9c9c524db56c2f3557999463445ba3d9cbe3dada7b7", size = 1792418, upload-time = "2025-09-17T15:41:24.384Z" }, + { url = "https://files.pythonhosted.org/packages/5f/35/f6b3a31f0849a62cfa2c64574bcc68a781d5499c3195e296e892a121a3cf/gevent-25.9.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1cdf6db28f050ee103441caa8b0448ace545364f775059d5e2de089da975c457", size = 1875700, upload-time = "2025-09-17T15:48:59.652Z" }, + { url = "https://files.pythonhosted.org/packages/66/1e/75055950aa9b48f553e061afa9e3728061b5ccecca358cef19166e4ab74a/gevent-25.9.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:812debe235a8295be3b2a63b136c2474241fa5c58af55e6a0f8cfc29d4936235", size = 1831365, upload-time = "2025-09-17T15:49:19.426Z" }, + { url = "https://files.pythonhosted.org/packages/31/e8/5c1f6968e5547e501cfa03dcb0239dff55e44c3660a37ec534e32a0c008f/gevent-25.9.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b28b61ff9216a3d73fe8f35669eefcafa957f143ac534faf77e8a19eb9e6883a", size = 2122087, upload-time = "2025-09-17T15:15:12.329Z" }, + { url = "https://files.pythonhosted.org/packages/c0/2c/ebc5d38a7542af9fb7657bfe10932a558bb98c8a94e4748e827d3823fced/gevent-25.9.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5e4b6278b37373306fc6b1e5f0f1cf56339a1377f67c35972775143d8d7776ff", size = 1808776, upload-time = 
"2025-09-17T15:52:40.16Z" }, + { url = "https://files.pythonhosted.org/packages/e6/26/e1d7d6c8ffbf76fe1fbb4e77bdb7f47d419206adc391ec40a8ace6ebbbf0/gevent-25.9.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d99f0cb2ce43c2e8305bf75bee61a8bde06619d21b9d0316ea190fc7a0620a56", size = 2179141, upload-time = "2025-09-17T15:24:09.895Z" }, + { url = "https://files.pythonhosted.org/packages/1d/6c/bb21fd9c095506aeeaa616579a356aa50935165cc0f1e250e1e0575620a7/gevent-25.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:72152517ecf548e2f838c61b4be76637d99279dbaa7e01b3924df040aa996586", size = 1677941, upload-time = "2025-09-17T19:59:50.185Z" }, +] + +[[package]] +name = "google-api-core" +version = "2.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-auth" }, + { name = "googleapis-common-protos" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/da/83d7043169ac2c8c7469f0e375610d78ae2160134bf1b80634c482fa079c/google_api_core-2.28.1.tar.gz", hash = "sha256:2b405df02d68e68ce0fbc138559e6036559e685159d148ae5861013dc201baf8", size = 176759, upload-time = "2025-10-28T21:34:51.529Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/d4/90197b416cb61cefd316964fd9e7bd8324bcbafabf40eef14a9f20b81974/google_api_core-2.28.1-py3-none-any.whl", hash = "sha256:4021b0f8ceb77a6fb4de6fde4502cecab45062e66ff4f2895169e0b35bc9466c", size = 173706, upload-time = "2025-10-28T21:34:50.151Z" }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, + { name = "grpcio-status" }, +] + +[[package]] +name = "google-auth" +version = "2.42.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cachetools" }, + { name = "pyasn1-modules" }, + { name = "rsa" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/6b/22a77135757c3a7854c9f008ffed6bf4e8851616d77faf13147e9ab5aae6/google_auth-2.42.1.tar.gz", 
hash = "sha256:30178b7a21aa50bffbdc1ffcb34ff770a2f65c712170ecd5446c4bef4dc2b94e", size = 295541, upload-time = "2025-10-30T16:42:19.381Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/92/05/adeb6c495aec4f9d93f9e2fc29eeef6e14d452bba11d15bdb874ce1d5b10/google_auth-2.42.1-py2.py3-none-any.whl", hash = "sha256:eb73d71c91fc95dbd221a2eb87477c278a355e7367a35c0d84e6b0e5f9b4ad11", size = 222550, upload-time = "2025-10-30T16:42:17.878Z" }, +] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.120.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docstring-parser" }, + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-bigquery" }, + { name = "google-cloud-resource-manager" }, + { name = "google-cloud-storage" }, + { name = "google-genai" }, + { name = "packaging" }, + { name = "proto-plus" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "shapely" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/d0/baae0ba4bf0b2ad1c408562f8e0943c7aa43a3d7ff479a0aa1db11f03108/google_cloud_aiplatform-1.120.0.tar.gz", hash = "sha256:56d28ced011fe0935ddf42762da6758ad8beae057f470e7e87200ef50c4deaaa", size = 9674037, upload-time = "2025-10-08T18:02:31.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/fd/a2bf59444a59bf6a88c0f22921126616f22ae8f8acac780d701ccef78fd1/google_cloud_aiplatform-1.120.0-py2.py3-none-any.whl", hash = "sha256:cbceda8bdd642fc2181096c5cb035feb3586c579abe97ae8a6de9cb0a40ec72c", size = 8043461, upload-time = "2025-10-08T18:02:28.693Z" }, +] + +[[package]] +name = "google-cloud-bigquery" +version = "3.30.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-resumable-media" }, + { name = "packaging" }, + { name = "python-dateutil" 
}, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/3dda76b3ec029578838b1fe6396e6b86eb574200352240e23dea49265bb7/google_cloud_bigquery-3.30.0.tar.gz", hash = "sha256:7e27fbafc8ed33cc200fe05af12ecd74d279fe3da6692585a3cef7aee90575b6", size = 474389, upload-time = "2025-02-27T18:49:45.416Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/6d/856a6ca55c1d9d99129786c929a27dd9d31992628ebbff7f5d333352981f/google_cloud_bigquery-3.30.0-py2.py3-none-any.whl", hash = "sha256:f4d28d846a727f20569c9b2d2f4fa703242daadcb2ec4240905aa485ba461877", size = 247885, upload-time = "2025-02-27T18:49:43.454Z" }, +] + +[[package]] +name = "google-cloud-core" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/03/ef0bc99d0e0faf4fdbe67ac445e18cdaa74824fd93cd069e7bb6548cb52d/google_cloud_core-2.5.0.tar.gz", hash = "sha256:7c1b7ef5c92311717bd05301aa1a91ffbc565673d3b0b4163a52d8413a186963", size = 36027, upload-time = "2025-10-29T23:17:39.513Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/20/bfa472e327c8edee00f04beecc80baeddd2ab33ee0e86fd7654da49d45e9/google_cloud_core-2.5.0-py3-none-any.whl", hash = "sha256:67d977b41ae6c7211ee830c7912e41003ea8194bff15ae7d72fd6f51e57acabc", size = 29469, upload-time = "2025-10-29T23:17:38.548Z" }, +] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core", extra = ["grpc"] }, + { name = "google-auth" }, + { name = "grpc-google-iam-v1" }, + { name = "grpcio" }, + { name = "proto-plus" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/19/b95d0e8814ce42522e434cdd85c0cb6236d874d9adf6685fc8e6d1fda9d1/google_cloud_resource_manager-1.15.0.tar.gz", hash = 
"sha256:3d0b78c3daa713f956d24e525b35e9e9a76d597c438837171304d431084cedaf", size = 449227, upload-time = "2025-10-20T14:57:01.108Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/93/5aef41a5f146ad4559dd7040ae5fa8e7ddcab4dfadbef6cb4b66d775e690/google_cloud_resource_manager-1.15.0-py3-none-any.whl", hash = "sha256:0ccde5db644b269ddfdf7b407a2c7b60bdbf459f8e666344a5285601d00c7f6d", size = 397151, upload-time = "2025-10-20T14:53:45.409Z" }, +] + +[[package]] +name = "google-cloud-storage" +version = "2.19.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-api-core" }, + { name = "google-auth" }, + { name = "google-cloud-core" }, + { name = "google-crc32c" }, + { name = "google-resumable-media" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/76/4d965702e96bb67976e755bed9828fa50306dca003dbee08b67f41dd265e/google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2", size = 5535488, upload-time = "2024-12-05T01:35:06.49Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/94/6db383d8ee1adf45dc6c73477152b82731fa4c4a46d9c1932cc8757e0fd4/google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba", size = 131787, upload-time = "2024-12-05T01:35:04.736Z" }, +] + +[[package]] +name = "google-crc32c" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ae/87802e6d9f9d69adfaedfcfd599266bf386a54d0be058b532d04c794f76d/google_crc32c-1.7.1.tar.gz", hash = "sha256:2bff2305f98846f3e825dbeec9ee406f89da7962accdb29356e4eadc251bd472", size = 14495, upload-time = "2025-03-26T14:29:13.32Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f7/94/220139ea87822b6fdfdab4fb9ba81b3fff7ea2c82e2af34adc726085bffc/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:6fbab4b935989e2c3610371963ba1b86afb09537fd0c633049be82afe153ac06", size = 30468, upload-time = "2025-03-26T14:32:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/94/97/789b23bdeeb9d15dc2904660463ad539d0318286d7633fe2760c10ed0c1c/google_crc32c-1.7.1-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:ed66cbe1ed9cbaaad9392b5259b3eba4a9e565420d734e6238813c428c3336c9", size = 30313, upload-time = "2025-03-26T14:57:38.758Z" }, + { url = "https://files.pythonhosted.org/packages/81/b8/976a2b843610c211e7ccb3e248996a61e87dbb2c09b1499847e295080aec/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee6547b657621b6cbed3562ea7826c3e11cab01cd33b74e1f677690652883e77", size = 33048, upload-time = "2025-03-26T14:41:30.679Z" }, + { url = "https://files.pythonhosted.org/packages/c9/16/a3842c2cf591093b111d4a5e2bfb478ac6692d02f1b386d2a33283a19dc9/google_crc32c-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d68e17bad8f7dd9a49181a1f5a8f4b251c6dbc8cc96fb79f1d321dfd57d66f53", size = 32669, upload-time = "2025-03-26T14:41:31.432Z" }, + { url = "https://files.pythonhosted.org/packages/04/17/ed9aba495916fcf5fe4ecb2267ceb851fc5f273c4e4625ae453350cfd564/google_crc32c-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:6335de12921f06e1f774d0dd1fbea6bf610abe0887a1638f64d694013138be5d", size = 33476, upload-time = "2025-03-26T14:29:10.211Z" }, + { url = "https://files.pythonhosted.org/packages/16/1b/1693372bf423ada422f80fd88260dbfd140754adb15cbc4d7e9a68b1cb8e/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85fef7fae11494e747c9fd1359a527e5970fc9603c90764843caabd3a16a0a48", size = 28241, upload-time = "2025-03-26T14:41:45.898Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/3c/2a19a60a473de48717b4efb19398c3f914795b64a96cf3fbe82588044f78/google_crc32c-1.7.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6efb97eb4369d52593ad6f75e7e10d053cf00c48983f7a973105bc70b0ac4d82", size = 28048, upload-time = "2025-03-26T14:41:46.696Z" }, +] + +[[package]] +name = "google-genai" +version = "1.47.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "google-auth" }, + { name = "httpx" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "tenacity" }, + { name = "typing-extensions" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/97/784fba9bc6c41263ff90cb9063eadfdd755dde79cfa5a8d0e397b067dcf9/google_genai-1.47.0.tar.gz", hash = "sha256:ecece00d0a04e6739ea76cc8dad82ec9593d9380aaabef078990e60574e5bf59", size = 241471, upload-time = "2025-10-29T22:01:02.88Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/ef/e080e8d67c270ea320956bb911a9359664fc46d3b87d1f029decd33e5c4c/google_genai-1.47.0-py3-none-any.whl", hash = "sha256:e3851237556cbdec96007d8028b4b1f2425cdc5c099a8dc36b72a57e42821b60", size = 241506, upload-time = "2025-10-29T22:01:00.982Z" }, +] + +[[package]] +name = "google-resumable-media" +version = "2.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "google-crc32c" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/5a/0efdc02665dca14e0837b62c8a1a93132c264bd02054a15abb2218afe0ae/google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0", size = 2163099, upload-time = "2024-08-07T22:20:38.555Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/35/b8d3baf8c46695858cb9d8835a53baa1eeb9906ddaf2f728a5f5b640fd1e/google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = 
"sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa", size = 81251, upload-time = "2024-08-07T22:20:36.409Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.71.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/43/b25abe02db2911397819003029bef768f68a974f2ece483e6084d1a5f754/googleapis_common_protos-1.71.0.tar.gz", hash = "sha256:1aec01e574e29da63c80ba9f7bbf1ccfaacf1da877f23609fe236ca7c72a2e2e", size = 146454, upload-time = "2025-10-20T14:58:08.732Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/e8/eba9fece11d57a71e3e22ea672742c8f3cf23b35730c9e96db768b295216/googleapis_common_protos-1.71.0-py3-none-any.whl", hash = "sha256:59034a1d849dc4d18971997a72ac56246570afdd17f9369a0ff68218d50ab78c", size = 294576, upload-time = "2025-10-20T14:56:21.295Z" }, +] + +[package.optional-dependencies] +grpc = [ + { name = "grpcio" }, +] + +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, + { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, +] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos", extra = ["grpc"] }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/1e/1011451679a983f2f5c6771a1682542ecb027776762ad031fd0d7129164b/grpc_google_iam_v1-0.14.3.tar.gz", hash = "sha256:879ac4ef33136c5491a6300e27575a9ec760f6cdf9a2518798c1b8977a5dc389", size = 23745, upload-time = "2025-10-15T21:14:53.318Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/bd/330a1bbdb1afe0b96311249e699b6dc9cfc17916394fd4503ac5aca2514b/grpc_google_iam_v1-0.14.3-py3-none-any.whl", hash = "sha256:7a7f697e017a067206a3dfef44e4c634a34d3dee135fe7d7a4613fe3e59217e6", size = 32690, upload-time = "2025-10-15T21:14:51.72Z" }, +] + +[[package]] +name = "grpcio" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/e0/318c1ce3ae5a17894d5791e87aea147587c9e702f24122cc7a5c8bbaeeb1/grpcio-1.76.0.tar.gz", hash = "sha256:7be78388d6da1a25c0d5ec506523db58b18be22d9c37d8d3a32c08be4987bd73", size = 12785182, upload-time = "2025-10-21T16:23:12.106Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/00/8163a1beeb6971f66b4bbe6ac9457b97948beba8dd2fc8e1281dce7f79ec/grpcio-1.76.0-cp311-cp311-linux_armv7l.whl", hash = 
"sha256:2e1743fbd7f5fa713a1b0a8ac8ebabf0ec980b5d8809ec358d488e273b9cf02a", size = 5843567, upload-time = "2025-10-21T16:20:52.829Z" }, + { url = "https://files.pythonhosted.org/packages/10/c1/934202f5cf335e6d852530ce14ddb0fef21be612ba9ecbbcbd4d748ca32d/grpcio-1.76.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a8c2cf1209497cf659a667d7dea88985e834c24b7c3b605e6254cbb5076d985c", size = 11848017, upload-time = "2025-10-21T16:20:56.705Z" }, + { url = "https://files.pythonhosted.org/packages/11/0b/8dec16b1863d74af6eb3543928600ec2195af49ca58b16334972f6775663/grpcio-1.76.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:08caea849a9d3c71a542827d6df9d5a69067b0a1efbea8a855633ff5d9571465", size = 6412027, upload-time = "2025-10-21T16:20:59.3Z" }, + { url = "https://files.pythonhosted.org/packages/d7/64/7b9e6e7ab910bea9d46f2c090380bab274a0b91fb0a2fe9b0cd399fffa12/grpcio-1.76.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:f0e34c2079d47ae9f6188211db9e777c619a21d4faba6977774e8fa43b085e48", size = 7075913, upload-time = "2025-10-21T16:21:01.645Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/093c46e9546073cefa789bd76d44c5cb2abc824ca62af0c18be590ff13ba/grpcio-1.76.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8843114c0cfce61b40ad48df65abcfc00d4dba82eae8718fab5352390848c5da", size = 6615417, upload-time = "2025-10-21T16:21:03.844Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b6/5709a3a68500a9c03da6fb71740dcdd5ef245e39266461a03f31a57036d8/grpcio-1.76.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8eddfb4d203a237da6f3cc8a540dad0517d274b5a1e9e636fd8d2c79b5c1d397", size = 7199683, upload-time = "2025-10-21T16:21:06.195Z" }, + { url = "https://files.pythonhosted.org/packages/91/d3/4b1f2bf16ed52ce0b508161df3a2d186e4935379a159a834cb4a7d687429/grpcio-1.76.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:32483fe2aab2c3794101c2a159070584e5db11d0aa091b2c0ea9c4fc43d0d749", size = 8163109, upload-time = "2025-10-21T16:21:08.498Z" }, + { url = "https://files.pythonhosted.org/packages/5c/61/d9043f95f5f4cf085ac5dd6137b469d41befb04bd80280952ffa2a4c3f12/grpcio-1.76.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dcfe41187da8992c5f40aa8c5ec086fa3672834d2be57a32384c08d5a05b4c00", size = 7626676, upload-time = "2025-10-21T16:21:10.693Z" }, + { url = "https://files.pythonhosted.org/packages/36/95/fd9a5152ca02d8881e4dd419cdd790e11805979f499a2e5b96488b85cf27/grpcio-1.76.0-cp311-cp311-win32.whl", hash = "sha256:2107b0c024d1b35f4083f11245c0e23846ae64d02f40b2b226684840260ed054", size = 3997688, upload-time = "2025-10-21T16:21:12.746Z" }, + { url = "https://files.pythonhosted.org/packages/60/9c/5c359c8d4c9176cfa3c61ecd4efe5affe1f38d9bae81e81ac7186b4c9cc8/grpcio-1.76.0-cp311-cp311-win_amd64.whl", hash = "sha256:522175aba7af9113c48ec10cc471b9b9bd4f6ceb36aeb4544a8e2c80ed9d252d", size = 4709315, upload-time = "2025-10-21T16:21:15.26Z" }, +] + +[[package]] +name = "grpcio-status" +version = "1.76.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/46/e9f19d5be65e8423f886813a2a9d0056ba94757b0c5007aa59aed1a961fa/grpcio_status-1.76.0.tar.gz", hash = "sha256:25fcbfec74c15d1a1cb5da3fab8ee9672852dc16a5a9eeb5baf7d7a9952943cd", size = 13679, upload-time = "2025-10-21T16:28:52.545Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/cc/27ba60ad5a5f2067963e6a858743500df408eb5855e98be778eaef8c9b02/grpcio_status-1.76.0-py3-none-any.whl", hash = "sha256:380568794055a8efbbd8871162df92012e0228a5f6dffaf57f2a00c534103b18", size = 14425, upload-time = "2025-10-21T16:28:40.853Z" }, +] + +[[package]] +name = "gunicorn" +version = "21.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ 
+ { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/89/acd9879fa6a5309b4bf16a5a8855f1e58f26d38e0c18ede9b3a70996b021/gunicorn-21.2.0.tar.gz", hash = "sha256:88ec8bff1d634f98e61b9f65bc4bf3cd918a90806c6f5c48bc5603849ec81033", size = 3632557, upload-time = "2023-07-19T11:46:46.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/2a/c3a878eccb100ccddf45c50b6b8db8cf3301a6adede6e31d48e8531cab13/gunicorn-21.2.0-py3-none-any.whl", hash = "sha256:3213aa5e8c24949e792bcacfc176fef362e7aac80b76c56f6b5122bf350722f0", size = 80176, upload-time = "2023-07-19T11:46:44.51Z" }, +] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "hf-xet" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/6e/0f11bacf08a67f7fb5ee09740f2ca54163863b07b70d579356e9222ce5d8/hf_xet-1.2.0.tar.gz", hash = "sha256:a8c27070ca547293b6890c4bf389f713f80e8c478631432962bb7f4bc0bd7d7f", size = 506020, upload-time = "2025-10-24T19:04:32.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/96/2d/22338486473df5923a9ab7107d375dbef9173c338ebef5098ef593d2b560/hf_xet-1.2.0-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:46740d4ac024a7ca9b22bebf77460ff43332868b661186a8e46c227fdae01848", size = 2866099, upload-time = 
"2025-10-24T19:04:15.366Z" }, + { url = "https://files.pythonhosted.org/packages/7f/8c/c5becfa53234299bc2210ba314eaaae36c2875e0045809b82e40a9544f0c/hf_xet-1.2.0-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:27df617a076420d8845bea087f59303da8be17ed7ec0cd7ee3b9b9f579dff0e4", size = 2722178, upload-time = "2025-10-24T19:04:13.695Z" }, + { url = "https://files.pythonhosted.org/packages/9a/92/cf3ab0b652b082e66876d08da57fcc6fa2f0e6c70dfbbafbd470bb73eb47/hf_xet-1.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3651fd5bfe0281951b988c0facbe726aa5e347b103a675f49a3fa8144c7968fd", size = 3320214, upload-time = "2025-10-24T19:04:03.596Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/3f7ec4a1b6a65bf45b059b6d4a5d38988f63e193056de2f420137e3c3244/hf_xet-1.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d06fa97c8562fb3ee7a378dd9b51e343bc5bc8190254202c9771029152f5e08c", size = 3229054, upload-time = "2025-10-24T19:04:01.949Z" }, + { url = "https://files.pythonhosted.org/packages/0b/dd/7ac658d54b9fb7999a0ccb07ad863b413cbaf5cf172f48ebcd9497ec7263/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:4c1428c9ae73ec0939410ec73023c4f842927f39db09b063b9482dac5a3bb737", size = 3413812, upload-time = "2025-10-24T19:04:24.585Z" }, + { url = "https://files.pythonhosted.org/packages/92/68/89ac4e5b12a9ff6286a12174c8538a5930e2ed662091dd2572bbe0a18c8a/hf_xet-1.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a55558084c16b09b5ed32ab9ed38421e2d87cf3f1f89815764d1177081b99865", size = 3508920, upload-time = "2025-10-24T19:04:26.927Z" }, + { url = "https://files.pythonhosted.org/packages/cb/44/870d44b30e1dcfb6a65932e3e1506c103a8a5aea9103c337e7a53180322c/hf_xet-1.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:e6584a52253f72c9f52f9e549d5895ca7a471608495c4ecaa6cc73dba2b24d69", size = 2905735, upload-time = "2025-10-24T19:04:35.928Z" }, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, +] + +[[package]] +name = "httptools" +version = "0.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/46/120a669232c7bdedb9d52d4aeae7e6c7dfe151e99dc70802e2fc7a5e1993/httptools-0.7.1.tar.gz", hash = "sha256:abd72556974f8e7c74a259655924a717a2365b236c882c3f6f8a45fe94703ac9", size = 258961, upload-time = "2025-10-10T03:55:08.559Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9c/08/17e07e8d89ab8f343c134616d72eebfe03798835058e2ab579dcc8353c06/httptools-0.7.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:474d3b7ab469fefcca3697a10d11a32ee2b9573250206ba1e50d5980910da657", size = 206521, upload-time = "2025-10-10T03:54:31.002Z" }, + { url = "https://files.pythonhosted.org/packages/aa/06/c9c1b41ff52f16aee526fd10fbda99fa4787938aa776858ddc4a1ea825ec/httptools-0.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3c3b7366bb6c7b96bd72d0dbe7f7d5eead261361f013be5f6d9590465ea1c70", size = 110375, upload-time = "2025-10-10T03:54:31.941Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cc/10935db22fda0ee34c76f047590ca0a8bd9de531406a3ccb10a90e12ea21/httptools-0.7.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = 
"sha256:379b479408b8747f47f3b253326183d7c009a3936518cdb70db58cffd369d9df", size = 456621, upload-time = "2025-10-10T03:54:33.176Z" }, + { url = "https://files.pythonhosted.org/packages/0e/84/875382b10d271b0c11aa5d414b44f92f8dd53e9b658aec338a79164fa548/httptools-0.7.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cad6b591a682dcc6cf1397c3900527f9affef1e55a06c4547264796bbd17cf5e", size = 454954, upload-time = "2025-10-10T03:54:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/30/e1/44f89b280f7e46c0b1b2ccee5737d46b3bb13136383958f20b580a821ca0/httptools-0.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:eb844698d11433d2139bbeeb56499102143beb582bd6c194e3ba69c22f25c274", size = 440175, upload-time = "2025-10-10T03:54:35.942Z" }, + { url = "https://files.pythonhosted.org/packages/6f/7e/b9287763159e700e335028bc1824359dc736fa9b829dacedace91a39b37e/httptools-0.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f65744d7a8bdb4bda5e1fa23e4ba16832860606fcc09d674d56e425e991539ec", size = 440310, upload-time = "2025-10-10T03:54:37.1Z" }, + { url = "https://files.pythonhosted.org/packages/b3/07/5b614f592868e07f5c94b1f301b5e14a21df4e8076215a3bccb830a687d8/httptools-0.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:135fbe974b3718eada677229312e97f3b31f8a9c8ffa3ae6f565bf808d5b6bcb", size = 86875, upload-time = "2025-10-10T03:54:38.421Z" }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, +] + +[[package]] +name = "huggingface-hub" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "fsspec" }, + { name = "hf-xet", marker = "platform_machine == 'AMD64' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'arm64' or platform_machine == 'x86_64'" }, + { name = "httpx" }, + { name = "packaging" }, + { name = "pyyaml" }, + { name = "shellingham" }, + { name = "tqdm" }, + { name = "typer-slim" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/e0/308849e8ff9590505815f4a300cb8941a21c5889fb94c955d992539b5bef/huggingface_hub-1.0.1.tar.gz", hash = "sha256:87b506d5b45f0d1af58df7cf8bab993ded25d6077c2e959af58444df8b9589f3", size = 419291, upload-time = "2025-10-28T12:48:43.526Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/fb/d71f914bc69e6357cbde04db62ef15497cd27926d95f03b4930997c4c390/huggingface_hub-1.0.1-py3-none-any.whl", hash = "sha256:7e255cd9b3432287a34a86933057abb1b341d20b97fb01c40cbd4e053764ae13", size = 503841, upload-time = "2025-10-28T12:48:41.821Z" }, +] + +[[package]] +name = "humanize" +version = "4.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/43/50033d25ad96a7f3845f40999b4778f753c3901a11808a584fed7c00d9f5/humanize-4.14.0.tar.gz", hash = "sha256:2fa092705ea640d605c435b1ca82b2866a1b601cdf96f076d70b79a855eba90d", size = 82939, upload-time = "2025-10-15T13:04:51.214Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c3/5b/9512c5fb6c8218332b530f13500c6ff5f3ce3342f35e0dd7be9ac3856fd3/humanize-4.14.0-py3-none-any.whl", hash = "sha256:d57701248d040ad456092820e6fde56c930f17749956ac47f4f655c0c547bfff", size = 132092, upload-time = "2025-10-15T13:04:49.404Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", 
hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "isort" +version = "5.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303, upload-time = "2023-12-13T20:37:26.124Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310, upload-time = "2023-12-13T20:37:23.244Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "jiter" +version = "0.11.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url 
= "https://files.pythonhosted.org/packages/a3/68/0357982493a7b20925aece061f7fb7a2678e3b232f8d73a6edb7e5304443/jiter-0.11.1.tar.gz", hash = "sha256:849dcfc76481c0ea0099391235b7ca97d7279e0fa4c86005457ac7c88e8b76dc", size = 168385, upload-time = "2025-10-17T11:31:15.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/34/c9e6cfe876f9a24f43ed53fe29f052ce02bd8d5f5a387dbf46ad3764bef0/jiter-0.11.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b0088ff3c374ce8ce0168523ec8e97122ebb788f950cf7bb8e39c7dc6a876a2", size = 310160, upload-time = "2025-10-17T11:28:59.174Z" }, + { url = "https://files.pythonhosted.org/packages/bc/9f/b06ec8181d7165858faf2ac5287c54fe52b2287760b7fe1ba9c06890255f/jiter-0.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:74433962dd3c3090655e02e461267095d6c84f0741c7827de11022ef8d7ff661", size = 316573, upload-time = "2025-10-17T11:29:00.905Z" }, + { url = "https://files.pythonhosted.org/packages/66/49/3179d93090f2ed0c6b091a9c210f266d2d020d82c96f753260af536371d0/jiter-0.11.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d98030e345e6546df2cc2c08309c502466c66c4747b043f1a0d415fada862b8", size = 348998, upload-time = "2025-10-17T11:29:02.321Z" }, + { url = "https://files.pythonhosted.org/packages/ae/9d/63db2c8eabda7a9cad65a2e808ca34aaa8689d98d498f5a2357d7a2e2cec/jiter-0.11.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d6db0b2e788db46bec2cf729a88b6dd36959af2abd9fa2312dfba5acdd96dcb", size = 363413, upload-time = "2025-10-17T11:29:03.787Z" }, + { url = "https://files.pythonhosted.org/packages/25/ff/3e6b3170c5053053c7baddb8d44e2bf11ff44cd71024a280a8438ae6ba32/jiter-0.11.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55678fbbda261eafe7289165dd2ddd0e922df5f9a1ae46d7c79a5a15242bd7d1", size = 487144, upload-time = "2025-10-17T11:29:05.37Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/50/b63fcadf699893269b997f4c2e88400bc68f085c6db698c6e5e69d63b2c1/jiter-0.11.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6a6b74fae8e40497653b52ce6ca0f1b13457af769af6fb9c1113efc8b5b4d9be", size = 376215, upload-time = "2025-10-17T11:29:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/39/8c/57a8a89401134167e87e73471b9cca321cf651c1fd78c45f3a0f16932213/jiter-0.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a55a453f8b035eb4f7852a79a065d616b7971a17f5e37a9296b4b38d3b619e4", size = 359163, upload-time = "2025-10-17T11:29:09.047Z" }, + { url = "https://files.pythonhosted.org/packages/4b/96/30b0cdbffbb6f753e25339d3dbbe26890c9ef119928314578201c758aace/jiter-0.11.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2638148099022e6bdb3f42904289cd2e403609356fb06eb36ddec2d50958bc29", size = 385344, upload-time = "2025-10-17T11:29:10.69Z" }, + { url = "https://files.pythonhosted.org/packages/c6/d5/31dae27c1cc9410ad52bb514f11bfa4f286f7d6ef9d287b98b8831e156ec/jiter-0.11.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:252490567a5d990986f83b95a5f1ca1bf205ebd27b3e9e93bb7c2592380e29b9", size = 517972, upload-time = "2025-10-17T11:29:12.174Z" }, + { url = "https://files.pythonhosted.org/packages/61/1e/5905a7a3aceab80de13ab226fd690471a5e1ee7e554dc1015e55f1a6b896/jiter-0.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d431d52b0ca2436eea6195f0f48528202100c7deda354cb7aac0a302167594d5", size = 508408, upload-time = "2025-10-17T11:29:13.597Z" }, + { url = "https://files.pythonhosted.org/packages/91/12/1c49b97aa49077e136e8591cef7162f0d3e2860ae457a2d35868fd1521ef/jiter-0.11.1-cp311-cp311-win32.whl", hash = "sha256:db6f41e40f8bae20c86cb574b48c4fd9f28ee1c71cb044e9ec12e78ab757ba3a", size = 203937, upload-time = "2025-10-17T11:29:14.894Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/9d/2255f7c17134ee9892c7e013c32d5bcf4bce64eb115402c9fe5e727a67eb/jiter-0.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0cc407b8e6cdff01b06bb80f61225c8b090c3df108ebade5e0c3c10993735b19", size = 207589, upload-time = "2025-10-17T11:29:16.166Z" }, + { url = "https://files.pythonhosted.org/packages/3c/28/6307fc8f95afef84cae6caf5429fee58ef16a582c2ff4db317ceb3e352fa/jiter-0.11.1-cp311-cp311-win_arm64.whl", hash = "sha256:fe04ea475392a91896d1936367854d346724a1045a247e5d1c196410473b8869", size = 188391, upload-time = "2025-10-17T11:29:17.488Z" }, + { url = "https://files.pythonhosted.org/packages/9d/51/bd41562dd284e2a18b6dc0a99d195fd4a3560d52ab192c42e56fe0316643/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:e642b5270e61dd02265866398707f90e365b5db2eb65a4f30c789d826682e1f6", size = 306871, upload-time = "2025-10-17T11:31:03.616Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cb/64e7f21dd357e8cd6b3c919c26fac7fc198385bbd1d85bb3b5355600d787/jiter-0.11.1-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:464ba6d000585e4e2fd1e891f31f1231f497273414f5019e27c00a4b8f7a24ad", size = 301454, upload-time = "2025-10-17T11:31:05.338Z" }, + { url = "https://files.pythonhosted.org/packages/55/b0/54bdc00da4ef39801b1419a01035bd8857983de984fd3776b0be6b94add7/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:055568693ab35e0bf3a171b03bb40b2dcb10352359e0ab9b5ed0da2bf1eb6f6f", size = 336801, upload-time = "2025-10-17T11:31:06.893Z" }, + { url = "https://files.pythonhosted.org/packages/de/8f/87176ed071d42e9db415ed8be787ef4ef31a4fa27f52e6a4fbf34387bd28/jiter-0.11.1-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0c69ea798d08a915ba4478113efa9e694971e410056392f4526d796f136d3fa", size = 343452, upload-time = "2025-10-17T11:31:08.259Z" }, +] + +[[package]] +name = "jmespath" 
+version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "joblib" +version = "1.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e8/5d/447af5ea094b9e4c4054f82e223ada074c552335b9b4b2d14bd9b35a67c4/joblib-1.5.2.tar.gz", hash = "sha256:3faa5c39054b2f03ca547da9b2f52fde67c06240c31853f306aea97f13647b55", size = 331077, upload-time = "2025-08-27T12:15:46.575Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/e8/685f47e0d754320684db4425a0967f7d3fa70126bffd76110b7009a0090f/joblib-1.5.2-py3-none-any.whl", hash = "sha256:4e1f0bdbb987e6d843c70cf43714cb276623def372df3c22fe5266b2670bc241", size = 308396, upload-time = "2025-08-27T12:15:45.188Z" }, +] + +[[package]] +name = "jsonpatch" +version = "1.33" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpointer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/78/18813351fe5d63acad16aec57f94ec2b70a09e53ca98145589e185423873/jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c", size = 21699, upload-time = "2023-06-26T12:07:29.144Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = 
"sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898, upload-time = "2023-06-16T21:01:28.466Z" }, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6a/0a/eebeb1fa92507ea94016a2a790b93c2ae41a7e18778f85471dc54475ed25/jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef", size = 9114, upload-time = "2024-06-10T19:24:42.462Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/71/92/5e77f98553e9e75130c78900d000368476aed74276eb8ae8796f65f00918/jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942", size = 7595, upload-time = "2024-06-10T19:24:40.698Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, +] + +[[package]] +name = "kombu" +version = "5.5.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "amqp" }, + { name = "packaging" }, + { name = "tzdata" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0f/d3/5ff936d8319ac86b9c409f1501b07c426e6ad41966fedace9ef1b966e23f/kombu-5.5.4.tar.gz", hash = "sha256:886600168275ebeada93b888e831352fe578168342f0d1d5833d88ba0d847363", size = 461992, upload-time = "2025-06-01T10:19:22.281Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/70/a07dcf4f62598c8ad579df241af55ced65bed76e42e45d3c368a6d82dbc1/kombu-5.5.4-py3-none-any.whl", hash = "sha256:a12ed0557c238897d8e518f1d1fdf84bd1516c5e305af2dacd85c2015115feb8", size = 210034, upload-time = "2025-06-01T10:19:20.436Z" }, +] + +[[package]] +name = "langchain" +version = "0.1.20" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "dataclasses-json" }, + { name = "langchain-community" }, + { name = "langchain-core" }, + { name = "langchain-text-splitters" }, + { name = "langsmith" }, + { name = "numpy" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sqlalchemy" }, + { name = "tenacity" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/88/94/8d917da143b30c3088be9f51719634827ab19207cb290a51de3859747783/langchain-0.1.20.tar.gz", hash = "sha256:f35c95eed8c8375e02dce95a34f2fd4856a4c98269d6dc34547a23dba5beab7e", size = 420688, upload-time = "2024-05-10T21:59:40.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/28/da40a6b12e7842a0c8b443f8cc5c6f59e49d7a9071cfad064b9639c6b044/langchain-0.1.20-py3-none-any.whl", hash = "sha256:09991999fbd6c3421a12db3c7d1f52d55601fc41d9b2a3ef51aab2e0e9c38da9", size = 1014619, upload-time = "2024-05-10T21:59:36.417Z" }, +] + +[[package]] +name = "langchain-community" +version = "0.0.38" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "dataclasses-json" }, + { name = "langchain-core" }, + { name = "langsmith" }, + { name = "numpy" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sqlalchemy" }, + { name = "tenacity" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/b7/c20502452183d27b8c0466febb227fae3213f77e9a13683de685e7227f39/langchain_community-0.0.38.tar.gz", hash = "sha256:127fc4b75bc67b62fe827c66c02e715a730fef8fe69bd2023d466bab06b5810d", size = 1373468, upload-time = "2024-05-08T22:44:26.295Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/d3/1f4d1941ae5a627299c8ea052847b99ad6674b97b699d8a08fc4faf25d3e/langchain_community-0.0.38-py3-none-any.whl", hash = "sha256:ecb48660a70a08c90229be46b0cc5f6bc9f38f2833ee44c57dfab9bf3a2c121a", size = 2028164, upload-time = "2024-05-08T22:44:23.434Z" }, +] + +[[package]] +name = "langchain-core" +version = "0.1.53" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jsonpatch" }, + { name = "langsmith" }, + { name = "packaging" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "tenacity" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/e9/65/3aaff91481b9d629a31630a40000d403bff24b3c62d9abc87dc998298cce/langchain_core-0.1.53.tar.gz", hash = "sha256:df3773a553b5335eb645827b99a61a7018cea4b11dc45efa2613fde156441cec", size = 236665, upload-time = "2024-11-02T00:27:25.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/10/285fa149ce95300d91ea0bb124eec28889e5ebbcb59434d1fe2f31098d72/langchain_core-0.1.53-py3-none-any.whl", hash = "sha256:02a88a21e3bd294441b5b741625fa4b53b1c684fd58ba6e5d9028e53cbe8542f", size = 303059, upload-time = "2024-11-02T00:27:23.144Z" }, +] + +[[package]] +name = "langchain-experimental" +version = "0.0.58" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain" }, + { name = "langchain-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/8f/fb7dd61abbc016de4fe3860b80be9a1faa47ec1d33bab6d15ed20fd30df1/langchain_experimental-0.0.58.tar.gz", hash = "sha256:8ef10ff6b39f44ef468f8f21beb3749957d2262ec64d05db2719934936ca0285", size = 133442, upload-time = "2024-05-08T04:43:08.763Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/16/fe6aaa26273e21e20a5564fd10d638ac32ce184e113908d560956025e03f/langchain_experimental-0.0.58-py3-none-any.whl", hash = "sha256:106d3bc7df3dd20687378db7534c2fc21e2589201d43de42f832a1e3913dd55b", size = 199359, upload-time = "2024-05-08T04:43:07.208Z" }, +] + +[[package]] +name = "langchain-openai" +version = "0.0.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, + { name = "openai" }, + { name = "tiktoken" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c7/bb/451a3d2244350a40aa3dc822a1c405391bba1f79a8830986bd8b27d62260/langchain_openai-0.0.8.tar.gz", hash = "sha256:b7aba7fcc52305e78b08197ebc54fc45cc06dbc40ba5b913bc48a22b30a4f5c9", size = 25908, upload-time = "2024-02-27T12:26:41.204Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/81/63/012be16114559243aabcc9ec570366df84591dc9f8f3c2349a398e9b3626/langchain_openai-0.0.8-py3-none-any.whl", hash = "sha256:4862fc72cecbee0240aaa6df0234d5893dd30cd33ca23ac5cfdd86c11d2c44df", size = 32286, upload-time = "2024-02-27T12:26:39.99Z" }, +] + +[[package]] +name = "langchain-text-splitters" +version = "0.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "langchain-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e8/fa/88d65b0f696d8d4f37037f1418f89bc1078cd74d20054623bb7fffcecaf1/langchain_text_splitters-0.0.2.tar.gz", hash = "sha256:ac8927dc0ba08eba702f6961c9ed7df7cead8de19a9f7101ab2b5ea34201b3c1", size = 18638, upload-time = "2024-05-16T03:16:36.815Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/6a/804fe5ca07129046a4cedc0697222ddde6156cd874c4c4ba29e4d271828a/langchain_text_splitters-0.0.2-py3-none-any.whl", hash = "sha256:13887f32705862c1e1454213cb7834a63aae57c26fcd80346703a1d09c46168d", size = 23539, upload-time = "2024-05-16T03:16:35.727Z" }, +] + +[[package]] +name = "langsmith" +version = "0.1.147" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "httpx" }, + { name = "orjson", marker = "platform_python_implementation != 'PyPy'" }, + { name = "pydantic" }, + { name = "requests" }, + { name = "requests-toolbelt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/56/201dd94d492ae47c1bf9b50cacc1985113dc2288d8f15857e1f4a6818376/langsmith-0.1.147.tar.gz", hash = "sha256:2e933220318a4e73034657103b3b1a3a6109cc5db3566a7e8e03be8d6d7def7a", size = 300453, upload-time = "2024-11-27T17:32:41.297Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812, upload-time = "2024-11-27T17:32:39.569Z" 
}, +] + +[[package]] +name = "lightrag-dembrane" +version = "1.2.7.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8f/61/856d3f89dd0f87d39d0f5bfc716b6e2ef58eaf11a7bfe3d04bc0b229d2c7/lightrag_dembrane-1.2.7.8.tar.gz", hash = "sha256:5836ae15f6968acc6cd5c772bc59581820fcef1dcc323ae2d0248957cb1651f6", size = 1002398, upload-time = "2025-05-15T13:08:53.319Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/ea/e8d9f8e9a097f2dfd0cf82816ea2e932f51015d68137a4f27804ddd6b3eb/lightrag_dembrane-1.2.7.8-py3-none-any.whl", hash = "sha256:8e713cc438bda3522137044229dbccf866b48c865c4927b9a5ce8345cc32f432", size = 989173, upload-time = "2025-05-15T13:08:50.331Z" }, +] + +[[package]] +name = "litellm" +version = "1.76.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "click" }, + { name = "fastuuid" }, + { name = "httpx" }, + { name = "importlib-metadata" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "openai" }, + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "tiktoken" }, + { name = "tokenizers" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/46/57b6539365616452bb6f4401487448ce62e62755738fce55d8222d7a557e/litellm-1.76.3.tar.gz", hash = "sha256:fc81219c59b17b26cc81276ce32582f3715612877ab11c1ea2c26e4853ac67e8", size = 10210403, upload-time = "2025-09-07T01:59:19.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/d9/5f8ed27241b487f51f04573b8ba06d4460ebed9f792ff5cc148649fbf862/litellm-1.76.3-py3-none-any.whl", hash = "sha256:d62e3ff2a80ec5e551c6d7a0fe199ffe718ecb6cbaa43fc9250dd8d7c0944352", size = 9000797, upload-time = "2025-09-07T01:59:16.261Z" }, +] + +[[package]] +name = "lz4" +version = "4.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c6/5a/945f5086326d569f14c84ac6f7fcc3229f0b9b1e8cc536b951fd53dfb9e1/lz4-4.4.4.tar.gz", hash = "sha256:070fd0627ec4393011251a094e08ed9fdcc78cb4e7ab28f507638eee4e39abda", size = 171884, upload-time = "2025-04-01T22:55:58.62Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/e8/63843dc5ecb1529eb38e1761ceed04a0ad52a9ad8929ab8b7930ea2e4976/lz4-4.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ddfc7194cd206496c445e9e5b0c47f970ce982c725c87bd22de028884125b68f", size = 220898, upload-time = "2025-04-01T22:55:23.085Z" }, + { url = "https://files.pythonhosted.org/packages/e4/94/c53de5f07c7dc11cf459aab2a1d754f5df5f693bfacbbe1e4914bfd02f1e/lz4-4.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:714f9298c86f8e7278f1c6af23e509044782fa8220eb0260f8f8f1632f820550", size = 189685, upload-time = "2025-04-01T22:55:24.413Z" }, + { url = "https://files.pythonhosted.org/packages/fe/59/c22d516dd0352f2a3415d1f665ccef2f3e74ecec3ca6a8f061a38f97d50d/lz4-4.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8474c91de47733856c6686df3c4aca33753741da7e757979369c2c0d32918ba", size = 1239225, upload-time = "2025-04-01T22:55:25.737Z" }, + { url = "https://files.pythonhosted.org/packages/81/af/665685072e71f3f0e626221b7922867ec249cd8376aca761078c8f11f5da/lz4-4.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80dd27d7d680ea02c261c226acf1d41de2fd77af4fb2da62b278a9376e380de0", size = 1265881, upload-time = "2025-04-01T22:55:26.817Z" }, + { url = "https://files.pythonhosted.org/packages/90/04/b4557ae381d3aa451388a29755cc410066f5e2f78c847f66f154f4520a68/lz4-4.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b7d6dddfd01b49aedb940fdcaf32f41dc58c926ba35f4e31866aeec2f32f4f4", size = 1185593, upload-time = "2025-04-01T22:55:27.896Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/e4/03636979f4e8bf92c557f998ca98ee4e6ef92e92eaf0ed6d3c7f2524e790/lz4-4.4.4-cp311-cp311-win32.whl", hash = "sha256:4134b9fd70ac41954c080b772816bb1afe0c8354ee993015a83430031d686a4c", size = 88259, upload-time = "2025-04-01T22:55:29.03Z" }, + { url = "https://files.pythonhosted.org/packages/07/f0/9efe53b4945441a5d2790d455134843ad86739855b7e6199977bf6dc8898/lz4-4.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:f5024d3ca2383470f7c4ef4d0ed8eabad0b22b23eeefde1c192cf1a38d5e9f78", size = 99916, upload-time = "2025-04-01T22:55:29.933Z" }, + { url = "https://files.pythonhosted.org/packages/87/c8/1675527549ee174b9e1db089f7ddfbb962a97314657269b1e0344a5eaf56/lz4-4.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:6ea715bb3357ea1665f77874cf8f55385ff112553db06f3742d3cdcec08633f7", size = 89741, upload-time = "2025-04-01T22:55:31.184Z" }, +] + +[[package]] +name = "mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = 
"2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, 
upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, +] + +[[package]] +name = "marshmallow" +version = "3.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/5e/5e53d26b42ab75491cda89b871dab9e97c840bf12c63ec58a1919710cd06/marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6", size = 221825, upload-time = 
"2025-02-03T15:32:25.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/75/51952c7b2d3873b44a0028b1bd26a25078c18f92f256608e8d1dc61b39fd/marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c", size = 50878, upload-time = "2025-02-03T15:32:22.295Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/80/1e/5492c365f222f907de1039b91f922b93fa4f764c713ee858d235495d8f50/multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5", size = 101834, upload-time = "2025-10-06T14:52:30.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, + { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, + { url = "https://files.pythonhosted.org/packages/31/61/0c2d50241ada71ff61a79518db85ada85fdabfcf395d5968dae1cbda04e5/multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c", size = 245212, upload-time 
= "2025-10-06T14:48:58.042Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e0/919666a4e4b57fff1b57f279be1c9316e6cdc5de8a8b525d76f6598fefc7/multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7", size = 246671, upload-time = "2025-10-06T14:49:00.004Z" }, + { url = "https://files.pythonhosted.org/packages/a1/cc/d027d9c5a520f3321b65adea289b965e7bcbd2c34402663f482648c716ce/multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7", size = 225491, upload-time = "2025-10-06T14:49:01.393Z" }, + { url = "https://files.pythonhosted.org/packages/75/c4/bbd633980ce6155a28ff04e6a6492dd3335858394d7bb752d8b108708558/multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9", size = 257322, upload-time = "2025-10-06T14:49:02.745Z" }, + { url = "https://files.pythonhosted.org/packages/4c/6d/d622322d344f1f053eae47e033b0b3f965af01212de21b10bcf91be991fb/multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8", size = 254694, upload-time = "2025-10-06T14:49:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/a8/9f/78f8761c2705d4c6d7516faed63c0ebdac569f6db1bef95e0d5218fdc146/multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd", size = 246715, upload-time = "2025-10-06T14:49:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/78/59/950818e04f91b9c2b95aab3d923d9eabd01689d0dcd889563988e9ea0fd8/multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb", size = 243189, upload-time = "2025-10-06T14:49:07.37Z" }, + { url = "https://files.pythonhosted.org/packages/7a/3d/77c79e1934cad2ee74991840f8a0110966d9599b3af95964c0cd79bb905b/multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6", size = 237845, upload-time = "2025-10-06T14:49:08.759Z" }, + { url = "https://files.pythonhosted.org/packages/63/1b/834ce32a0a97a3b70f86437f685f880136677ac00d8bce0027e9fd9c2db7/multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2", size = 246374, upload-time = "2025-10-06T14:49:10.574Z" }, + { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, + { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, + { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = 
"2025-10-06T14:49:16.871Z" }, + { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, + { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, +] + +[[package]] +name = "mypy" +version = "1.18.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c0/77/8f0d0001ffad290cef2f7f216f96c814866248a0b92a722365ed54648e7e/mypy-1.18.2.tar.gz", hash = "sha256:06a398102a5f203d7477b2923dda3634c36727fa5c237d8f859ef90c42a9924b", size = 3448846, upload-time = "2025-09-19T00:11:10.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/87/cafd3ae563f88f94eec33f35ff722d043e09832ea8530ef149ec1efbaf08/mypy-1.18.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:807d9315ab9d464125aa9fcf6d84fde6e1dc67da0b6f80e7405506b8ac72bc7f", size = 12731198, upload-time = "2025-09-19T00:09:44.857Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e0/1e96c3d4266a06d4b0197ace5356d67d937d8358e2ee3ffac71faa843724/mypy-1.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:776bb00de1778caf4db739c6e83919c1d85a448f71979b6a0edd774ea8399341", 
size = 11817879, upload-time = "2025-09-19T00:09:47.131Z" }, + { url = "https://files.pythonhosted.org/packages/72/ef/0c9ba89eb03453e76bdac5a78b08260a848c7bfc5d6603634774d9cd9525/mypy-1.18.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1379451880512ffce14505493bd9fe469e0697543717298242574882cf8cdb8d", size = 12427292, upload-time = "2025-09-19T00:10:22.472Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/ec4a061dd599eb8179d5411d99775bec2a20542505988f40fc2fee781068/mypy-1.18.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1331eb7fd110d60c24999893320967594ff84c38ac6d19e0a76c5fd809a84c86", size = 13163750, upload-time = "2025-09-19T00:09:51.472Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5f/2cf2ceb3b36372d51568f2208c021870fe7834cf3186b653ac6446511839/mypy-1.18.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ca30b50a51e7ba93b00422e486cbb124f1c56a535e20eff7b2d6ab72b3b2e37", size = 13351827, upload-time = "2025-09-19T00:09:58.311Z" }, + { url = "https://files.pythonhosted.org/packages/c8/7d/2697b930179e7277529eaaec1513f8de622818696857f689e4a5432e5e27/mypy-1.18.2-cp311-cp311-win_amd64.whl", hash = "sha256:664dc726e67fa54e14536f6e1224bcfce1d9e5ac02426d2326e2bb4e081d1ce8", size = 9757983, upload-time = "2025-09-19T00:10:09.071Z" }, + { url = "https://files.pythonhosted.org/packages/87/e3/be76d87158ebafa0309946c4a73831974d4d6ab4f4ef40c3b53a385a66fd/mypy-1.18.2-py3-none-any.whl", hash = "sha256:22a1748707dd62b58d2ae53562ffc4d7f8bcc727e8ac7cbc69c053ddc874d47e", size = 2352367, upload-time = "2025-09-19T00:10:15.489Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = 
"sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "neo4j" +version = "5.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4b/20/733dac16f7cedc80b23093415822c9763302519cba0e7c8bcdb5c01fc512/neo4j-5.28.1.tar.gz", hash = "sha256:ae8e37a1d895099062c75bc359b2cce62099baac7be768d0eba7180c1298e214", size = 231094, upload-time = "2025-02-10T08:36:22.566Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/57/94225fe5e9dabdc0ff60c88cbfcedf11277f4b34e7ab1373d3e62dbdd207/neo4j-5.28.1-py3-none-any.whl", hash = "sha256:6755ef9e5f4e14b403aef1138fb6315b120631a0075c138b5ddb2a06b87b09fd", size = 312258, upload-time = "2025-02-10T08:36:16.209Z" }, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" }, +] + +[[package]] +name = "networkx" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } 
+sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368, upload-time = "2024-10-21T12:39:38.695Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263, upload-time = "2024-10-21T12:39:36.247Z" }, +] + +[[package]] +name = "numpy" +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129, upload-time = "2024-02-06T00:26:44.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554, upload-time = "2024-02-05T23:51:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127, upload-time = "2024-02-05T23:52:15.314Z" }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994, upload-time = "2024-02-05T23:52:47.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005, upload-time = "2024-02-05T23:53:15.637Z" }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297, upload-time = "2024-02-05T23:53:42.16Z" }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567, upload-time = "2024-02-05T23:54:11.696Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812, upload-time = "2024-02-05T23:54:26.453Z" }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913, upload-time = "2024-02-05T23:54:53.933Z" }, +] + +[[package]] +name = "openai" +version = "1.99.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/8a/d2/ef89c6f3f36b13b06e271d3cc984ddd2f62508a0972c1cbcc8485a6644ff/openai-1.99.9.tar.gz", hash = "sha256:f2082d155b1ad22e83247c3de3958eb4255b20ccf4a1de2e6681b6957b554e92", size = 506992, upload-time = "2025-08-12T02:31:10.054Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/fb/df274ca10698ee77b07bff952f302ea627cc12dac6b85289485dd77db6de/openai-1.99.9-py3-none-any.whl", hash = "sha256:9dbcdb425553bae1ac5d947147bebbd630d91bbfc7788394d4c4f3a35682ab3a", size = 786816, upload-time = "2025-08-12T02:31:08.34Z" }, +] + +[[package]] +name = "orjson" +version = "3.11.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/fe/ed708782d6709cc60eb4c2d8a361a440661f74134675c72990f2c48c785f/orjson-3.11.4.tar.gz", hash = "sha256:39485f4ab4c9b30a3943cfe99e1a213c4776fb69e8abd68f66b83d5a0b0fdc6d", size = 5945188, upload-time = "2025-10-24T15:50:38.027Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/1d/1ea6005fffb56715fd48f632611e163d1604e8316a5bad2288bee9a1c9eb/orjson-3.11.4-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e59d23cd93ada23ec59a96f215139753fbfe3a4d989549bcb390f8c00370b39", size = 243498, upload-time = "2025-10-24T15:48:48.101Z" }, + { url = "https://files.pythonhosted.org/packages/37/d7/ffed10c7da677f2a9da307d491b9eb1d0125b0307019c4ad3d665fd31f4f/orjson-3.11.4-cp311-cp311-macosx_15_0_arm64.whl", hash = "sha256:5c3aedecfc1beb988c27c79d52ebefab93b6c3921dbec361167e6559aba2d36d", size = 128961, upload-time = "2025-10-24T15:48:49.571Z" }, + { url = "https://files.pythonhosted.org/packages/a2/96/3e4d10a18866d1368f73c8c44b7fe37cc8a15c32f2a7620be3877d4c55a3/orjson-3.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9e5301f1c2caa2a9a4a303480d79c9ad73560b2e7761de742ab39fe59d9175", size = 130321, upload-time = "2025-10-24T15:48:50.713Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/1f/465f66e93f434f968dd74d5b623eb62c657bdba2332f5a8be9f118bb74c7/orjson-3.11.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8873812c164a90a79f65368f8f96817e59e35d0cc02786a5356f0e2abed78040", size = 129207, upload-time = "2025-10-24T15:48:52.193Z" }, + { url = "https://files.pythonhosted.org/packages/28/43/d1e94837543321c119dff277ae8e348562fe8c0fafbb648ef7cb0c67e521/orjson-3.11.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d7feb0741ebb15204e748f26c9638e6665a5fa93c37a2c73d64f1669b0ddc63", size = 136323, upload-time = "2025-10-24T15:48:54.806Z" }, + { url = "https://files.pythonhosted.org/packages/bf/04/93303776c8890e422a5847dd012b4853cdd88206b8bbd3edc292c90102d1/orjson-3.11.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:01ee5487fefee21e6910da4c2ee9eef005bee568a0879834df86f888d2ffbdd9", size = 137440, upload-time = "2025-10-24T15:48:56.326Z" }, + { url = "https://files.pythonhosted.org/packages/1e/ef/75519d039e5ae6b0f34d0336854d55544ba903e21bf56c83adc51cd8bf82/orjson-3.11.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d40d46f348c0321df01507f92b95a377240c4ec31985225a6668f10e2676f9a", size = 136680, upload-time = "2025-10-24T15:48:57.476Z" }, + { url = "https://files.pythonhosted.org/packages/b5/18/bf8581eaae0b941b44efe14fee7b7862c3382fbc9a0842132cfc7cf5ecf4/orjson-3.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95713e5fc8af84d8edc75b785d2386f653b63d62b16d681687746734b4dfc0be", size = 136160, upload-time = "2025-10-24T15:48:59.631Z" }, + { url = "https://files.pythonhosted.org/packages/c4/35/a6d582766d351f87fc0a22ad740a641b0a8e6fc47515e8614d2e4790ae10/orjson-3.11.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad73ede24f9083614d6c4ca9a85fe70e33be7bf047ec586ee2363bc7418fe4d7", size = 140318, upload-time = "2025-10-24T15:49:00.834Z" }, + { url = 
"https://files.pythonhosted.org/packages/76/b3/5a4801803ab2e2e2d703bce1a56540d9f99a9143fbec7bf63d225044fef8/orjson-3.11.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:842289889de515421f3f224ef9c1f1efb199a32d76d8d2ca2706fa8afe749549", size = 406330, upload-time = "2025-10-24T15:49:02.327Z" }, + { url = "https://files.pythonhosted.org/packages/80/55/a8f682f64833e3a649f620eafefee175cbfeb9854fc5b710b90c3bca45df/orjson-3.11.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3b2427ed5791619851c52a1261b45c233930977e7de8cf36de05636c708fa905", size = 149580, upload-time = "2025-10-24T15:49:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e4/c132fa0c67afbb3eb88274fa98df9ac1f631a675e7877037c611805a4413/orjson-3.11.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c36e524af1d29982e9b190573677ea02781456b2e537d5840e4538a5ec41907", size = 139846, upload-time = "2025-10-24T15:49:04.761Z" }, + { url = "https://files.pythonhosted.org/packages/54/06/dc3491489efd651fef99c5908e13951abd1aead1257c67f16135f95ce209/orjson-3.11.4-cp311-cp311-win32.whl", hash = "sha256:87255b88756eab4a68ec61837ca754e5d10fa8bc47dc57f75cedfeaec358d54c", size = 135781, upload-time = "2025-10-24T15:49:05.969Z" }, + { url = "https://files.pythonhosted.org/packages/79/b7/5e5e8d77bd4ea02a6ac54c42c818afb01dd31961be8a574eb79f1d2cfb1e/orjson-3.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:e2d5d5d798aba9a0e1fede8d853fa899ce2cb930ec0857365f700dffc2c7af6a", size = 131391, upload-time = "2025-10-24T15:49:07.355Z" }, + { url = "https://files.pythonhosted.org/packages/0f/dc/9484127cc1aa213be398ed735f5f270eedcb0c0977303a6f6ddc46b60204/orjson-3.11.4-cp311-cp311-win_arm64.whl", hash = "sha256:6bb6bb41b14c95d4f2702bce9975fda4516f1db48e500102fc4d8119032ff045", size = 126252, upload-time = "2025-10-24T15:49:08.869Z" }, +] + +[[package]] +name = "packaging" +version = "23.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714, upload-time = "2023-10-01T13:50:05.279Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011, upload-time = "2023-10-01T13:50:03.745Z" }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213, upload-time = "2024-09-20T13:10:04.827Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222, upload-time = "2024-09-20T13:08:56.254Z" }, + { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274, upload-time = "2024-09-20T13:08:58.645Z" }, + { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", 
size = 15579836, upload-time = "2024-09-20T19:01:57.571Z" }, + { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505, upload-time = "2024-09-20T13:09:01.501Z" }, + { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420, upload-time = "2024-09-20T19:02:00.678Z" }, + { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457, upload-time = "2024-09-20T13:09:04.105Z" }, + { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166, upload-time = "2024-09-20T13:09:06.917Z" }, +] + +[[package]] +name = "pandas-stubs" +version = "2.3.2.250926" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "types-pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1b/3b/32be58a125db39d0b5f62cc93795f32b5bb2915bd5c4a46f0e35171985e2/pandas_stubs-2.3.2.250926.tar.gz", hash = "sha256:c64b9932760ceefb96a3222b953e6a251321a9832a28548be6506df473a66406", size = 102147, upload-time = "2025-09-26T19:50:39.522Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/40/96/1e4a035eaf4dce9610aac6e43026d0c6baa05773daf6d21e635a4fe19e21/pandas_stubs-2.3.2.250926-py3-none-any.whl", hash = 
"sha256:81121818453dcfe00f45c852f4dceee043640b813830f6e7bd084a4ef7ff7270", size = 159995, upload-time = "2025-09-26T19:50:38.241Z" }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + +[[package]] +name = "pgvector" +version = "0.2.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638, upload-time = "2024-02-07T19:35:03.8Z" }, +] + +[[package]] +name = "pipmaster" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ascii-colors" }, + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/4a/484b223f0f76a9a318545cce6dc2bc978ebda10607af7428f4093ca580f8/pipmaster-0.5.1.tar.gz", hash = "sha256:967323aedcb31372aa46c3b1c36c563cf629a4a059c4e93bfa67dd30668ed1b4", size = 9053, upload-time = "2025-03-10T20:57:46.12Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/64/30642aa0415b7a080ed7439af39c8c307f723f47193f22659e1b51386c74/pipmaster-0.5.1-py3-none-any.whl", hash = 
"sha256:8bbeccd5fa6d83b76afe4158f2be1388dc91161255767b718e4bb6e6ea7a1e92", size = 9600, upload-time = "2025-03-10T20:57:42.746Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "prometheus-client" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/23/53/3edb5d68ecf6b38fcbcc1ad28391117d2a322d9a1a3eff04bfdb184d8c3b/prometheus_client-0.23.1.tar.gz", hash = "sha256:6ae8f9081eaaaf153a2e959d2e6c4f4fb57b12ef76c8c7980202f1e57b48b2ce", size = 80481, upload-time = "2025-09-18T20:47:25.043Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b8/db/14bafcb4af2139e046d03fd00dea7873e48eafe18b7d2797e73d6681f210/prometheus_client-0.23.1-py3-none-any.whl", hash = "sha256:dd1913e6e76b59cfe44e7a4b83e01afc9873c1bdfd2ed8739f1e76aeca115f99", size = 61145, upload-time = "2025-09-18T20:47:23.875Z" }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = 
"2025-08-27T15:24:02.057Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, + { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, + { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, + { url = "https://files.pythonhosted.org/packages/58/1a/3c62c127a8466c9c843bccb503d40a273e5cc69838805f322e2826509e0d/propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566", size = 
214929, upload-time = "2025-10-08T19:46:28.62Z" }, + { url = "https://files.pythonhosted.org/packages/56/b9/8fa98f850960b367c4b8fe0592e7fc341daa7a9462e925228f10a60cf74f/propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165", size = 221778, upload-time = "2025-10-08T19:46:30.358Z" }, + { url = "https://files.pythonhosted.org/packages/46/a6/0ab4f660eb59649d14b3d3d65c439421cf2f87fe5dd68591cbe3c1e78a89/propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc", size = 228144, upload-time = "2025-10-08T19:46:32.607Z" }, + { url = "https://files.pythonhosted.org/packages/52/6a/57f43e054fb3d3a56ac9fc532bc684fc6169a26c75c353e65425b3e56eef/propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48", size = 210030, upload-time = "2025-10-08T19:46:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/40/e2/27e6feebb5f6b8408fa29f5efbb765cd54c153ac77314d27e457a3e993b7/propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570", size = 208252, upload-time = "2025-10-08T19:46:35.309Z" }, + { url = "https://files.pythonhosted.org/packages/9e/f8/91c27b22ccda1dbc7967f921c42825564fa5336a01ecd72eb78a9f4f53c2/propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85", size = 202064, upload-time = "2025-10-08T19:46:36.993Z" }, + { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, + { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, + { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, + { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, + { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, + { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "proto-plus" 
+version = "1.26.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/ff/64a6c8f420818bb873713988ca5492cba3a7946be57e027ac63495157d97/protobuf-6.33.0.tar.gz", hash = "sha256:140303d5c8d2037730c548f8c7b93b20bb1dc301be280c378b82b8894589c954", size = 443463, upload-time = "2025-10-15T20:39:52.159Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/ee/52b3fa8feb6db4a833dfea4943e175ce645144532e8a90f72571ad85df4e/protobuf-6.33.0-cp310-abi3-win32.whl", hash = "sha256:d6101ded078042a8f17959eccd9236fb7a9ca20d3b0098bbcb91533a5680d035", size = 425593, upload-time = "2025-10-15T20:39:40.29Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c6/7a465f1825872c55e0341ff4a80198743f73b69ce5d43ab18043699d1d81/protobuf-6.33.0-cp310-abi3-win_amd64.whl", hash = "sha256:9a031d10f703f03768f2743a1c403af050b6ae1f3480e9c140f39c45f81b13ee", size = 436882, upload-time = "2025-10-15T20:39:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a9/b6eee662a6951b9c3640e8e452ab3e09f117d99fc10baa32d1581a0d4099/protobuf-6.33.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:905b07a65f1a4b72412314082c7dbfae91a9e8b68a0cc1577515f8df58ecf455", size = 427521, 
upload-time = "2025-10-15T20:39:43.803Z" }, + { url = "https://files.pythonhosted.org/packages/10/35/16d31e0f92c6d2f0e77c2a3ba93185130ea13053dd16200a57434c882f2b/protobuf-6.33.0-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e0697ece353e6239b90ee43a9231318302ad8353c70e6e45499fa52396debf90", size = 324445, upload-time = "2025-10-15T20:39:44.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/eb/2a981a13e35cda8b75b5585aaffae2eb904f8f351bdd3870769692acbd8a/protobuf-6.33.0-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:e0a1715e4f27355afd9570f3ea369735afc853a6c3951a6afe1f80d8569ad298", size = 339159, upload-time = "2025-10-15T20:39:46.186Z" }, + { url = "https://files.pythonhosted.org/packages/21/51/0b1cbad62074439b867b4e04cc09b93f6699d78fd191bed2bbb44562e077/protobuf-6.33.0-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:35be49fd3f4fefa4e6e2aacc35e8b837d6703c37a2168a55ac21e9b1bc7559ef", size = 323172, upload-time = "2025-10-15T20:39:47.465Z" }, + { url = "https://files.pythonhosted.org/packages/07/d1/0a28c21707807c6aacd5dc9c3704b2aa1effbf37adebd8caeaf68b17a636/protobuf-6.33.0-py3-none-any.whl", hash = "sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995", size = 170477, upload-time = "2025-10-15T20:39:51.311Z" }, +] + +[[package]] +name = "psycopg" +version = "3.1.20" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5c/6d/0939210f3ba089b360cf0d3741494719152567bc81303cca2c0f1e67c78a/psycopg-3.1.20.tar.gz", hash = "sha256:32f5862ab79f238496236f97fe374a7ab55b4b4bb839a74802026544735f9a07", size = 147567, upload-time = "2024-06-30T17:03:55.421Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/e9/126bbfd5dded758bb109526c5f5f2c2538fe293b15b6fa208db7078c72c4/psycopg-3.1.20-py3-none-any.whl", hash = 
"sha256:898a29f49ac9c903d554f5a6cdc44a8fc564325557c18f82e51f39c1f4fc2aeb", size = 179473, upload-time = "2024-06-30T16:57:04.093Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, +] +pool = [ + { name = "psycopg-pool" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.1.20" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/1c/45e5f240765e80076b08c3ed02c5dfeb5e97d549769b81f8382485d70a15/psycopg_binary-3.1.20-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:802989350fcbc783732bfef660afb34439a62727642a05e8bb9acf7d68993627", size = 3350503, upload-time = "2024-06-30T16:58:27.18Z" }, + { url = "https://files.pythonhosted.org/packages/52/b8/acf96d388692d0bbf2346286f8b175778bc24046aca9181f50d9df9f4714/psycopg_binary-3.1.20-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:01b0e39128715fc37fed6cdc50ab58278eacb75709af503eb607654030975f09", size = 3480091, upload-time = "2024-06-30T16:58:33.872Z" }, + { url = "https://files.pythonhosted.org/packages/41/d4/20604282ff08823d0e90cf092738ea21b339f56a172d8583565b272fc4be/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77af1086bedfa0729465565c636de3519079ba523d7b7ee6e8b9486beb1ee905", size = 4434555, upload-time = "2024-06-30T16:58:40.795Z" }, + { url = "https://files.pythonhosted.org/packages/73/e0/3917b766508bb749e08225492d45ba7463b559de1c8a41d3f8f3cf0927cb/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9b9562395d441e225f354e8c6303ee6993a93aaeb0dbb5b94368f3249ab2388", size = 4231402, upload-time = "2024-06-30T16:58:48.586Z" }, + { url = "https://files.pythonhosted.org/packages/b4/9b/251435896f7459beda355ef3e3919b6b20d067582cd6838ba248d3cff188/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e814d69e5447a93e7b98117ec95a8ce606d3742092fd120960551ed67c376fea", size = 4484218, upload-time = "2024-06-30T16:58:56.911Z" }, + { url = "https://files.pythonhosted.org/packages/a1/12/b2057f9bb8b5f408139266a5b48bfd7578340296d7314d964b9f09e5b18f/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf1c2061600235ae9b11d7ad357cab89ac583a76bdb0199f7a29ac947939c20", size = 4176668, upload-time = "2024-06-30T16:59:02.496Z" }, + { url = "https://files.pythonhosted.org/packages/80/9c/a62fe4167427a06e69882d274ba90903507afc89caf6bcc3671790a20875/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:50f1d807b4167f973a6f67bca39bf656b737f7426be158a1dc9cb0000d020744", size = 3102502, upload-time = "2024-06-30T16:59:07.216Z" }, + { url = "https://files.pythonhosted.org/packages/98/83/bceca23dd830d4069949e70dec9feb03c114cc551b104f0e2b48b1e598c6/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4cf6ec1490232a5b208dae94a8269dc739e6762684c8658a0f3570402db934ae", size = 3080005, upload-time = "2024-06-30T16:59:14.927Z" }, + { url = "https://files.pythonhosted.org/packages/fc/83/bab7c8495e0eb11bf710663afb2849c2d3c91a2bf61b2bd597941f57f80b/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:309c09ec50a9c5c8492c2922ee666df1e30a08b08a9b63083d0daa414eccd09c", size = 3182315, upload-time = "2024-06-30T16:59:21.18Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9b/bd4970faed24ae4a850ee8c6ebd621e98fd86e2962e13038603a726e2504/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e2c33a01799f93ef8c11a023df66280e39ca3c3249a2581adb2a0e5e80801088", size = 3222552, upload-time = "2024-06-30T16:59:27.663Z" }, + { url = "https://files.pythonhosted.org/packages/5d/0b/7ab0744f282df53968f5066d5fd8bf3f994f90bf2a8003ab40278818d0f2/psycopg_binary-3.1.20-cp311-cp311-win_amd64.whl", hash = 
"sha256:2c67532057fda72579b02d9d61e9cc8975982844bd5c3c9dc7f84ce8bcac859c", size = 2899115, upload-time = "2024-06-30T16:59:35.512Z" }, +] + +[[package]] +name = "psycopg-pool" +version = "3.2.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/8f/3ec52b17087c2ed5fa32b64fd4814dde964c9aa4bd49d0d30fc24725ca6d/psycopg_pool-3.2.7.tar.gz", hash = "sha256:a77d531bfca238e49e5fb5832d65b98e69f2c62bfda3d2d4d833696bdc9ca54b", size = 29765, upload-time = "2025-10-26T00:46:10.379Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/59/74e752f605c6f0e351d4cf1c54fb9a1616dc800db4572b95bbfbb1a6225f/psycopg_pool-3.2.7-py3-none-any.whl", hash = "sha256:4b47bb59d887ef5da522eb63746b9f70e2faf967d34aac4f56ffc65e9606728f", size = 38232, upload-time = "2025-10-26T00:46:00.496Z" }, +] + +[[package]] +name = "pyarrow" +version = "22.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/b7/18f611a8cdc43417f9394a3ccd3eace2f32183c08b9eddc3d17681819f37/pyarrow-22.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:3e294c5eadfb93d78b0763e859a0c16d4051fc1c5231ae8956d61cb0b5666f5a", size = 34272022, upload-time = "2025-10-24T10:04:28.973Z" }, + { url = "https://files.pythonhosted.org/packages/26/5c/f259e2526c67eb4b9e511741b19870a02363a47a35edbebc55c3178db22d/pyarrow-22.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:69763ab2445f632d90b504a815a2a033f74332997052b721002298ed6de40f2e", size = 35995834, upload-time = "2025-10-24T10:04:35.467Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/8d/281f0f9b9376d4b7f146913b26fac0aa2829cd1ee7e997f53a27411bbb92/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:b41f37cabfe2463232684de44bad753d6be08a7a072f6a83447eeaf0e4d2a215", size = 45030348, upload-time = "2025-10-24T10:04:43.366Z" }, + { url = "https://files.pythonhosted.org/packages/f5/e5/53c0a1c428f0976bf22f513d79c73000926cb00b9c138d8e02daf2102e18/pyarrow-22.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:35ad0f0378c9359b3f297299c3309778bb03b8612f987399a0333a560b43862d", size = 47699480, upload-time = "2025-10-24T10:04:51.486Z" }, + { url = "https://files.pythonhosted.org/packages/95/e1/9dbe4c465c3365959d183e6345d0a8d1dc5b02ca3f8db4760b3bc834cf25/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8382ad21458075c2e66a82a29d650f963ce51c7708c7c0ff313a8c206c4fd5e8", size = 48011148, upload-time = "2025-10-24T10:04:59.585Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b4/7caf5d21930061444c3cf4fa7535c82faf5263e22ce43af7c2759ceb5b8b/pyarrow-22.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1a812a5b727bc09c3d7ea072c4eebf657c2f7066155506ba31ebf4792f88f016", size = 50276964, upload-time = "2025-10-24T10:05:08.175Z" }, + { url = "https://files.pythonhosted.org/packages/ae/f3/cec89bd99fa3abf826f14d4e53d3d11340ce6f6af4d14bdcd54cd83b6576/pyarrow-22.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:ec5d40dd494882704fb876c16fa7261a69791e784ae34e6b5992e977bd2e238c", size = 28106517, upload-time = "2025-10-24T10:05:14.314Z" }, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" }, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/e6/78ebbb10a8c8e4b61a59249394a4a594c1a7af95593dc933a349c8d00964/pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6", size = 307892, upload-time = "2025-03-28T02:41:22.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, +] + +[[package]] +name = "pycparser" +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, +] + +[[package]] +name = "pydantic" +version = "2.10.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b7/ae/d5220c5c52b158b1de7ca89fc5edb72f304a70a4c540c84c8844bf4008de/pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236", size = 761681, upload-time = "2025-01-24T01:42:12.693Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/3c/8cc1cc84deffa6e25d2d0c688ebb80635dfdbf1dbea3e30c541c8cf4d860/pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584", size = 431696, upload-time = "2025-01-24T01:42:10.371Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443, upload-time = "2024-12-18T11:31:54.917Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421, upload-time = "2024-12-18T11:27:55.409Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998, upload-time = "2024-12-18T11:27:57.252Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167, 
upload-time = "2024-12-18T11:27:59.146Z" }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071, upload-time = "2024-12-18T11:28:02.625Z" }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244, upload-time = "2024-12-18T11:28:04.442Z" }, + { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470, upload-time = "2024-12-18T11:28:07.679Z" }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291, upload-time = "2024-12-18T11:28:10.297Z" }, + { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613, upload-time = "2024-12-18T11:28:13.362Z" }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355, upload-time = "2024-12-18T11:28:16.587Z" }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661, upload-time = "2024-12-18T11:28:18.407Z" }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261, upload-time = "2024-12-18T11:28:21.471Z" }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361, upload-time = "2024-12-18T11:28:23.53Z" }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484, upload-time = "2024-12-18T11:28:25.391Z" }, + { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102, upload-time = "2024-12-18T11:28:28.593Z" }, +] + +[[package]] +name = "pydub" +version = "0.25.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/9a/e6bca0eed82db26562c73b5076539a4a08d3cffd19c3cc5913a3e61145fd/pydub-0.25.1.tar.gz", hash = 
"sha256:980a33ce9949cab2a569606b65674d748ecbca4f0796887fd6f46173a7b0d30f", size = 38326, upload-time = "2021-03-10T02:09:54.659Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/53/d78dc063216e62fc55f6b2eebb447f6a4b0a59f55c8406376f76bf959b08/pydub-0.25.1-py2.py3-none-any.whl", hash = "sha256:65617e33033874b59d87db603aa1ed450633288aefead953b30bded59cb599a6", size = 32327, upload-time = "2021-03-10T02:09:53.503Z" }, +] + +[[package]] +name = "pyjwt" +version = "2.10.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, +] + +[[package]] +name = "pylance" +version = "0.38.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "pyarrow" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/fd/1584088917524acd974c86a1addaa679e7201b9073cfed3ef7e495315fd7/pylance-0.38.3-cp39-abi3-macosx_10_15_x86_64.whl", hash = "sha256:f6c42a8b1c3ffa3ab55cc608351775d537b96ab0fa283075a96495fdf47e1920", size = 46482483, upload-time = "2025-10-28T12:01:46.109Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f9/5e5bcd547c7cc7a126fec2b32ebc42c22ecad3fcd73620793904bc8667bb/pylance-0.38.3-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:14329be831e3de21149f40a1437ad4bc6e5b7427e019586c0115fe05d2f016c1", size = 42459485, upload-time = "2025-10-28T11:38:51.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/2c/69/8ccd88ca597bb5c1f0b8f8ec4491cafbe388ef867328c07b29fb467f1e34/pylance-0.38.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77a872b99c9c4b6a84ff6f4254fd9405ec7672413edd3b4ce3b4ed232fdf1ac3", size = 44575863, upload-time = "2025-10-28T11:34:54.118Z" }, + { url = "https://files.pythonhosted.org/packages/3c/b4/78edb7e2c5a2e604b035a38d35857e5cdb242cec4171f386eed0920941c4/pylance-0.38.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88c87a0ada27856e9917dbf9eb59879891339a37d0cfb05b5df81caab2b11f31", size = 48011877, upload-time = "2025-10-28T11:38:20.902Z" }, + { url = "https://files.pythonhosted.org/packages/19/9b/45539c0724be34455655e70a4a6a3123ad338719687391dc037db0f7462d/pylance-0.38.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:b6a3c786f80d0bf8a2da379b89211d3ad9f3c723394bd9e78e5a234d65701b59", size = 44592430, upload-time = "2025-10-28T11:35:19.708Z" }, + { url = "https://files.pythonhosted.org/packages/6d/f3/8bed7707fc4b5bc6032ba1b7af8f2211af9d39f6fe925ae78961a14c85c7/pylance-0.38.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a169b2a254b8cb26399a0cc570784995280d2900d0ed8e50fbd3c38f21a3af76", size = 48024285, upload-time = "2025-10-28T11:38:53.192Z" }, + { url = "https://files.pythonhosted.org/packages/06/49/84966b5e28648740b157bd90e7a89ec99a835cb66a0a619e41c60ec71073/pylance-0.38.3-cp39-abi3-win_amd64.whl", hash = "sha256:ea106742c2032d3ed8aa9232923eed2054aabf683a769ddc564c285ce20b950d", size = 49747295, upload-time = "2025-10-28T11:56:25.262Z" }, +] + +[[package]] +name = "pypdf" +version = "4.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5f/de/5ee74158c3090ec99eae9f90c9e9c18f207fa5c722b0e95d6fa7faebcdf8/pypdf-4.0.2.tar.gz", hash = "sha256:3316d9ddfcff5df67ae3cdfe8b945c432aa43e7f970bae7c2a4ab4fe129cd937", size = 280173, upload-time = "2024-02-18T15:45:10.729Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/d7/87/30f8a2963247fd7b1267e600379c5e3f51c9849a07d042398e4485b7415c/pypdf-4.0.2-py3-none-any.whl", hash = "sha256:a62daa2a24d5a608ba1b6284dde185317ce3644f89b9ebe5314d0c5d1c9f257d", size = 283953, upload-time = "2024-02-18T15:45:07.857Z" }, +] + +[[package]] +name = "pytest" +version = "8.3.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919, upload-time = "2024-12-01T12:54:25.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083, upload-time = "2024-12-01T12:54:19.735Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/86/9e3c5f48f7b7b638b216e4b9e645f54d199d7abbbab7a64a13b4e12ba10f/pytest_asyncio-1.2.0.tar.gz", hash = "sha256:c609a64a2a8768462d0c99811ddb8bd2583c33fd33cf7f21af1c142e824ffb57", size = 50119, upload-time = "2025-09-12T07:33:53.816Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/93/2fa34714b7a4ae72f2f8dad66ba17dd9a2c793220719e736dda28b7aec27/pytest_asyncio-1.2.0-py3-none-any.whl", hash = "sha256:8e17ae5e46d8e7efe51ab6494dd2010f4ca8dae51652aa3c8d55acf50bfb2e99", size = 15095, upload-time = "2025-09-12T07:33:52.639Z" }, +] + +[[package]] +name = "pytest-cov" +version = "7.0.0" +source 
= { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/68/14/eb014d26be205d38ad5ad20d9a80f7d201472e08167f0bb4361e251084a9/pytest_mock-3.15.1.tar.gz", hash = "sha256:1849a238f6f396da19762269de72cb1814ab44416fa73a8686deac10b0d87a0f", size = 34036, upload-time = "2025-09-16T16:37:27.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/cc/06253936f4a7fa2e0f48dfe6d851d9c56df896a9ab09ac019d70b760619c/pytest_mock-3.15.1-py3-none-any.whl", hash = "sha256:0a25e2eb88fe5168d535041d09a4529a188176ae608a6d249ee65abc0949630d", size = 10095, upload-time = "2025-09-16T16:37:25.734Z" }, +] + +[[package]] +name = "pytest-xdist" +version = "3.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "execnet" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/b4/439b179d1ff526791eb921115fca8e44e596a13efeda518b9d845a619450/pytest_xdist-3.8.0.tar.gz", hash = "sha256:7e578125ec9bc6050861aa93f2d59f1d8d085595d6551c2c90b6f4fad8d3a9f1", size = 88069, upload-time = "2025-07-01T13:30:59.346Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/ca/31/d4e37e9e550c2b92a9cbc2e4d0b7420a27224968580b5a447f420847c975/pytest_xdist-3.8.0-py3-none-any.whl", hash = "sha256:202ca578cfeb7370784a8c33d6d05bc6e13b4f25b5053c30a152269fd10f0b88", size = 46396, upload-time = "2025-07-01T13:30:56.632Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/57/e84d88dfe0aec03b7a2d4327012c1627ab5f03652216c63d49846d7a6c58/python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca", size = 39115, upload-time = "2024-01-23T06:33:00.505Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/3e/b68c118422ec867fa7ab88444e1274aa40681c606d59ac27de5a5588f082/python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a", size = 19863, upload-time = "2024-01-23T06:32:58.246Z" }, +] + +[[package]] +name = "python-jose" +version = "3.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ecdsa" }, + { name = "pyasn1" }, + { name = "rsa" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/c6/77/3a1c9039db7124eb039772b935f2244fbb73fc8ee65b9acf2375da1c07bf/python_jose-3.5.0.tar.gz", hash = "sha256:fb4eaa44dbeb1c26dcc69e4bd7ec54a1cb8dd64d3b4d81ef08d90ff453f2b01b", size = 92726, upload-time = "2025-05-28T17:31:54.288Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/c3/0bd11992072e6a1c513b16500a5d07f91a24017c5909b02c72c62d7ad024/python_jose-3.5.0-py2.py3-none-any.whl", hash = "sha256:abd1202f23d34dfad2c3d28cb8617b90acf34132c7afd60abd0b0b7d3cb55771", size = 34624, upload-time = "2025-05-28T17:31:52.802Z" }, +] + +[package.optional-dependencies] +cryptography = [ + { name = "cryptography" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = 
"sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, +] + +[[package]] +name = "redis" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout", marker = "python_full_version < '3.11.3'" }, + { name = "pyjwt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/cf/128b1b6d7086200c9f387bd4be9b2572a30b90745ef078bd8b235042dc9f/redis-5.3.1.tar.gz", hash = "sha256:ca49577a531ea64039b5a36db3d6cd1a0c7a60c34124d46924a45b956e8cf14c", size = 
4626200, upload-time = "2025-07-25T08:06:27.778Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7f/26/5c5fa0e83c3621db835cfc1f1d789b37e7fa99ed54423b5f519beb931aa7/redis-5.3.1-py3-none-any.whl", hash = "sha256:dc1909bd24669cc31b5f67a039700b16ec30571096c5f1f0d9d2324bff31af97", size = 272833, upload-time = "2025-07-25T08:06:26.317Z" }, +] + +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + +[[package]] +name = "regex" +version = "2025.10.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/c8/1d2160d36b11fbe0a61acb7c3c81ab032d9ec8ad888ac9e0a61b85ab99dd/regex-2025.10.23.tar.gz", hash = "sha256:8cbaf8ceb88f96ae2356d01b9adf5e6306fa42fa6f7eab6b97794e37c959ac26", size = 401266, upload-time = "2025-10-21T15:58:20.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/e5/74b7cd5cd76b4171f9793042045bb1726f7856dd56e582fc3e058a7a8a5e/regex-2025.10.23-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6c531155bf9179345e85032052a1e5fe1a696a6abf9cea54b97e8baefff970fd", size = 487960, upload-time = "2025-10-21T15:54:53.253Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/08/854fa4b3b20471d1df1c71e831b6a1aa480281e37791e52a2df9641ec5c6/regex-2025.10.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:912e9df4e89d383681268d38ad8f5780d7cccd94ba0e9aa09ca7ab7ab4f8e7eb", size = 290425, upload-time = "2025-10-21T15:54:55.21Z" }, + { url = "https://files.pythonhosted.org/packages/ab/d3/6272b1dd3ca1271661e168762b234ad3e00dbdf4ef0c7b9b72d2d159efa7/regex-2025.10.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f375c61bfc3138b13e762fe0ae76e3bdca92497816936534a0177201666f44f", size = 288278, upload-time = "2025-10-21T15:54:56.862Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/c7b365dd9d9bc0a36e018cb96f2ffb60d2ba8deb589a712b437f67de2920/regex-2025.10.23-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e248cc9446081119128ed002a3801f8031e0c219b5d3c64d3cc627da29ac0a33", size = 793289, upload-time = "2025-10-21T15:54:58.352Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fb/b8fbe9aa16cf0c21f45ec5a6c74b4cecbf1a1c0deb7089d4a6f83a9c1caa/regex-2025.10.23-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b52bf9282fdf401e4f4e721f0f61fc4b159b1307244517789702407dd74e38ca", size = 860321, upload-time = "2025-10-21T15:54:59.813Z" }, + { url = "https://files.pythonhosted.org/packages/b0/81/bf41405c772324926a9bd8a640dedaa42da0e929241834dfce0733070437/regex-2025.10.23-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c084889ab2c59765a0d5ac602fd1c3c244f9b3fcc9a65fdc7ba6b74c5287490", size = 907011, upload-time = "2025-10-21T15:55:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fb/5ad6a8b92d3f88f3797b51bb4ef47499acc2d0b53d2fbe4487a892f37a73/regex-2025.10.23-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d80e8eb79009bdb0936658c44ca06e2fbbca67792013e3818eea3f5f228971c2", size 
= 800312, upload-time = "2025-10-21T15:55:04.15Z" }, + { url = "https://files.pythonhosted.org/packages/42/48/b4efba0168a2b57f944205d823f8e8a3a1ae6211a34508f014ec2c712f4f/regex-2025.10.23-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6f259118ba87b814a8ec475380aee5f5ae97a75852a3507cf31d055b01b5b40", size = 782839, upload-time = "2025-10-21T15:55:05.641Z" }, + { url = "https://files.pythonhosted.org/packages/13/2a/c9efb4c6c535b0559c1fa8e431e0574d229707c9ca718600366fcfef6801/regex-2025.10.23-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9b8c72a242683dcc72d37595c4f1278dfd7642b769e46700a8df11eab19dfd82", size = 854270, upload-time = "2025-10-21T15:55:07.27Z" }, + { url = "https://files.pythonhosted.org/packages/34/2d/68eecc1bdaee020e8ba549502291c9450d90d8590d0552247c9b543ebf7b/regex-2025.10.23-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a8d7b7a0a3df9952f9965342159e0c1f05384c0f056a47ce8b61034f8cecbe83", size = 845771, upload-time = "2025-10-21T15:55:09.477Z" }, + { url = "https://files.pythonhosted.org/packages/a5/cd/a1ae499cf9b87afb47a67316bbf1037a7c681ffe447c510ed98c0aa2c01c/regex-2025.10.23-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:413bfea20a484c524858125e92b9ce6ffdd0a4b97d4ff96b5859aa119b0f1bdd", size = 788778, upload-time = "2025-10-21T15:55:11.396Z" }, + { url = "https://files.pythonhosted.org/packages/38/f9/70765e63f5ea7d43b2b6cd4ee9d3323f16267e530fb2a420d92d991cf0fc/regex-2025.10.23-cp311-cp311-win32.whl", hash = "sha256:f76deef1f1019a17dad98f408b8f7afc4bd007cbe835ae77b737e8c7f19ae575", size = 265666, upload-time = "2025-10-21T15:55:13.306Z" }, + { url = "https://files.pythonhosted.org/packages/9c/1a/18e9476ee1b63aaec3844d8e1cb21842dc19272c7e86d879bfc0dcc60db3/regex-2025.10.23-cp311-cp311-win_amd64.whl", hash = "sha256:59bba9f7125536f23fdab5deeea08da0c287a64c1d3acc1c7e99515809824de8", size = 277600, upload-time = "2025-10-21T15:55:15.087Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/1b/c019167b1f7a8ec77251457e3ff0339ed74ca8bce1ea13138dc98309c923/regex-2025.10.23-cp311-cp311-win_arm64.whl", hash = "sha256:b103a752b6f1632ca420225718d6ed83f6a6ced3016dd0a4ab9a6825312de566", size = 269974, upload-time = "2025-10-21T15:55:16.841Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.28.0" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/48/dc/95f074d43452b3ef5d06276696ece4b3b5d696e7c9ad7173c54b1390cd70/rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea", size = 27419, upload-time = "2025-10-22T22:24:29.327Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/34/058d0db5471c6be7bef82487ad5021ff8d1d1d27794be8730aad938649cf/rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296", size = 362344, upload-time = "2025-10-22T22:21:39.713Z" }, + { url = "https://files.pythonhosted.org/packages/5d/67/9503f0ec8c055a0782880f300c50a2b8e5e72eb1f94dfc2053da527444dd/rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27", size = 348440, upload-time = "2025-10-22T22:21:41.056Z" }, + { url = "https://files.pythonhosted.org/packages/68/2e/94223ee9b32332a41d75b6f94b37b4ce3e93878a556fc5f152cbd856a81f/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c", size = 379068, upload-time = "2025-10-22T22:21:42.593Z" }, + { url = "https://files.pythonhosted.org/packages/b4/25/54fd48f9f680cfc44e6a7f39a5fadf1d4a4a1fd0848076af4a43e79f998c/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205", size = 390518, upload-time = "2025-10-22T22:21:43.998Z" }, + { url = "https://files.pythonhosted.org/packages/1b/85/ac258c9c27f2ccb1bd5d0697e53a82ebcf8088e3186d5d2bf8498ee7ed44/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95", size = 525319, upload-time = "2025-10-22T22:21:45.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/cb/c6734774789566d46775f193964b76627cd5f42ecf246d257ce84d1912ed/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9", size = 404896, upload-time = "2025-10-22T22:21:47.544Z" }, + { url = "https://files.pythonhosted.org/packages/1f/53/14e37ce83202c632c89b0691185dca9532288ff9d390eacae3d2ff771bae/rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2", size = 382862, upload-time = "2025-10-22T22:21:49.176Z" }, + { url = "https://files.pythonhosted.org/packages/6a/83/f3642483ca971a54d60caa4449f9d6d4dbb56a53e0072d0deff51b38af74/rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0", size = 398848, upload-time = "2025-10-22T22:21:51.024Z" }, + { url = "https://files.pythonhosted.org/packages/44/09/2d9c8b2f88e399b4cfe86efdf2935feaf0394e4f14ab30c6c5945d60af7d/rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e", size = 412030, upload-time = "2025-10-22T22:21:52.665Z" }, + { url = "https://files.pythonhosted.org/packages/dd/f5/e1cec473d4bde6df1fd3738be8e82d64dd0600868e76e92dfeaebbc2d18f/rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67", size = 559700, upload-time = "2025-10-22T22:21:54.123Z" }, + { url = "https://files.pythonhosted.org/packages/8d/be/73bb241c1649edbf14e98e9e78899c2c5e52bbe47cb64811f44d2cc11808/rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d", size = 584581, upload-time = "2025-10-22T22:21:56.102Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/9c/ffc6e9218cd1eb5c2c7dbd276c87cd10e8c2232c456b554169eb363381df/rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6", size = 549981, upload-time = "2025-10-22T22:21:58.253Z" }, + { url = "https://files.pythonhosted.org/packages/5f/50/da8b6d33803a94df0149345ee33e5d91ed4d25fc6517de6a25587eae4133/rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c", size = 214729, upload-time = "2025-10-22T22:21:59.625Z" }, + { url = "https://files.pythonhosted.org/packages/12/fd/b0f48c4c320ee24c8c20df8b44acffb7353991ddf688af01eef5f93d7018/rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa", size = 223977, upload-time = "2025-10-22T22:22:01.092Z" }, + { url = "https://files.pythonhosted.org/packages/b4/21/c8e77a2ac66e2ec4e21f18a04b4e9a0417ecf8e61b5eaeaa9360a91713b4/rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120", size = 217326, upload-time = "2025-10-22T22:22:02.944Z" }, + { url = "https://files.pythonhosted.org/packages/ae/bc/b43f2ea505f28119bd551ae75f70be0c803d2dbcd37c1b3734909e40620b/rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16", size = 363913, upload-time = "2025-10-22T22:24:07.129Z" }, + { url = "https://files.pythonhosted.org/packages/28/f2/db318195d324c89a2c57dc5195058cbadd71b20d220685c5bd1da79ee7fe/rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d", size = 350452, upload-time = "2025-10-22T22:24:08.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/ae/f2/1391c819b8573a4898cedd6b6c5ec5bc370ce59e5d6bdcebe3c9c1db4588/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db", size = 380957, upload-time = "2025-10-22T22:24:10.826Z" }, + { url = "https://files.pythonhosted.org/packages/5a/5c/e5de68ee7eb7248fce93269833d1b329a196d736aefb1a7481d1e99d1222/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7", size = 391919, upload-time = "2025-10-22T22:24:12.559Z" }, + { url = "https://files.pythonhosted.org/packages/fb/4f/2376336112cbfeb122fd435d608ad8d5041b3aed176f85a3cb32c262eb80/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78", size = 528541, upload-time = "2025-10-22T22:24:14.197Z" }, + { url = "https://files.pythonhosted.org/packages/68/53/5ae232e795853dd20da7225c5dd13a09c0a905b1a655e92bdf8d78a99fd9/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec", size = 405629, upload-time = "2025-10-22T22:24:16.001Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2d/351a3b852b683ca9b6b8b38ed9efb2347596973849ba6c3a0e99877c10aa/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72", size = 384123, upload-time = "2025-10-22T22:24:17.585Z" }, + { url = "https://files.pythonhosted.org/packages/e0/15/870804daa00202728cc91cb8e2385fa9f1f4eb49857c49cfce89e304eae6/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27", size = 400923, upload-time = 
"2025-10-22T22:24:19.512Z" }, + { url = "https://files.pythonhosted.org/packages/53/25/3706b83c125fa2a0bccceac951de3f76631f6bd0ee4d02a0ed780712ef1b/rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316", size = 413767, upload-time = "2025-10-22T22:24:21.316Z" }, + { url = "https://files.pythonhosted.org/packages/ef/f9/ce43dbe62767432273ed2584cef71fef8411bddfb64125d4c19128015018/rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912", size = 561530, upload-time = "2025-10-22T22:24:22.958Z" }, + { url = "https://files.pythonhosted.org/packages/46/c9/ffe77999ed8f81e30713dd38fd9ecaa161f28ec48bb80fa1cd9118399c27/rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829", size = 585453, upload-time = "2025-10-22T22:24:24.779Z" }, + { url = "https://files.pythonhosted.org/packages/ed/d2/4a73b18821fd4669762c855fd1f4e80ceb66fb72d71162d14da58444a763/rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f", size = 552199, upload-time = "2025-10-22T22:24:26.54Z" }, +] + +[[package]] +name = "rsa" +version = "4.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 
34696, upload-time = "2025-04-16T09:51:17.142Z" }, +] + +[[package]] +name = "ruff" +version = "0.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/62/50b7727004dfe361104dfbf898c45a9a2fdfad8c72c04ae62900224d6ecf/ruff-0.14.3.tar.gz", hash = "sha256:4ff876d2ab2b161b6de0aa1f5bd714e8e9b4033dc122ee006925fbacc4f62153", size = 5558687, upload-time = "2025-10-31T00:26:26.878Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ce/8e/0c10ff1ea5d4360ab8bfca4cb2c9d979101a391f3e79d2616c9bf348cd26/ruff-0.14.3-py3-none-linux_armv6l.whl", hash = "sha256:876b21e6c824f519446715c1342b8e60f97f93264012de9d8d10314f8a79c371", size = 12535613, upload-time = "2025-10-31T00:25:44.302Z" }, + { url = "https://files.pythonhosted.org/packages/d3/c8/6724f4634c1daf52409fbf13fefda64aa9c8f81e44727a378b7b73dc590b/ruff-0.14.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6fd8c79b457bedd2abf2702b9b472147cd860ed7855c73a5247fa55c9117654", size = 12855812, upload-time = "2025-10-31T00:25:47.793Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/db1bce591d55fd5f8a08bb02517fa0b5097b2ccabd4ea1ee29aa72b67d96/ruff-0.14.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:71ff6edca490c308f083156938c0c1a66907151263c4abdcb588602c6e696a14", size = 11944026, upload-time = "2025-10-31T00:25:49.657Z" }, + { url = "https://files.pythonhosted.org/packages/0b/75/4f8dbd48e03272715d12c87dc4fcaaf21b913f0affa5f12a4e9c6f8a0582/ruff-0.14.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786ee3ce6139772ff9272aaf43296d975c0217ee1b97538a98171bf0d21f87ed", size = 12356818, upload-time = "2025-10-31T00:25:51.949Z" }, + { url = "https://files.pythonhosted.org/packages/ec/9b/506ec5b140c11d44a9a4f284ea7c14ebf6f8b01e6e8917734a3325bff787/ruff-0.14.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cd6291d0061811c52b8e392f946889916757610d45d004e41140d81fb6cd5ddc", size = 12336745, 
upload-time = "2025-10-31T00:25:54.248Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e1/c560d254048c147f35e7f8131d30bc1f63a008ac61595cf3078a3e93533d/ruff-0.14.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a497ec0c3d2c88561b6d90f9c29f5ae68221ac00d471f306fa21fa4264ce5fcd", size = 13101684, upload-time = "2025-10-31T00:25:56.253Z" }, + { url = "https://files.pythonhosted.org/packages/a5/32/e310133f8af5cd11f8cc30f52522a3ebccc5ea5bff4b492f94faceaca7a8/ruff-0.14.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e231e1be58fc568950a04fbe6887c8e4b85310e7889727e2b81db205c45059eb", size = 14535000, upload-time = "2025-10-31T00:25:58.397Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a1/7b0470a22158c6d8501eabc5e9b6043c99bede40fa1994cadf6b5c2a61c7/ruff-0.14.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:469e35872a09c0e45fecf48dd960bfbce056b5db2d5e6b50eca329b4f853ae20", size = 14156450, upload-time = "2025-10-31T00:26:00.889Z" }, + { url = "https://files.pythonhosted.org/packages/0a/96/24bfd9d1a7f532b560dcee1a87096332e461354d3882124219bcaff65c09/ruff-0.14.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d6bc90307c469cb9d28b7cfad90aaa600b10d67c6e22026869f585e1e8a2db0", size = 13568414, upload-time = "2025-10-31T00:26:03.291Z" }, + { url = "https://files.pythonhosted.org/packages/a7/e7/138b883f0dfe4ad5b76b58bf4ae675f4d2176ac2b24bdd81b4d966b28c61/ruff-0.14.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2f8a0bbcffcfd895df39c9a4ecd59bb80dca03dc43f7fb63e647ed176b741e", size = 13315293, upload-time = "2025-10-31T00:26:05.708Z" }, + { url = "https://files.pythonhosted.org/packages/33/f4/c09bb898be97b2eb18476b7c950df8815ef14cf956074177e9fbd40b7719/ruff-0.14.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:678fdd7c7d2d94851597c23ee6336d25f9930b460b55f8598e011b57c74fd8c5", size = 13539444, upload-time = "2025-10-31T00:26:08.09Z" 
}, + { url = "https://files.pythonhosted.org/packages/9c/aa/b30a1db25fc6128b1dd6ff0741fa4abf969ded161599d07ca7edd0739cc0/ruff-0.14.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1ec1ac071e7e37e0221d2f2dbaf90897a988c531a8592a6a5959f0603a1ecf5e", size = 12252581, upload-time = "2025-10-31T00:26:10.297Z" }, + { url = "https://files.pythonhosted.org/packages/da/13/21096308f384d796ffe3f2960b17054110a9c3828d223ca540c2b7cc670b/ruff-0.14.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afcdc4b5335ef440d19e7df9e8ae2ad9f749352190e96d481dc501b753f0733e", size = 12307503, upload-time = "2025-10-31T00:26:12.646Z" }, + { url = "https://files.pythonhosted.org/packages/cb/cc/a350bac23f03b7dbcde3c81b154706e80c6f16b06ff1ce28ed07dc7b07b0/ruff-0.14.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7bfc42f81862749a7136267a343990f865e71fe2f99cf8d2958f684d23ce3dfa", size = 12675457, upload-time = "2025-10-31T00:26:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/cb/76/46346029fa2f2078826bc88ef7167e8c198e58fe3126636e52f77488cbba/ruff-0.14.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a65e448cfd7e9c59fae8cf37f9221585d3354febaad9a07f29158af1528e165f", size = 13403980, upload-time = "2025-10-31T00:26:17.81Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a4/35f1ef68c4e7b236d4a5204e3669efdeefaef21f0ff6a456792b3d8be438/ruff-0.14.3-py3-none-win32.whl", hash = "sha256:f3d91857d023ba93e14ed2d462ab62c3428f9bbf2b4fbac50a03ca66d31991f7", size = 12500045, upload-time = "2025-10-31T00:26:20.503Z" }, + { url = "https://files.pythonhosted.org/packages/03/15/51960ae340823c9859fb60c63301d977308735403e2134e17d1d2858c7fb/ruff-0.14.3-py3-none-win_amd64.whl", hash = "sha256:d7b7006ac0756306db212fd37116cce2bd307e1e109375e1c6c106002df0ae5f", size = 13594005, upload-time = "2025-10-31T00:26:22.533Z" }, + { url = "https://files.pythonhosted.org/packages/b7/73/4de6579bac8e979fca0a77e54dec1f1e011a0d268165eb8a9bc0982a6564/ruff-0.14.3-py3-none-win_arm64.whl", hash = 
"sha256:26eb477ede6d399d898791d01961e16b86f02bc2486d0d1a7a9bb2379d055dc1", size = 12590017, upload-time = "2025-10-31T00:26:24.52Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.11.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/2b/5c9562795c2eb2b5f63536961754760c25bf0f34af93d36aa28dea2fb303/s3transfer-0.11.5.tar.gz", hash = "sha256:8c8aad92784779ab8688a61aefff3e28e9ebdce43142808eaa3f0b0f402f68b7", size = 149107, upload-time = "2025-04-17T19:23:19.051Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/39/13402e323666d17850eca87e4cd6ecfcf9fd7809cac9efdcce10272fc29d/s3transfer-0.11.5-py3-none-any.whl", hash = "sha256:757af0f2ac150d3c75bc4177a32355c3862a98d20447b69a0161812992fe0bd4", size = 84782, upload-time = "2025-04-17T19:23:17.516Z" }, +] + +[[package]] +name = "scikit-learn" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "numpy" }, + { name = "scipy" }, + { name = "threadpoolctl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/e5/c09d20723bfd91315f6f4ddc77912b0dcc09588b4ca7ad2ffa204607ad7f/scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959", size = 7763055, upload-time = "2024-04-09T19:54:06.726Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/11/63de36e6933b03490fdfe5cbc9b5a68870a1281d8e705a23b33076dc82fb/scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc", size = 11558461, upload-time = "2024-04-09T19:53:22.402Z" }, + { url = "https://files.pythonhosted.org/packages/f2/30/1299e84d2ba3bc735baf17cebbf5b9d55144243c41b3ec6559ce3cf61e23/scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b", 
size = 10451621, upload-time = "2024-04-09T19:53:25.577Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6d/2b03edb51e688db0dc2958ab18edf71c8cc313172636cbdc0b1fc7670777/scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e", size = 11523470, upload-time = "2024-04-09T19:53:29.433Z" }, + { url = "https://files.pythonhosted.org/packages/4e/53/14405a47292b59235d811a2af8634aba188ccfd1a38ef4b8042f3447d79a/scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae", size = 12146964, upload-time = "2024-04-09T19:53:32.662Z" }, + { url = "https://files.pythonhosted.org/packages/79/3d/02d5d3ed359498fec3abdf65407d3c07e3b8765af17464969055aaec5171/scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904", size = 10602955, upload-time = "2024-04-09T19:53:35.147Z" }, +] + +[[package]] +name = "scipy" +version = "1.16.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0a/ca/d8ace4f98322d01abcd52d381134344bf7b431eba7ed8b42bdea5a3c2ac9/scipy-1.16.3.tar.gz", hash = "sha256:01e87659402762f43bd2fee13370553a17ada367d42e7487800bf2916535aecb", size = 30597883, upload-time = "2025-10-28T17:38:54.068Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/5f/6f37d7439de1455ce9c5a556b8d1db0979f03a796c030bafdf08d35b7bf9/scipy-1.16.3-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:40be6cf99e68b6c4321e9f8782e7d5ff8265af28ef2cd56e9c9b2638fa08ad97", size = 36630881, upload-time = "2025-10-28T17:31:47.104Z" }, + { url = "https://files.pythonhosted.org/packages/7c/89/d70e9f628749b7e4db2aa4cd89735502ff3f08f7b9b27d2e799485987cd9/scipy-1.16.3-cp311-cp311-macosx_12_0_arm64.whl", hash = 
"sha256:8be1ca9170fcb6223cc7c27f4305d680ded114a1567c0bd2bfcbf947d1b17511", size = 28941012, upload-time = "2025-10-28T17:31:53.411Z" }, + { url = "https://files.pythonhosted.org/packages/a8/a8/0e7a9a6872a923505dbdf6bb93451edcac120363131c19013044a1e7cb0c/scipy-1.16.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bea0a62734d20d67608660f69dcda23e7f90fb4ca20974ab80b6ed40df87a005", size = 20931935, upload-time = "2025-10-28T17:31:57.361Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c7/020fb72bd79ad798e4dbe53938543ecb96b3a9ac3fe274b7189e23e27353/scipy-1.16.3-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:2a207a6ce9c24f1951241f4693ede2d393f59c07abc159b2cb2be980820e01fb", size = 23534466, upload-time = "2025-10-28T17:32:01.875Z" }, + { url = "https://files.pythonhosted.org/packages/be/a0/668c4609ce6dbf2f948e167836ccaf897f95fb63fa231c87da7558a374cd/scipy-1.16.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:532fb5ad6a87e9e9cd9c959b106b73145a03f04c7d57ea3e6f6bb60b86ab0876", size = 33593618, upload-time = "2025-10-28T17:32:06.902Z" }, + { url = "https://files.pythonhosted.org/packages/ca/6e/8942461cf2636cdae083e3eb72622a7fbbfa5cf559c7d13ab250a5dbdc01/scipy-1.16.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0151a0749efeaaab78711c78422d413c583b8cdd2011a3c1d6c794938ee9fdb2", size = 35899798, upload-time = "2025-10-28T17:32:12.665Z" }, + { url = "https://files.pythonhosted.org/packages/79/e8/d0f33590364cdbd67f28ce79368b373889faa4ee959588beddf6daef9abe/scipy-1.16.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b7180967113560cca57418a7bc719e30366b47959dd845a93206fbed693c867e", size = 36226154, upload-time = "2025-10-28T17:32:17.961Z" }, + { url = "https://files.pythonhosted.org/packages/39/c1/1903de608c0c924a1749c590064e65810f8046e437aba6be365abc4f7557/scipy-1.16.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:deb3841c925eeddb6afc1e4e4a45e418d19ec7b87c5df177695224078e8ec733", size 
= 38878540, upload-time = "2025-10-28T17:32:23.907Z" }, + { url = "https://files.pythonhosted.org/packages/f1/d0/22ec7036ba0b0a35bccb7f25ab407382ed34af0b111475eb301c16f8a2e5/scipy-1.16.3-cp311-cp311-win_amd64.whl", hash = "sha256:53c3844d527213631e886621df5695d35e4f6a75f620dca412bcd292f6b87d78", size = 38722107, upload-time = "2025-10-28T17:32:29.921Z" }, + { url = "https://files.pythonhosted.org/packages/7b/60/8a00e5a524bb3bf8898db1650d350f50e6cffb9d7a491c561dc9826c7515/scipy-1.16.3-cp311-cp311-win_arm64.whl", hash = "sha256:9452781bd879b14b6f055b26643703551320aa8d79ae064a71df55c00286a184", size = 25506272, upload-time = "2025-10-28T17:32:34.577Z" }, +] + +[[package]] +name = "sentry-dramatiq" +version = "0.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dramatiq" }, + { name = "sentry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e3/a2/0a604154bac61c38003b21f714eaed5fc866855e98d918ba0717a756f00d/sentry_dramatiq-0.3.3.tar.gz", hash = "sha256:569d9a7ce933c37f3d7fe6087187fdd938adf756f8930d37e8e0af824da58636", size = 5263, upload-time = "2023-05-02T12:51:26.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/ca/0eafb6c2c7ea94e3192b2fdb58b40e72e430b240b358af7f62bf4afa0037/sentry_dramatiq-0.3.3-py3-none-any.whl", hash = "sha256:bc461c754537fa27cddb061c01ef9b6fea8412e5118c85206cc6938dfb4910ba", size = 4592, upload-time = "2023-05-02T12:51:24.28Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/ec/47b10a8a18884b2838e55b58d4e3f3f6e106c74fcc979732c22eefe42f2f/sentry_sdk-2.2.1.tar.gz", hash = "sha256:8aa2ec825724d8d9d645cab68e6034928b1a6a148503af3e361db3fa6401183f", size = 266202, upload-time = "2024-05-21T11:15:31.475Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/95/77/618705a558e61b51c883222ce365d562d4f528c8b2a8eaa43d4c3ae1a382/sentry_sdk-2.2.1-py2.py3-none-any.whl", hash = "sha256:7d617a1b30e80c41f3b542347651fcf90bb0a36f3a398be58b4f06b79c8d85bc", size = 281558, upload-time = "2024-05-21T11:15:28.449Z" }, +] + +[[package]] +name = "setuptools" +version = "75.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/ec/089608b791d210aec4e7f97488e67ab0d33add3efccb83a056cbafe3a2a6/setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", size = 1343222, upload-time = "2025-01-08T18:28:23.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/8a/b9dc7678803429e4a3bc9ba462fa3dd9066824d3c607490235c6a796be5a/setuptools-75.8.0-py3-none-any.whl", hash = "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3", size = 1228782, upload-time = "2025-01-08T18:28:20.912Z" }, +] + +[[package]] +name = "shapely" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/bc/0989043118a27cccb4e906a46b7565ce36ca7b57f5a18b78f4f1b0f72d9d/shapely-2.1.2.tar.gz", hash = "sha256:2ed4ecb28320a433db18a5bf029986aa8afcfd740745e78847e330d5d94922a9", size = 315489, upload-time = "2025-09-24T13:51:41.432Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/8d/1ff672dea9ec6a7b5d422eb6d095ed886e2e523733329f75fdcb14ee1149/shapely-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:91121757b0a36c9aac3427a651a7e6567110a4a67c97edf04f8d55d4765f6618", size = 1820038, upload-time = "2025-09-24T13:50:15.628Z" }, + { url = "https://files.pythonhosted.org/packages/4f/ce/28fab8c772ce5db23a0d86bf0adaee0c4c79d5ad1db766055fa3dab442e2/shapely-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:16a9c722ba774cf50b5d4541242b4cce05aafd44a015290c82ba8a16931ff63d", size = 1626039, upload-time = "2025-09-24T13:50:16.881Z" }, + { url = "https://files.pythonhosted.org/packages/70/8b/868b7e3f4982f5006e9395c1e12343c66a8155c0374fdc07c0e6a1ab547d/shapely-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cc4f7397459b12c0b196c9efe1f9d7e92463cbba142632b4cc6d8bbbbd3e2b09", size = 3001519, upload-time = "2025-09-24T13:50:18.606Z" }, + { url = "https://files.pythonhosted.org/packages/13/02/58b0b8d9c17c93ab6340edd8b7308c0c5a5b81f94ce65705819b7416dba5/shapely-2.1.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:136ab87b17e733e22f0961504d05e77e7be8c9b5a8184f685b4a91a84efe3c26", size = 3110842, upload-time = "2025-09-24T13:50:21.77Z" }, + { url = "https://files.pythonhosted.org/packages/af/61/8e389c97994d5f331dcffb25e2fa761aeedfb52b3ad9bcdd7b8671f4810a/shapely-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:16c5d0fc45d3aa0a69074979f4f1928ca2734fb2e0dde8af9611e134e46774e7", size = 4021316, upload-time = "2025-09-24T13:50:23.626Z" }, + { url = "https://files.pythonhosted.org/packages/d3/d4/9b2a9fe6039f9e42ccf2cb3e84f219fd8364b0c3b8e7bbc857b5fbe9c14c/shapely-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ddc759f72b5b2b0f54a7e7cde44acef680a55019eb52ac63a7af2cf17cb9cd2", size = 4178586, upload-time = "2025-09-24T13:50:25.443Z" }, + { url = "https://files.pythonhosted.org/packages/16/f6/9840f6963ed4decf76b08fd6d7fed14f8779fb7a62cb45c5617fa8ac6eab/shapely-2.1.2-cp311-cp311-win32.whl", hash = "sha256:2fa78b49485391224755a856ed3b3bd91c8455f6121fee0db0e71cefb07d0ef6", size = 1543961, upload-time = "2025-09-24T13:50:26.968Z" }, + { url = "https://files.pythonhosted.org/packages/38/1e/3f8ea46353c2a33c1669eb7327f9665103aa3a8dfe7f2e4ef714c210b2c2/shapely-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:c64d5c97b2f47e3cd9b712eaced3b061f2b71234b3fc263e0fcf7d889c6559dc", size = 1722856, 
upload-time = "2025-09-24T13:50:28.497Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.44" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/81/15d7c161c9ddf0900b076b55345872ed04ff1ed6a0666e5e94ab44b0163c/sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd", size = 2140517, upload-time = "2025-10-10T15:36:15.64Z" }, + { url = "https://files.pythonhosted.org/packages/d4/d5/4abd13b245c7d91bdf131d4916fd9e96a584dac74215f8b5bc945206a974/sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa", size = 2130738, upload-time = "2025-10-10T15:36:16.91Z" }, + { url = "https://files.pythonhosted.org/packages/cb/3c/8418969879c26522019c1025171cefbb2a8586b6789ea13254ac602986c0/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e", size = 3304145, upload-time = "2025-10-10T15:34:19.569Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/2d/fdb9246d9d32518bda5d90f4b65030b9bf403a935cfe4c36a474846517cb/sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e", size = 3304511, upload-time = "2025-10-10T15:47:05.088Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fb/40f2ad1da97d5c83f6c1269664678293d3fe28e90ad17a1093b735420549/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399", size = 3235161, upload-time = "2025-10-10T15:34:21.193Z" }, + { url = "https://files.pythonhosted.org/packages/95/cb/7cf4078b46752dca917d18cf31910d4eff6076e5b513c2d66100c4293d83/sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b", size = 3261426, upload-time = "2025-10-10T15:47:07.196Z" }, + { url = "https://files.pythonhosted.org/packages/f8/3b/55c09b285cb2d55bdfa711e778bdffdd0dc3ffa052b0af41f1c5d6e582fa/sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3", size = 2105392, upload-time = "2025-10-10T15:38:20.051Z" }, + { url = "https://files.pythonhosted.org/packages/c7/23/907193c2f4d680aedbfbdf7bf24c13925e3c7c292e813326c1b84a0b878e/sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5", size = 2130293, upload-time = "2025-10-10T15:38:21.601Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, +] + +[[package]] +name = "sqlparse" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999, upload-time = "2024-12-10T12:05:30.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = "2024-12-10T12:05:27.824Z" }, +] + +[[package]] +name = "starlette" +version = "0.36.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/47/1bba49d42d63f4453f0a64a20acbf2d0bd2f5a8cde6a166ee66c074a08f8/starlette-0.36.3.tar.gz", hash = "sha256:90a671733cfb35771d8cc605e0b679d23b992f8dcfad48cc60b38cb29aeb7080", size = 2842113, upload-time = "2024-02-04T18:16:24.95Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/f7/372e3953b6e6fbfe0b70a1bb52612eae16e943f4288516480860fcd4ac41/starlette-0.36.3-py3-none-any.whl", hash = "sha256:13d429aa93a61dc40bf503e8c801db1f1bca3dc706b10ef2434a36123568f044", size = 71481, upload-time = "2024-02-04T18:16:21.392Z" }, +] + +[[package]] +name = "tenacity" +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/32/6c/57df6196ce52c464cf8556e8f697fec5d3469bb8cd319c1685c0a090e0b4/tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2", size = 43608, upload-time = "2024-05-07T08:48:17.099Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/61/a1/6bb0cbebefb23641f068bb58a2bc56da9beb2b1c550242e3c540b37698f3/tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185", size = 25934, upload-time = 
"2024-05-07T08:48:14.696Z" }, +] + +[[package]] +name = "threadpoolctl" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/4d/08c89e34946fce2aec4fbb45c9016efd5f4d7f24af8e5d93296e935631d8/threadpoolctl-3.6.0.tar.gz", hash = "sha256:8ab8b4aa3491d812b623328249fab5302a68d2d71745c8a4c719a2fcaba9f44e", size = 21274, upload-time = "2025-03-13T13:49:23.031Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/d5/f9a850d79b0851d1d4ef6456097579a9005b31fea68726a4ae5f2d82ddd9/threadpoolctl-3.6.0-py3-none-any.whl", hash = "sha256:43a0b8fd5a2928500110039e43a5eed8480b918967083ea48dc3ab9f13c4a7fb", size = 18638, upload-time = "2025-03-13T13:49:21.846Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ea/cf/756fedf6981e82897f2d570dd25fa597eb3f4459068ae0572d7e888cfd6f/tiktoken-0.9.0.tar.gz", hash = "sha256:d02a5ca6a938e0490e1ff957bc48c8b078c88cb83977be1625b1fd8aac792c5d", size = 35991, upload-time = "2025-02-14T06:03:01.003Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/ae/4613a59a2a48e761c5161237fc850eb470b4bb93696db89da51b79a871f1/tiktoken-0.9.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f32cc56168eac4851109e9b5d327637f15fd662aa30dd79f964b7c39fbadd26e", size = 1065987, upload-time = "2025-02-14T06:02:14.174Z" }, + { url = "https://files.pythonhosted.org/packages/3f/86/55d9d1f5b5a7e1164d0f1538a85529b5fcba2b105f92db3622e5d7de6522/tiktoken-0.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:45556bc41241e5294063508caf901bf92ba52d8ef9222023f83d2483a3055348", size = 1009155, upload-time = "2025-02-14T06:02:15.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/58/01fb6240df083b7c1916d1dcb024e2b761213c95d576e9f780dfb5625a76/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03935988a91d6d3216e2ec7c645afbb3d870b37bcb67ada1943ec48678e7ee33", size = 1142898, upload-time = "2025-02-14T06:02:16.666Z" }, + { url = "https://files.pythonhosted.org/packages/b1/73/41591c525680cd460a6becf56c9b17468d3711b1df242c53d2c7b2183d16/tiktoken-0.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3d80aad8d2c6b9238fc1a5524542087c52b860b10cbf952429ffb714bc1136", size = 1197535, upload-time = "2025-02-14T06:02:18.595Z" }, + { url = "https://files.pythonhosted.org/packages/7d/7c/1069f25521c8f01a1a182f362e5c8e0337907fae91b368b7da9c3e39b810/tiktoken-0.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b2a21133be05dc116b1d0372af051cd2c6aa1d2188250c9b553f9fa49301b336", size = 1259548, upload-time = "2025-02-14T06:02:20.729Z" }, + { url = "https://files.pythonhosted.org/packages/6f/07/c67ad1724b8e14e2b4c8cca04b15da158733ac60136879131db05dda7c30/tiktoken-0.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:11a20e67fdf58b0e2dea7b8654a288e481bb4fc0289d3ad21291f8d0849915fb", size = 893895, upload-time = "2025-02-14T06:02:22.67Z" }, +] + +[[package]] +name = "tokenizers" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "huggingface-hub" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1c/46/fb6854cec3278fbfa4a75b50232c77622bc517ac886156e6afbfa4d8fc6e/tokenizers-0.22.1.tar.gz", hash = "sha256:61de6522785310a309b3407bac22d99c4db5dba349935e99e4d15ea2226af2d9", size = 363123, upload-time = "2025-09-19T09:49:23.424Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/33/f4b2d94ada7ab297328fc671fed209368ddb82f965ec2224eb1892674c3a/tokenizers-0.22.1-cp39-abi3-macosx_10_12_x86_64.whl", hash = "sha256:59fdb013df17455e5f950b4b834a7b3ee2e0271e6378ccb33aa74d178b513c73", 
size = 3069318, upload-time = "2025-09-19T09:49:11.848Z" }, + { url = "https://files.pythonhosted.org/packages/1c/58/2aa8c874d02b974990e89ff95826a4852a8b2a273c7d1b4411cdd45a4565/tokenizers-0.22.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:8d4e484f7b0827021ac5f9f71d4794aaef62b979ab7608593da22b1d2e3c4edc", size = 2926478, upload-time = "2025-09-19T09:49:09.759Z" }, + { url = "https://files.pythonhosted.org/packages/1e/3b/55e64befa1e7bfea963cf4b787b2cea1011362c4193f5477047532ce127e/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d2962dd28bc67c1f205ab180578a78eef89ac60ca7ef7cbe9635a46a56422a", size = 3256994, upload-time = "2025-09-19T09:48:56.701Z" }, + { url = "https://files.pythonhosted.org/packages/71/0b/fbfecf42f67d9b7b80fde4aabb2b3110a97fac6585c9470b5bff103a80cb/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38201f15cdb1f8a6843e6563e6e79f4abd053394992b9bbdf5213ea3469b4ae7", size = 3153141, upload-time = "2025-09-19T09:48:59.749Z" }, + { url = "https://files.pythonhosted.org/packages/17/a9/b38f4e74e0817af8f8ef925507c63c6ae8171e3c4cb2d5d4624bf58fca69/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1cbe5454c9a15df1b3443c726063d930c16f047a3cc724b9e6e1a91140e5a21", size = 3508049, upload-time = "2025-09-19T09:49:05.868Z" }, + { url = "https://files.pythonhosted.org/packages/d2/48/dd2b3dac46bb9134a88e35d72e1aa4869579eacc1a27238f1577270773ff/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7d094ae6312d69cc2a872b54b91b309f4f6fbce871ef28eb27b52a98e4d0214", size = 3710730, upload-time = "2025-09-19T09:49:01.832Z" }, + { url = "https://files.pythonhosted.org/packages/93/0e/ccabc8d16ae4ba84a55d41345207c1e2ea88784651a5a487547d80851398/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afd7594a56656ace95cdd6df4cca2e4059d294c5cfb1679c57824b605556cb2f", size 
= 3412560, upload-time = "2025-09-19T09:49:03.867Z" }, + { url = "https://files.pythonhosted.org/packages/d0/c6/dc3a0db5a6766416c32c034286d7c2d406da1f498e4de04ab1b8959edd00/tokenizers-0.22.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ef6063d7a84994129732b47e7915e8710f27f99f3a3260b8a38fc7ccd083f4", size = 3250221, upload-time = "2025-09-19T09:49:07.664Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a6/2c8486eef79671601ff57b093889a345dd3d576713ef047776015dc66de7/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ba0a64f450b9ef412c98f6bcd2a50c6df6e2443b560024a09fa6a03189726879", size = 9345569, upload-time = "2025-09-19T09:49:14.214Z" }, + { url = "https://files.pythonhosted.org/packages/6b/16/32ce667f14c35537f5f605fe9bea3e415ea1b0a646389d2295ec348d5657/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:331d6d149fa9c7d632cde4490fb8bbb12337fa3a0232e77892be656464f4b446", size = 9271599, upload-time = "2025-09-19T09:49:16.639Z" }, + { url = "https://files.pythonhosted.org/packages/51/7c/a5f7898a3f6baa3fc2685c705e04c98c1094c523051c805cdd9306b8f87e/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:607989f2ea68a46cb1dfbaf3e3aabdf3f21d8748312dbeb6263d1b3b66c5010a", size = 9533862, upload-time = "2025-09-19T09:49:19.146Z" }, + { url = "https://files.pythonhosted.org/packages/36/65/7e75caea90bc73c1dd8d40438adf1a7bc26af3b8d0a6705ea190462506e1/tokenizers-0.22.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a0f307d490295717726598ef6fa4f24af9d484809223bbc253b201c740a06390", size = 9681250, upload-time = "2025-09-19T09:49:21.501Z" }, + { url = "https://files.pythonhosted.org/packages/30/2c/959dddef581b46e6209da82df3b78471e96260e2bc463f89d23b1bf0e52a/tokenizers-0.22.1-cp39-abi3-win32.whl", hash = "sha256:b5120eed1442765cd90b903bb6cfef781fd8fe64e34ccaecbae4c619b7b12a82", size = 2472003, upload-time = "2025-09-19T09:49:27.089Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/46/e33a8c93907b631a99377ef4c5f817ab453d0b34f93529421f42ff559671/tokenizers-0.22.1-cp39-abi3-win_amd64.whl", hash = "sha256:65fd6e3fb11ca1e78a6a93602490f134d1fdeb13bcef99389d5102ea318ed138", size = 2674684, upload-time = "2025-09-19T09:49:24.953Z" }, +] + +[[package]] +name = "tomli" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/2e/299f62b401438d5fe1624119c723f5d877acc86a4c2492da405626665f12/tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45", size = 153236, upload-time = "2025-10-08T22:01:00.137Z" }, + { url = "https://files.pythonhosted.org/packages/86/7f/d8fffe6a7aefdb61bced88fcb5e280cfd71e08939da5894161bd71bea022/tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba", size = 148084, upload-time = "2025-10-08T22:01:01.63Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/24935fb6a2ee63e86d80e4d3b58b222dafaf438c416752c8b58537c8b89a/tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf", size = 234832, upload-time = "2025-10-08T22:01:02.543Z" }, + { url = "https://files.pythonhosted.org/packages/89/da/75dfd804fc11e6612846758a23f13271b76d577e299592b4371a4ca4cd09/tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441", size = 242052, upload-time = 
"2025-10-08T22:01:03.836Z" }, + { url = "https://files.pythonhosted.org/packages/70/8c/f48ac899f7b3ca7eb13af73bacbc93aec37f9c954df3c08ad96991c8c373/tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845", size = 239555, upload-time = "2025-10-08T22:01:04.834Z" }, + { url = "https://files.pythonhosted.org/packages/ba/28/72f8afd73f1d0e7829bfc093f4cb98ce0a40ffc0cc997009ee1ed94ba705/tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c", size = 245128, upload-time = "2025-10-08T22:01:05.84Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/a7679c8ac85208706d27436e8d421dfa39d4c914dcf5fa8083a9305f58d9/tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456", size = 96445, upload-time = "2025-10-08T22:01:06.896Z" }, + { url = "https://files.pythonhosted.org/packages/0a/fe/3d3420c4cb1ad9cb462fb52967080575f15898da97e21cb6f1361d505383/tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be", size = 107165, upload-time = "2025-10-08T22:01:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" }, +] + +[[package]] +name = "tornado" +version = "6.5.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/ce/1eb500eae19f4648281bb2186927bb062d2438c2e5093d1360391afd2f90/tornado-6.5.2.tar.gz", hash = "sha256:ab53c8f9a0fa351e2c0741284e06c7a45da86afb544133201c5cc8578eb076a0", size = 510821, upload-time = "2025-08-08T18:27:00.78Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f6/48/6a7529df2c9cc12efd2e8f5dd219516184d703b34c06786809670df5b3bd/tornado-6.5.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2436822940d37cde62771cff8774f4f00b3c8024fe482e16ca8387b8a2724db6", size = 442563, upload-time = "2025-08-08T18:26:42.945Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b5/9b575a0ed3e50b00c40b08cbce82eb618229091d09f6d14bce80fc01cb0b/tornado-6.5.2-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:583a52c7aa94ee046854ba81d9ebb6c81ec0fd30386d96f7640c96dad45a03ef", size = 440729, upload-time = "2025-08-08T18:26:44.473Z" }, + { url = "https://files.pythonhosted.org/packages/1b/4e/619174f52b120efcf23633c817fd3fed867c30bff785e2cd5a53a70e483c/tornado-6.5.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0fe179f28d597deab2842b86ed4060deec7388f1fd9c1b4a41adf8af058907e", size = 444295, upload-time = "2025-08-08T18:26:46.021Z" }, + { url = "https://files.pythonhosted.org/packages/95/fa/87b41709552bbd393c85dd18e4e3499dcd8983f66e7972926db8d96aa065/tornado-6.5.2-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b186e85d1e3536d69583d2298423744740986018e393d0321df7340e71898882", size = 443644, upload-time = "2025-08-08T18:26:47.625Z" }, + { url = "https://files.pythonhosted.org/packages/f9/41/fb15f06e33d7430ca89420283a8762a4e6b8025b800ea51796ab5e6d9559/tornado-6.5.2-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e792706668c87709709c18b353da1f7662317b563ff69f00bab83595940c7108", size = 443878, upload-time = "2025-08-08T18:26:50.599Z" }, + { url = "https://files.pythonhosted.org/packages/11/92/fe6d57da897776ad2e01e279170ea8ae726755b045fe5ac73b75357a5a3f/tornado-6.5.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:06ceb1300fd70cb20e43b1ad8aaee0266e69e7ced38fa910ad2e03285009ce7c", size = 444549, upload-time = "2025-08-08T18:26:51.864Z" }, + { url = 
"https://files.pythonhosted.org/packages/9b/02/c8f4f6c9204526daf3d760f4aa555a7a33ad0e60843eac025ccfd6ff4a93/tornado-6.5.2-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:74db443e0f5251be86cbf37929f84d8c20c27a355dd452a5cfa2aada0d001ec4", size = 443973, upload-time = "2025-08-08T18:26:53.625Z" }, + { url = "https://files.pythonhosted.org/packages/ae/2d/f5f5707b655ce2317190183868cd0f6822a1121b4baeae509ceb9590d0bd/tornado-6.5.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b5e735ab2889d7ed33b32a459cac490eda71a1ba6857b0118de476ab6c366c04", size = 443954, upload-time = "2025-08-08T18:26:55.072Z" }, + { url = "https://files.pythonhosted.org/packages/e8/59/593bd0f40f7355806bf6573b47b8c22f8e1374c9b6fd03114bd6b7a3dcfd/tornado-6.5.2-cp39-abi3-win32.whl", hash = "sha256:c6f29e94d9b37a95013bb669616352ddb82e3bfe8326fccee50583caebc8a5f0", size = 445023, upload-time = "2025-08-08T18:26:56.677Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/f609b420c2f564a748a2d80ebfb2ee02a73ca80223af712fca591386cafb/tornado-6.5.2-cp39-abi3-win_amd64.whl", hash = "sha256:e56a5af51cc30dd2cae649429af65ca2f6571da29504a07995175df14c18f35f", size = 445427, upload-time = "2025-08-08T18:26:57.91Z" }, + { url = "https://files.pythonhosted.org/packages/5e/4f/e1f65e8f8c76d73658b33d33b81eed4322fb5085350e4328d5c956f0c8f9/tornado-6.5.2-cp39-abi3-win_arm64.whl", hash = "sha256:d6c33dc3672e3a1f3618eb63b7ef4683a7688e7b9e6e8f0d9aa5726360a004af", size = 444456, upload-time = "2025-08-08T18:26:59.207Z" }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + +[[package]] +name = "typer-slim" +version = "0.20.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8e/45/81b94a52caed434b94da65729c03ad0fb7665fab0f7db9ee54c94e541403/typer_slim-0.20.0.tar.gz", hash = "sha256:9fc6607b3c6c20f5c33ea9590cbeb17848667c51feee27d9e314a579ab07d1a3", size = 106561, upload-time = "2025-10-20T17:03:46.642Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/dd/5cbf31f402f1cc0ab087c94d4669cfa55bd1e818688b910631e131d74e75/typer_slim-0.20.0-py3-none-any.whl", hash = "sha256:f42a9b7571a12b97dddf364745d29f12221865acef7a2680065f9bb29c7dc89d", size = 47087, upload-time = "2025-10-20T17:03:44.546Z" }, +] + +[[package]] +name = "types-aiofiles" +version = "23.2.0.20240623" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/01/69018f975c874a950f7a62b322e0c7469ce522b4610a645218f0e11c8ab1/types-aiofiles-23.2.0.20240623.tar.gz", hash = "sha256:d515b2fa46bf894aff45a364a704f050de3898344fd6c5994d58dc8b59ab71e6", size = 9424, upload-time = "2024-06-23T02:27:28.337Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/03/b981494f5ca1a12d3589f6073c61784f7c76c88548c6ba63c762475c943d/types_aiofiles-23.2.0.20240623-py3-none-any.whl", hash = "sha256:70597b29fc40c8583b6d755814b2cd5fcdb6785622e82d74ef499f9066316e08", size = 9554, upload-time = "2024-06-23T02:27:26.746Z" }, +] + +[[package]] +name = "types-pyasn1" +version = "0.6.0.20250914" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/69/92/bfe2385ee347c9d528adbd0fd8e5d7da6bcd18572cc42fd94e44c182dd69/types_pyasn1-0.6.0.20250914.tar.gz", hash = "sha256:236102553b76c938953037b7ae93d11d395d9413b7f2f8083d3b19d740f7eda6", size = 17109, upload-time = "2025-09-14T02:56:08.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/9d/5eb611d0db5b980cbb7d3eaca5baf187d5346f6371fdb6c708847539cea6/types_pyasn1-0.6.0.20250914-py3-none-any.whl", hash = "sha256:68ffeef3c28e1ed120b8b81a242f238f137543e68d466d84a97edcf3e4203b5b", size = 24052, upload-time = "2025-09-14T02:56:07.247Z" }, +] + +[[package]] +name = "types-python-jose" +version = "3.5.0.20250531" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-pyasn1" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/c8/09095e22b8e5eb3992f47722a3e1b31098b55c5e8325f4b21c5f1bdcb06b/types_python_jose-3.5.0.20250531.tar.gz", hash = "sha256:dbac2bc99fbb8124068696617f8709acfe4a43d79c6df3e59800006d46d621fe", size = 11891, upload-time = "2025-05-31T03:04:29.017Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/67/33/9d8c351a44e68896a53003e00fb01e1158b9e5b68cf3b75c1e4b51eb5263/types_python_jose-3.5.0.20250531-py3-none-any.whl", hash = "sha256:1609ee4d40a8a2ef5f62fcda99ec977b2ae773dfee9355cfb7e5002afa063c55", size = 14725, upload-time = "2025-05-31T03:04:27.802Z" }, +] + +[[package]] +name = "types-pytz" +version = "2025.2.0.20250809" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/07/e2/c774f754de26848f53f05defff5bb21dd9375a059d1ba5b5ea943cf8206e/types_pytz-2025.2.0.20250809.tar.gz", hash = "sha256:222e32e6a29bb28871f8834e8785e3801f2dc4441c715cd2082b271eecbe21e5", size = 10876, upload-time = "2025-08-09T03:14:17.453Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/db/d0/91c24fe54e565f2344d7a6821e6c6bb099841ef09007ea6321a0bac0f808/types_pytz-2025.2.0.20250809-py3-none-any.whl", hash = "sha256:4f55ed1b43e925cf851a756fe1707e0f5deeb1976e15bf844bcaa025e8fbd0db", size = 10095, upload-time = "2025-08-09T03:14:16.674Z" }, +] + +[[package]] +name = "types-requests" +version = "2.32.4.20250913" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" }, +] + +[[package]] +name = "types-tqdm" +version = "4.67.0.20250809" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fb/d0/cf498fc630d9fdaf2428b93e60b0e67b08008fec22b78716b8323cf644dc/types_tqdm-4.67.0.20250809.tar.gz", hash = "sha256:02bf7ab91256080b9c4c63f9f11b519c27baaf52718e5fdab9e9606da168d500", size = 17200, upload-time = "2025-08-09T03:17:43.489Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/13/3ff0781445d7c12730befce0fddbbc7a76e56eb0e7029446f2853238360a/types_tqdm-4.67.0.20250809-py3-none-any.whl", hash = "sha256:1a73053b31fcabf3c1f3e2a9d5ecdba0f301bde47a418cd0e0bdf774827c5c57", size = 24020, upload-time = "2025-08-09T03:17:42.453Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist 
= { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspect" +version = "0.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825, upload-time = "2023-05-24T20:25:47.612Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827, upload-time = "2023-05-24T20:25:45.287Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = 
"sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = "sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761, upload-time = "2025-03-05T21:17:41.549Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026, upload-time = "2025-03-05T21:17:39.857Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/d8/8aa69c76585035ca81851d99c3b00fd6be050aefd478a5376ff9fc5feb69/uvicorn-0.27.1.tar.gz", hash = 
"sha256:3d9a267296243532db80c83a959a3400502165ade2c1338dea4e67915fd4745a", size = 41151, upload-time = "2024-02-10T12:09:11.325Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d9/fd/bac111726b6c651f1fa5563145ecba5ff70d36fb140a55e0d79b60b9d65e/uvicorn-0.27.1-py3-none-any.whl", hash = "sha256:5c89da2f3895767472a35556e539fd59f7edbe9b1e9c0e1c99eebeadc61838e4", size = 60809, upload-time = "2024-02-10T12:09:08.934Z" }, +] + +[package.optional-dependencies] +standard = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "httptools" }, + { name = "python-dotenv" }, + { name = "pyyaml" }, + { name = "uvloop", marker = "platform_python_implementation != 'PyPy' and sys_platform != 'cygwin' and sys_platform != 'win32'" }, + { name = "watchfiles" }, + { name = "websockets" }, +] + +[[package]] +name = "uvloop" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/06/f0/18d39dbd1971d6d62c4629cc7fa67f74821b0dc1f5a77af43719de7936a7/uvloop-0.22.1.tar.gz", hash = "sha256:6c84bae345b9147082b17371e3dd5d42775bddce91f885499017f4607fdaf39f", size = 2443250, upload-time = "2025-10-16T22:17:19.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/d5/69900f7883235562f1f50d8184bb7dd84a2fb61e9ec63f3782546fdbd057/uvloop-0.22.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c60ebcd36f7b240b30788554b6f0782454826a0ed765d8430652621b5de674b9", size = 1352420, upload-time = "2025-10-16T22:16:21.187Z" }, + { url = "https://files.pythonhosted.org/packages/a8/73/c4e271b3bce59724e291465cc936c37758886a4868787da0278b3b56b905/uvloop-0.22.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b7f102bf3cb1995cfeaee9321105e8f5da76fdb104cdad8986f85461a1b7b77", size = 748677, upload-time = "2025-10-16T22:16:22.558Z" }, + { url = 
"https://files.pythonhosted.org/packages/86/94/9fb7fad2f824d25f8ecac0d70b94d0d48107ad5ece03769a9c543444f78a/uvloop-0.22.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53c85520781d84a4b8b230e24a5af5b0778efdb39142b424990ff1ef7c48ba21", size = 3753819, upload-time = "2025-10-16T22:16:23.903Z" }, + { url = "https://files.pythonhosted.org/packages/74/4f/256aca690709e9b008b7108bc85fba619a2bc37c6d80743d18abad16ee09/uvloop-0.22.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:56a2d1fae65fd82197cb8c53c367310b3eabe1bbb9fb5a04d28e3e3520e4f702", size = 3804529, upload-time = "2025-10-16T22:16:25.246Z" }, + { url = "https://files.pythonhosted.org/packages/7f/74/03c05ae4737e871923d21a76fe28b6aad57f5c03b6e6bfcfa5ad616013e4/uvloop-0.22.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40631b049d5972c6755b06d0bfe8233b1bd9a8a6392d9d1c45c10b6f9e9b2733", size = 3621267, upload-time = "2025-10-16T22:16:26.819Z" }, + { url = "https://files.pythonhosted.org/packages/75/be/f8e590fe61d18b4a92070905497aec4c0e64ae1761498cad09023f3f4b3e/uvloop-0.22.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:535cc37b3a04f6cd2c1ef65fa1d370c9a35b6695df735fcff5427323f2cd5473", size = 3723105, upload-time = "2025-10-16T22:16:28.252Z" }, +] + +[[package]] +name = "vine" +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980, upload-time = "2023-11-05T08:46:53.857Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636, upload-time = 
"2023-11-05T08:46:51.205Z" }, +] + +[[package]] +name = "watchdog" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = 
"sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, +] + +[[package]] +name = "watchdog-gevent" +version = "0.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gevent" }, + { name = "watchdog" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/69/91cfca7c21c382e3a8aca4251dcd7d4315228d9346381feb2dde36d14061/watchdog_gevent-0.2.1.tar.gz", hash = "sha256:ae6b94d0f8c8ce1c5956cd865f612b61f456cf19801744bba25a349fe8e8c337", size = 4296, upload-time = "2024-10-19T05:29:12.987Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/a9/54b88e150b77791958957e2188312477d09fc84820fc03f8b3a7569d10b0/watchdog_gevent-0.2.1-py3-none-any.whl", hash = "sha256:e8114658104a018f626ee54052335407c1438369febc776c4b4c4308ed002350", size = 3462, upload-time = "2024-10-19T05:29:11.421Z" }, +] + +[[package]] +name = "watchfiles" +version = "1.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" }, + { url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" }, + { url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" }, + { url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" }, + { url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" }, + { url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" }, + { url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" }, + { url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" }, + { url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" }, + { url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" }, + { url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" }, + { url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" }, + { url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" }, +] + +[[package]] +name = "wcwidth" +version = "0.2.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = 
"2025-09-22T16:29:51.641Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = "https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = "https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "yarl" +version = "1.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" }, + { url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" }, + { url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" }, + { url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" }, + { url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" }, + { url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, + { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, + { url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, + { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, + { url = 
"https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] + +[[package]] +name = "zope-event" +version = "5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "setuptools" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/c7/31e6f40282a2c548602c177826df281177caf79efaa101dd14314fb4ee73/zope_event-5.1.tar.gz", hash = "sha256:a153660e0c228124655748e990396b9d8295d6e4f546fa1b34f3319e1c666e7f", size = 18632, upload-time = "2025-06-26T07:14:22.72Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/ed/d8c3f56c1edb0ee9b51461dd08580382e9589850f769b69f0dedccff5215/zope_event-5.1-py3-none-any.whl", hash = "sha256:53de8f0e9f61dc0598141ac591f49b042b6d74784dab49971b9cc91d0f73a7df", size = 6905, upload-time = 
"2025-06-26T07:14:21.779Z" }, +] + +[[package]] +name = "zope-interface" +version = "8.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/3a/7fcf02178b8fad0a51e67e32765cd039ae505d054d744d76b8c2bbcba5ba/zope_interface-8.0.1.tar.gz", hash = "sha256:eba5610d042c3704a48222f7f7c6ab5b243ed26f917e2bc69379456b115e02d1", size = 253746, upload-time = "2025-09-25T05:55:51.285Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/2f/c10c739bcb9b072090c97c2e08533777497190daa19d190d72b4cce9c7cb/zope_interface-8.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4bd01022d2e1bce4a4a4ed9549edb25393c92e607d7daa6deff843f1f68b479d", size = 207903, upload-time = "2025-09-25T05:58:21.671Z" }, + { url = "https://files.pythonhosted.org/packages/b5/e1/9845ac3697f108d9a1af6912170c59a23732090bbfb35955fe77e5544955/zope_interface-8.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:29be8db8b712d94f1c05e24ea230a879271d787205ba1c9a6100d1d81f06c69a", size = 208345, upload-time = "2025-09-25T05:58:24.217Z" }, + { url = "https://files.pythonhosted.org/packages/f2/49/6573bc8b841cfab18e80c8e8259f1abdbbf716140011370de30231be79ad/zope_interface-8.0.1-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:51ae1b856565b30455b7879fdf0a56a88763b401d3f814fa9f9542d7410dbd7e", size = 255027, upload-time = "2025-09-25T05:58:19.975Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fd/908b0fd4b1ab6e412dfac9bd2b606f2893ef9ba3dd36d643f5e5b94c57b3/zope_interface-8.0.1-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d2e7596149cb1acd1d4d41b9f8fe2ffc0e9e29e2e91d026311814181d0d9efaf", size = 259800, upload-time = "2025-09-25T05:58:11.487Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/78/8419a2b4e88410520ed4b7f93bbd25a6d4ae66c4e2b131320f2b90f43077/zope_interface-8.0.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b2737c11c34fb9128816759864752d007ec4f987b571c934c30723ed881a7a4f", size = 260978, upload-time = "2025-09-25T06:26:24.483Z" }, + { url = "https://files.pythonhosted.org/packages/e5/90/caf68152c292f1810e2bd3acd2177badf08a740aa8a348714617d6c9ad0b/zope_interface-8.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:cf66e4bf731aa7e0ced855bb3670e8cda772f6515a475c6a107bad5cb6604103", size = 212155, upload-time = "2025-09-25T05:59:40.318Z" }, +] diff --git a/echo/setup.sh b/echo/setup.sh deleted file mode 100755 index e9070163..00000000 --- a/echo/setup.sh +++ /dev/null @@ -1,40 +0,0 @@ -#!/bin/bash - -frontend_setup () { - curl -fsSL https://fnm.vercel.app/install | bash - echo 'eval "$(fnm env --use-on-cd)"' >> ~/.bashrc - FNM_PATH="/root/.local/share/fnm" - if [ -d "$FNM_PATH" ]; then - export PATH="$FNM_PATH:$PATH" - eval "`fnm env`" - fi - fnm install 22 - npm i -g pnpm - pnpm config set store-dir /home/node/.local/share/pnpm/store - - pnpm install -g azure-functions-core-tools@4 - - cd frontend - pnpm install -} - -server_setup() { - curl -sSf https://rye.astral.sh/get | RYE_INSTALL_OPTION="--yes" bash - echo 'source "$HOME/.rye/env"' >> ~/.bashrc - . $HOME/.rye/env - cd server - rye sync - pip install mypy -} - -# hide stdout, only show stderr -frontend_setup & -first=$! - -server_setup & -second=$! - -wait $first -wait $second - -echo "Setup complete" \ No newline at end of file From 08f73162763aefd6d121c395afa009bb8e75bac5 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Thu, 6 Nov 2025 13:25:41 +0000 Subject: [PATCH 02/23] enhance development environment and documentation - Updated README to include a note about common development queries. - Replaced deprecated VSCode extension with a new one in the devcontainer configuration. 
- Added a new network configuration for Docker services in the docker-compose file. - Implemented PostgreSQL client installation in the setup script with an option to skip it. - Improved troubleshooting documentation for Redis/Valkey startup issues and added PostgreSQL connection instructions. - Refactored test code for better readability and consistency. --- echo/.devcontainer/devcontainer.json | 2 +- echo/.devcontainer/docker-compose.yml | 24 +-- echo/.devcontainer/setup.sh | 41 +++++ echo/docs/database_migrations.md | 6 +- echo/docs/troubleshooting-tips.md | 13 +- echo/readme.md | 1 + echo/server/Dockerfile | 2 + echo/server/tests/test_transcribe_runpod.py | 178 +++++++++++--------- echo/server/uv.lock | 2 + 9 files changed, 170 insertions(+), 99 deletions(-) diff --git a/echo/.devcontainer/devcontainer.json b/echo/.devcontainer/devcontainer.json index 354e0c20..f2f8dafb 100644 --- a/echo/.devcontainer/devcontainer.json +++ b/echo/.devcontainer/devcontainer.json @@ -22,7 +22,7 @@ // python "ms-python.python", "charliermarsh.ruff", - "meta.pyrefly", + "matangover.mypy", "anysphere.cursorpyright", // for cursor // general "github.vscode-pull-request-github", diff --git a/echo/.devcontainer/docker-compose.yml b/echo/.devcontainer/docker-compose.yml index ec9ccd89..0c2374e1 100644 --- a/echo/.devcontainer/docker-compose.yml +++ b/echo/.devcontainer/docker-compose.yml @@ -3,6 +3,8 @@ services: image: valkey/valkey:8.0.6-alpine volumes: - ./redis_data:/data + networks: + - dembrane-network postgres: image: pgvector/pgvector:0.8.1-pg16 @@ -16,6 +18,8 @@ services: volumes: - ./postgres_data:/var/lib/postgresql/data - ./init.sql:/docker-entrypoint-initdb.d/init.sql + networks: + - dembrane-network neo4j: image: neo4j:5.26.16-community @@ -30,6 +34,8 @@ services: - 7687:7687 # Neo4j Bolt protocol environment: - NEO4J_AUTH=neo4j/admin@dembrane + networks: + - dembrane-network directus: build: @@ -67,14 +73,8 @@ services: - 
USER_REGISTER_URL_ALLOW_LIST=http://localhost:5173/verify-email - PASSWORD_RESET_URL_ALLOW_LIST=http://localhost:5173/password-reset - STORAGE_LOCATIONS=local - - EMAIL_TRANSPORT="smtp" - - EMAIL_FROM="" - - EMAIL_SMTP_HOST="" - - EMAIL_SMTP_PORT=587 - - EMAIL_SMTP_USER="" - - EMAIL_SMTP_PASSWORD="" - - EMAIL_SMTP_SECURE=false - - EMAIL_SMTP_IGNORE_TLS=false + networks: + - dembrane-network depends_on: - postgres @@ -98,7 +98,6 @@ services: - ADMIN_BASE_URL=http://localhost:5173 - PARTICIPANT_BASE_URL=http://localhost:5174 - DIRECTUS_BASE_URL=http://directus:8055 - - DEBUG_MODE=1 - DISABLE_SENTRY=1 - SERVE_API_DOCS=1 - DISABLE_REDACTION=1 @@ -112,9 +111,14 @@ services: - ../..:/workspaces:cached # for docker passthrough - /var/run/docker.sock:/var/run/docker.sock - + networks: + - dembrane-network command: sleep infinity depends_on: - postgres - redis - neo4j + +networks: + dembrane-network: + driver: bridge \ No newline at end of file diff --git a/echo/.devcontainer/setup.sh b/echo/.devcontainer/setup.sh index a203c035..7a451308 100755 --- a/echo/.devcontainer/setup.sh +++ b/echo/.devcontainer/setup.sh @@ -220,6 +220,35 @@ install_server_deps() { fi } +install_postgresql_client() { + if command_exists psql && psql --version | grep -q "psql (PostgreSQL) 16"; then + log_info "PostgreSQL client 16 already installed: $(psql --version)" + return + fi + + log_info "Installing PostgreSQL client 16..." + + # Install postgresql-common first + ensure_apt_packages postgresql-common + + # Run the pgdg script to add PostgreSQL repository (non-interactive) + if [ -f "/usr/share/postgresql-common/pgdg/apt.postgresql.org.sh" ]; then + log_info "Adding PostgreSQL repository..." 
+ echo "" | /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh || log_warn "PostgreSQL repository script may have already run" + else + log_warn "PostgreSQL repository script not found, attempting to install postgresql-client-16 anyway" + fi + + # Install postgresql-client-16 + ensure_apt_packages postgresql-client-16 + + if command_exists psql; then + log_info "PostgreSQL client installed: $(psql --version)" + else + log_warn "PostgreSQL client installation completed but psql command not found" + fi +} + show_help() { cat < CREATE extension vector; ``` \ No newline at end of file diff --git a/echo/docs/troubleshooting-tips.md b/echo/docs/troubleshooting-tips.md index a0ba7b6d..08dd39a3 100644 --- a/echo/docs/troubleshooting-tips.md +++ b/echo/docs/troubleshooting-tips.md @@ -31,19 +31,26 @@ If you try logging into directus and it doesn't work with what you have in the . Solution: You need to reset the DB. (delete ".devcontainer/postgres_data" and rebuild / migrate the DB again / etc) -### Redis not starting (Docker Desktop) +### Redis/Valkey not starting (Docker Desktop) `Can't open the append-only file: Permission denied` +`redis.exceptions.ResponseError: MISCONF Valkey is configured to save RDB snapshots, but it's currently unable to persist to disk. Commands that may modify the data set are disabled, because this instance is configured to report errors during writes if RDB snapshotting fails (stop-writes-on-bgsave-error option). Please check the Valkey logs for details about the RDB error.` -If your Redis container fails to start and you see a “Permission denied” error about the append-only file, you may need to change permissions on the Redis data folder. +If your Redis/Valkey container fails to start and you see a “Permission denied” error about the append-only file, you may need to change permissions on the Redis data folder. + +0. First make sure that the folder `.devcontainer/redis_data` exists 1. 
**Open a local WSL terminal** (outside of the container). 2. **Run**: ```bash - sudo chown -R 1001:1001 ./echo/.devcontainer/redis_data + sudo chown -R 1001:1001 .devcontainer/redis_data ``` 3. **Restart** the redis container from Docker Desktop. +### Able to login, "Error creating Project" + +- do [Database Migrations](./docs/database_migrations.md) + ### Minio not starting - Go to minio-ui at http://localhost:9001/ diff --git a/echo/readme.md b/echo/readme.md index e8f664a3..3c14c6cf 100644 --- a/echo/readme.md +++ b/echo/readme.md @@ -77,5 +77,6 @@ The following guide is to run the whole application locally. it is HIGHLY recomm - Click **"Terminal Keeper: Active session"**. ## FAQ [./docs/troubleshooting-tips.md](./docs/troubleshooting-tips.md) +- Check this file for common queries while developing, before raising a request. Enjoy building with Dembrane! \ No newline at end of file diff --git a/echo/server/Dockerfile b/echo/server/Dockerfile index b398f8f9..c395636f 100644 --- a/echo/server/Dockerfile +++ b/echo/server/Dockerfile @@ -27,6 +27,8 @@ WORKDIR /code/server COPY pyproject.toml uv.lock* ./ RUN uv sync --frozen +FROM base AS runner + # Copy everything else after dependency installation # This ensures dependency layer is cached separately from application code COPY . . 
diff --git a/echo/server/tests/test_transcribe_runpod.py b/echo/server/tests/test_transcribe_runpod.py index 5fe5ba7d..a29a7646 100644 --- a/echo/server/tests/test_transcribe_runpod.py +++ b/echo/server/tests/test_transcribe_runpod.py @@ -7,132 +7,142 @@ from dembrane.utils import get_utc_timestamp from dembrane.directus import directus from dembrane.transcribe import ( - _get_status_runpod, - queue_transcribe_audio_runpod, - transcribe_conversation_chunk, + _get_status_runpod, + queue_transcribe_audio_runpod, + transcribe_conversation_chunk, ) logger = logging.getLogger("test_transcribe") + @pytest.fixture def fixture_english_chunk(): - logger.info("setup") + logger.info("setup") + + p = directus.create_item( + "project", + { + "name": "test", + "language": "en", + "is_conversation_allowed": True, + }, + )["data"] - p = directus.create_item("project", { - "name": "test", - "language": "en", - "is_conversation_allowed": True, - })["data"] + c = directus.create_item( + "conversation", + {"project_id": p["id"], "participant_name": "test_english", "language": "en"}, + )["data"] - c = directus.create_item("conversation", { - "project_id": p["id"], - "participant_name": "test_english", - "language": "en" - })["data"] + path = save_to_s3_from_url( + "https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", + public=True, + ) - path = save_to_s3_from_url("https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", public=True) + cc = directus.create_item( + "conversation_chunk", + { + "conversation_id": c["id"], + "path": path, + "timestamp": str(get_utc_timestamp()), + }, + )["data"] - cc = directus.create_item("conversation_chunk", { - "conversation_id": c["id"], - "path": path, - "timestamp": str(get_utc_timestamp()), - })["data"] + yield cc["id"] - yield cc["id"] + logger.info("teardown") - logger.info("teardown") + directus.delete_item("conversation_chunk", cc["id"]) - directus.delete_item("conversation_chunk", 
cc["id"]) + directus.delete_item("conversation", c["id"]) - directus.delete_item("conversation", c["id"]) + directus.delete_item("project", p["id"]) - directus.delete_item("project", p["id"]) + delete_from_s3(path) - delete_from_s3(path) @pytest.fixture def fixture_dutch_chunk(): - logger.info("setup") + logger.info("setup") - p = directus.create_item("project", { - "name": "test", - "language": "nl", - "is_conversation_allowed": True, - })["data"] + p = directus.create_item( + "project", + { + "name": "test", + "language": "nl", + "is_conversation_allowed": True, + }, + )["data"] - c = directus.create_item("conversation", { - "project_id": p["id"], - "participant_name": "test_dutch", - "language": "nl" - })["data"] + c = directus.create_item( + "conversation", {"project_id": p["id"], "participant_name": "test_dutch", "language": "nl"} + )["data"] - path = save_to_s3_from_url("https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", public=True) + path = save_to_s3_from_url( + "https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", + public=True, + ) - cc = directus.create_item("conversation_chunk", { - "conversation_id": c["id"], - "path": path, - "timestamp": str(get_utc_timestamp()), - })["data"] + cc = directus.create_item( + "conversation_chunk", + { + "conversation_id": c["id"], + "path": path, + "timestamp": str(get_utc_timestamp()), + }, + )["data"] - yield cc["id"] + yield cc["id"] - logger.info("teardown") + logger.info("teardown") - directus.delete_item("conversation_chunk", cc["id"]) + directus.delete_item("conversation_chunk", cc["id"]) - directus.delete_item("conversation", c["id"]) + directus.delete_item("conversation", c["id"]) - directus.delete_item("project", p["id"]) + directus.delete_item("project", p["id"]) - delete_from_s3(path) + delete_from_s3(path) @pytest.mark.parametrize("is_priority", [True, False]) def test_queue_transcribe_audio_runpod(is_priority: bool): - job_id = 
queue_transcribe_audio_runpod( - audio_file_uri="https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", - whisper_prompt="", - language="en", - is_priority=is_priority, - ) - assert job_id is not None + job_id = queue_transcribe_audio_runpod( + audio_file_uri="https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", + whisper_prompt="", + language="en", + is_priority=is_priority, + ) + assert job_id is not None def test_transcribe_conversation_chunk_english(fixture_english_chunk): - logger.info(f"fixture_english_chunk conversation_chunk_id: {fixture_english_chunk}") - result = transcribe_conversation_chunk(fixture_english_chunk) - logger.info(f"result: {result}") - assert result is not None + logger.info(f"fixture_english_chunk conversation_chunk_id: {fixture_english_chunk}") + result = transcribe_conversation_chunk(fixture_english_chunk) + logger.info(f"result: {result}") + assert result is not None def test_transcribe_conversation_chunk_dutch(fixture_dutch_chunk): - logger.info(f"fixture_dutch_chunk: {fixture_dutch_chunk}") - result = transcribe_conversation_chunk(fixture_dutch_chunk) - - # get the conversation chunk - cc = dict(directus.get_item("conversation_chunk", result)) - - logger.info(f"cc: {cc}") - assert cc.get("runpod_job_status_link") is not None - - status, _ = _get_status_runpod(cc["runpod_job_status_link"]) - while status in ["IN_PROGRESS", "IN_QUEUE"]: - logger.info(f"waiting for job to finish: {status}") - time.sleep(2) - status, _ = _get_status_runpod(cc["runpod_job_status_link"]) - - # get the status of the job - status, data = _get_status_runpod(cc["runpod_job_status_link"]) - - logger.info(f"data: {data}") + logger.info(f"fixture_dutch_chunk: {fixture_dutch_chunk}") + result = transcribe_conversation_chunk(fixture_dutch_chunk) - # get the output - assert data.get("output") is not None - assert data.get("output").get("joined_text") is not None - - + # get the conversation chunk + cc 
= dict(directus.get_item("conversation_chunk", result)) + logger.info(f"cc: {cc}") + assert cc.get("runpod_job_status_link") is not None + status, _ = _get_status_runpod(cc["runpod_job_status_link"]) + while status in ["IN_PROGRESS", "IN_QUEUE"]: + logger.info(f"waiting for job to finish: {status}") + time.sleep(2) + status, _ = _get_status_runpod(cc["runpod_job_status_link"]) + # get the status of the job + status, data = _get_status_runpod(cc["runpod_job_status_link"]) + logger.info(f"data: {data}") + # get the output + assert data.get("output") is not None + assert data.get("output").get("joined_text") is not None diff --git a/echo/server/uv.lock b/echo/server/uv.lock index b7047a91..1112b8ca 100644 --- a/echo/server/uv.lock +++ b/echo/server/uv.lock @@ -1053,6 +1053,8 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, { url = "https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/67/24/28a5b2fa42d12b3d7e5614145f0bd89714c34c08be6aabe39c14dd52db34/greenlet-3.2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:c9c6de1940a7d828635fbd254d69db79e54619f165ee7ce32fda763a9cb6a58c", size = 1548385, upload-time = "2025-11-04T12:42:11.067Z" }, + { url = "https://files.pythonhosted.org/packages/6a/05/03f2f0bdd0b0ff9a4f7b99333d57b53a7709c27723ec8123056b084e69cd/greenlet-3.2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03c5136e7be905045160b1b9fdca93dd6727b180feeafda6818e6496434ed8c5", size = 1613329, upload-time = "2025-11-04T12:42:12.928Z" }, { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, ] From 7202a55bd7afc0e6f5d72c99c4265444a698f93d Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Thu, 6 Nov 2025 16:11:34 +0000 Subject: [PATCH 03/23] add 2fa --- echo/.devcontainer/docker-compose.yml | 4 +- echo/.devcontainer/setup.sh | 27 ++ echo/frontend/AGENTS.md | 67 +++ echo/frontend/package.json | 2 +- echo/frontend/pnpm-lock.yaml | 12 +- echo/frontend/src/Router.tsx | 18 + .../src/components/auth/hooks/index.ts | 26 +- .../frontend/src/components/layout/Header.tsx | 15 + .../settings/TwoFactorSettingsCard.tsx | 426 ++++++++++++++++++ .../src/components/settings/hooks/index.ts | 78 ++++ echo/frontend/src/routes/auth/Login.tsx | 202 +++++++-- .../src/routes/settings/UserSettingsRoute.tsx | 52 +++ .../dembrane/processing_status_utils.py | 30 +- echo/server/pyproject.toml | 28 +- 14 files changed, 899 insertions(+), 88 deletions(-) create mode 100644 echo/frontend/AGENTS.md create mode 100644 echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx create mode 100644 echo/frontend/src/components/settings/hooks/index.ts create mode 100644 echo/frontend/src/routes/settings/UserSettingsRoute.tsx diff --git a/echo/.devcontainer/docker-compose.yml b/echo/.devcontainer/docker-compose.yml index 0c2374e1..a95401a1 100644 
--- a/echo/.devcontainer/docker-compose.yml +++ b/echo/.devcontainer/docker-compose.yml @@ -105,7 +105,9 @@ services: - STORAGE_S3_SECRET=dembrane - STORAGE_S3_BUCKET=dembrane - STORAGE_S3_ENDPOINT=http://minio:9000 - - NEO4J_URL=bolt://neo4j:7687 + - NEO4J_URI=bolt://neo4j:7687 + - NEO4J_USERNAME=neo4j + - NEO4J_PASSWORD=admin@dembrane volumes: - ../..:/workspaces:cached diff --git a/echo/.devcontainer/setup.sh b/echo/.devcontainer/setup.sh index 7a451308..6fe1b8d5 100755 --- a/echo/.devcontainer/setup.sh +++ b/echo/.devcontainer/setup.sh @@ -123,6 +123,22 @@ install_pnpm() { log_info "pnpm installed: $(pnpm --version)" } +install_codex() { + if command_exists codex; then + log_info "codex already installed: $(codex --version 2>/dev/null || echo 'installed')" + return + fi + + if ! command_exists npm; then + log_error "npm not found, cannot install codex" + return 1 + fi + + log_info "Installing @openai/codex globally..." + npm install -g @openai/codex@latest + log_info "codex installed" +} + install_uv() { if command_exists uv; then log_info "uv already installed: $(uv --version)" @@ -256,6 +272,7 @@ Usage: ./setup.sh [options] Options: -h, --help Show this help message --skip-node Skip fnm/Node.js/pnpm installation + --skip-codex Skip codex installation --skip-frontend Skip frontend dependency installation --skip-server Skip server dependency installation --skip-python Skip managed Python setup for uv @@ -270,6 +287,7 @@ EOF parse_args() { SKIP_NODE="false" + SKIP_CODEX="false" SKIP_FRONTEND="false" SKIP_SERVER="false" SKIP_PYTHON="false" @@ -285,6 +303,10 @@ parse_args() { SKIP_NODE="true" shift ;; + --skip-codex) + SKIP_CODEX="true" + shift + ;; --skip-frontend) SKIP_FRONTEND="true" shift @@ -344,6 +366,11 @@ main() { install_fnm install_node install_pnpm + if [ "$SKIP_CODEX" = "false" ]; then + install_codex + else + log_info "Skipping codex installation" + fi else log_info "Skipping Node.js tooling setup" fi diff --git a/echo/frontend/AGENTS.md 
b/echo/frontend/AGENTS.md new file mode 100644 index 00000000..2acd91b0 --- /dev/null +++ b/echo/frontend/AGENTS.md @@ -0,0 +1,67 @@ +# AGENTS Log + +## Maintenance Protocol +- Read this file before making changes; keep structure consistent and fix stale links/paths immediately. +- Rely on git history for timing; no manual timestamps necessary. +- Auto-correct typos and formatting without asking; escalate only for new patterns or major warnings. +- Ensure instructions stay aligned with repo reality—if something drifts, repair it and note the fix in context. + +## When to Ask +- Saw a pattern (≥3 uses)? Ask: “Document this pattern?” +- Fixed a bug? Ask: “Add this to warnings?” +- Completed a repeatable workflow? Ask: “Document this workflow?” +- Resolved confusion for the team? Ask: “Add this clarification?” +- Skip documenting secrets, temporary hacks, or anything explicitly excluded. + +## Project Snapshot +- React 19 + Vite 6 + TypeScript frontend managed with pnpm; Mantine, TanStack Query, and Lingui power UI/data/localization (package.json). +- Directus SDK configured in `src/lib/directus.ts` for both app and participant APIs; `src/lib/api.ts` centralizes custom REST helpers. +- Tailwind is layered on top of Mantine components (see `src/routes/project/conversation/ProjectConversationOverview.tsx` and peers) for fine-grained styling. +- Account security lives under `src/routes/settings/UserSettingsRoute.tsx`, with Directus TFA mutations in `src/components/settings/hooks/index.ts`. + +## Build / Run / Tooling +- Install: `pnpm install` +- Dev (full app): `pnpm dev` (sets `VITE_DISABLE_SENTRY` and `VITE_PARTICIPANT_BASE_URL`) +- Dev (participant router): `pnpm participant:dev` +- Build: `pnpm build` (runs `tsc` then `vite build`) +- Preview: `pnpm preview` +- Lint/format: `pnpm lint`, `pnpm lint:fix`, `pnpm format`, `pnpm format:check` +- i18n: `pnpm messages:extract`, `pnpm messages:compile` +- No automated test script defined in package.json. 
+ +## Repeating Patterns (3+ sightings) +- **React Query hook hubs**: Each feature owns a `hooks/index.ts` exposing `useQuery`/`useMutation` wrappers with shared `useQueryClient` invalidation logic (`src/components/{conversation,project,chat,participant,...}/hooks/index.ts`). +- **Lingui macros for copy**: Most routed screens import `t` from `@lingui/core/macro` and `Trans` from `@lingui/react/macro` to localize UI strings (e.g. `src/routes/auth/Login.tsx`, `src/routes/project/conversation/ProjectConversationOverview.tsx`). +- **Mantine + Tailwind blend**: Screens compose Mantine primitives (`Stack`, `Group`, `ActionIcon`, etc.) while layering Tailwind utility classes via `className`, alongside toast feedback via `@/components/common/Toaster` (e.g. `src/components/conversation/ConversationDangerZone.tsx`, `src/components/dropzone/UploadConversationDropzone.tsx`). + +## Change Hotspots (git history) +- Translation bundles dominate churn: `src/locales/{en-US,de-DE,es-ES,fr-FR,nl-NL}.{po,ts}` appear in 50–60 commits each (`git log` frequency). +- Core API glue in `src/lib/api.ts` shows ~20 touches, indicating frequent iteration. +- UI wiring files under `src/components/**/hooks/index.ts` and participant flows see regular updates alongside translations. + +## Slow-Moving Files +- Configuration and workflow guides under `.cursor/rules/` show single commits each. +- Build tooling such as `vite.config.ts` (3 commits) and `tailwind.config.js` rarely change compared to feature code. + +## TODO / FIXME / HACK Inventory +- `src/routes/project/conversation/ProjectConversationOverview.tsx`: TODO improve links component design. +- `src/routes/project/conversation/ProjectConversationTranscript.tsx`: TODO consider reusable conversation flags hook. +- `src/routes/participant/ParticipantStart.tsx`: FIXME limit lucide icon bundle for onboarding cards. +- `src/lib/directus.ts`: TODO standardize Directus error handling and add localization polish. 
+- `src/lib/api.ts`: FIXME decompose monolithic API helper into feature-scoped modules. +- `src/components/conversation/OngoingConversationsSummaryCard.tsx`: FIXME evaluate using Aggregate API for counts. +- `src/routes/project/library/ProjectLibrary.tsx`: TODO move permission checks server-side. +- `src/components/conversation/ConversationLink.tsx`: TODO drop redundant prop. +- `src/components/announcement/hooks/useProcessedAnnouncements.ts`: FIXME flatten hook into utility. +- `src/components/common/Markdown.tsx`: FIXME remove Tally embed workaround when possible. + +## Gotchas & Notes +- README references `docs/getting_started.md`, but that file is missing in this workspace—expect setup details elsewhere. +- Toast notifications are the primary success/error surface; missing translations or wrong toast copy stands out quickly. +- Localization workflow is active: keep Lingui extract/compile scripts in mind when touching `t`/`Trans` strings. +- Directus client instances expect environment-configured URLs (`DIRECTUS_PUBLIC_URL`, `DIRECTUS_CONTENT_PUBLIC_URL`); local dev needs these in `.env`. +- Custom Directus POSTs (like 2FA) call `directus.request` with a function signature rather than `restRequest`; reuse `postDirectus` from `src/components/settings/hooks/index.ts` to stay consistent. +- UI mutations should surface inline feedback: pair toasts with contextual Mantine `Alert` components inside modals/forms for errors or warnings. +- Directus login surfaces 2FA by responding with `INVALID_OTP`; `src/routes/auth/Login.tsx` toggles an OTP field and retries using `useLoginMutation`. Reuse that pattern when touching other auth entry points. +- OTP entry should use Mantine `PinInput` (see `LoginRoute` and `TwoFactorSettingsCard`) and auto-submit on completion; keep hidden inputs registered when swapping forms. +- Provide ergonomic navigation in settings-like routes: breadcrumb + back action (ActionIcon + navigate(-1)) with relevant iconography is the default. 
diff --git a/echo/frontend/package.json b/echo/frontend/package.json index d3312619..c6f6f0ca 100644 --- a/echo/frontend/package.json +++ b/echo/frontend/package.json @@ -19,7 +19,7 @@ }, "dependencies": { "@ai-sdk/react": "^1.2.12", - "@directus/sdk": "^18.0.3", + "@directus/sdk": "^20.1.1", "@dnd-kit/core": "^6.3.1", "@dnd-kit/sortable": "^10.0.0", "@dnd-kit/utilities": "^3.2.2", diff --git a/echo/frontend/pnpm-lock.yaml b/echo/frontend/pnpm-lock.yaml index 60c66125..a0657d1c 100644 --- a/echo/frontend/pnpm-lock.yaml +++ b/echo/frontend/pnpm-lock.yaml @@ -15,8 +15,8 @@ importers: specifier: ^1.2.12 version: 1.2.12(react@19.0.0)(zod@3.24.2) '@directus/sdk': - specifier: ^18.0.3 - version: 18.0.3 + specifier: ^20.1.1 + version: 20.1.1 '@dnd-kit/core': specifier: ^6.3.1 version: 6.3.1(react-dom@19.0.0(react@19.0.0))(react@19.0.0) @@ -627,9 +627,9 @@ packages: react: ^16.8.0 || ^17 || ^18 || ^19 react-dom: ^16.8.0 || ^17 || ^18 || ^19 - '@directus/sdk@18.0.3': - resolution: {integrity: sha512-PnEDRDqr2x/DG3HZ3qxU7nFp2nW6zqJqswjii57NhriXgTz4TBUI8NmSdzQvnyHuTL9J0nedYfQGfW4v8odS1A==} - engines: {node: '>=18.0.0'} + '@directus/sdk@20.1.1': + resolution: {integrity: sha512-cCWvxDRKOygVHBkKA7l1I4O4niomK8SKR/+Ul1L9NFF1K2zAf3jRqhjc7JavCECOiiAWME/s2wYatNFqFFh0gQ==} + engines: {node: '>=22'} '@dnd-kit/accessibility@3.1.1': resolution: {integrity: sha512-2P+YgaXF+gRsIihwwY1gCsQSYnu9Zyj2py8kY5fFvUM1qm2WA2u639R6YNVfU4GWr+ZM5mqEsfHZZLoRONbemw==} @@ -5172,7 +5172,7 @@ snapshots: react-dom: 19.0.0(react@19.0.0) react-is: 17.0.2 - '@directus/sdk@18.0.3': {} + '@directus/sdk@20.1.1': {} '@dnd-kit/accessibility@3.1.1(react@19.0.0)': dependencies: diff --git a/echo/frontend/src/Router.tsx b/echo/frontend/src/Router.tsx index 2c2f27ce..8d92f8f4 100644 --- a/echo/frontend/src/Router.tsx +++ b/echo/frontend/src/Router.tsx @@ -89,6 +89,10 @@ const ProjectUnsubscribe = createLazyNamedRoute( "ProjectUnsubscribe", ); const DebugPage = createLazyRoute(() => import("./routes/Debug")); +const 
UserSettingsRoute = createLazyNamedRoute( + () => import("./routes/settings/UserSettingsRoute"), + "UserSettingsRoute", +); export const mainRouter = createBrowserRouter([ { @@ -244,6 +248,20 @@ export const mainRouter = createBrowserRouter([ ), path: "projects", }, + { + children: [ + { + element: , + index: true, + }, + ], + element: ( + + + + ), + path: "settings", + }, { element: , path: "*", diff --git a/echo/frontend/src/components/auth/hooks/index.ts b/echo/frontend/src/components/auth/hooks/index.ts index 7d548df5..b223d541 100644 --- a/echo/frontend/src/components/auth/hooks/index.ts +++ b/echo/frontend/src/components/auth/hooks/index.ts @@ -20,7 +20,13 @@ export const useCurrentUser = () => try { return directus.request( readUser("me", { - fields: ["id", "first_name", "email", "disable_create_project"], + fields: [ + "id", + "first_name", + "email", + "disable_create_project", + "tfa_secret", + ], }), ); } catch (_error) { @@ -147,8 +153,22 @@ export const useRegisterMutation = () => { // todo: add redirection logic here export const useLoginMutation = () => { return useMutation({ - mutationFn: (payload: Parameters) => { - return directus.login(...payload); + mutationFn: async ({ + email, + password, + otp, + }: { + email: string; + password: string; + otp?: string; + }) => { + return directus.login( + email, + password, + { + otp: otp || undefined, + }, + ); }, onSuccess: () => { toast.success("Login successful"); diff --git a/echo/frontend/src/components/layout/Header.tsx b/echo/frontend/src/components/layout/Header.tsx index 92b975a5..39daef3a 100644 --- a/echo/frontend/src/components/layout/Header.tsx +++ b/echo/frontend/src/components/layout/Header.tsx @@ -7,6 +7,7 @@ import { IconLogout, IconNotes, IconSettings, + IconShieldLock, } from "@tabler/icons-react"; import { useParams } from "react-router"; import { @@ -21,6 +22,7 @@ import { Announcements } from "../announcement/Announcements"; import { TopAnnouncementBar } from 
"../announcement/TopAnnouncementBar"; import { Logo } from "../common/Logo"; import { LanguagePicker } from "../language/LanguagePicker"; +import { useI18nNavigate } from "@/hooks/useI18nNavigate"; const User = ({ name, email }: { name: string; email: string }) => (
{ const logoutMutation = useLogoutMutation(); const { loading, isAuthenticated } = useAuthenticated(); const { data: user } = useCurrentUser(); + const navigate = useI18nNavigate(); // maybe useEffect(params) / useState is better here? // but when we change language, we reload the page (check LanguagePicker.tsx) @@ -93,6 +96,9 @@ export const Header = () => { doRedirect: true, }); }; + const handleSettingsClick = () => { + navigate("/settings"); + }; return ( <> @@ -148,6 +154,15 @@ export const Header = () => { + } + onClick={handleSettingsClick} + > + + Settings + + + } component="a" diff --git a/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx b/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx new file mode 100644 index 00000000..0a8174f3 --- /dev/null +++ b/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx @@ -0,0 +1,426 @@ +import { t } from "@lingui/core/macro"; +import { Trans } from "@lingui/react/macro"; +import { + ActionIcon, + Anchor, + Alert, + Badge, + Button, + CopyButton, + Divider, + Group, + List, + Modal, + PasswordInput, + Paper, + Skeleton, + Stack, + Switch, + Text, + TextInput, + Tooltip, + PinInput, +} from "@mantine/core"; +import { useDisclosure } from "@mantine/hooks"; +import { + IconCheck, + IconCopy, + IconInfoCircle, + IconLock, +} from "@tabler/icons-react"; +import { useEffect, useMemo, useState } from "react"; +import { QRCode } from "@/components/common/QRCode"; +import { + type GenerateTwoFactorResponse, + useDisableTwoFactorMutation, + useEnableTwoFactorMutation, + useGenerateTwoFactorMutation, +} from "./hooks"; + +interface TwoFactorSettingsCardProps { + isLoading: boolean; + isTwoFactorEnabled: boolean; +} + +const AUTH_APP_LINKS = [ + { + href: "https://apps.apple.com/us/app/google-authenticator/id388497605", + label: "Google Authenticator (iOS)", + }, + { + href: "https://play.google.com/store/apps/details?id=com.google.android.apps.authenticator2", + label: "Google Authenticator 
(Android)", + }, + { + href: "https://authy.com/download", + label: "Authy", + }, + { + href: "https://www.microsoft.com/en-us/security/mobile-authenticator-app", + label: "Microsoft Authenticator", + }, +]; + +export const TwoFactorSettingsCard = ({ + isLoading, + isTwoFactorEnabled, +}: TwoFactorSettingsCardProps) => { + const [enableModalOpened, { close: closeEnableModal, open: openEnableModal }] = + useDisclosure(false); + const [ + disableModalOpened, + { close: closeDisableModal, open: openDisableModal }, + ] = useDisclosure(false); + + const generateSecretMutation = useGenerateTwoFactorMutation(); + const enableTwoFactorMutation = useEnableTwoFactorMutation(); + const disableTwoFactorMutation = useDisableTwoFactorMutation(); + const { reset: resetGenerateSecret } = generateSecretMutation; + const { reset: resetEnableTwoFactor } = enableTwoFactorMutation; + const { reset: resetDisableTwoFactor } = disableTwoFactorMutation; + + const [password, setPassword] = useState(""); + const [otp, setOtp] = useState(""); + const [disableOtp, setDisableOtp] = useState(""); + const [setupStep, setSetupStep] = useState<"password" | "verify">("password"); + const [generatedSecret, setGeneratedSecret] = + useState(null); + + const isMutating = + generateSecretMutation.isPending || + enableTwoFactorMutation.isPending || + disableTwoFactorMutation.isPending; + + useEffect(() => { + if (!enableModalOpened) { + setPassword(""); + setOtp(""); + setSetupStep("password"); + setGeneratedSecret(null); + resetGenerateSecret(); + resetEnableTwoFactor(); + } + }, [ + enableModalOpened, + resetEnableTwoFactor, + resetGenerateSecret, + ]); + + useEffect(() => { + if (!disableModalOpened) { + setDisableOtp(""); + resetDisableTwoFactor(); + } + }, [ + disableModalOpened, + resetDisableTwoFactor, + ]); + + const handleToggle = () => { + if (isTwoFactorEnabled) { + openDisableModal(); + return; + } + openEnableModal(); + }; + + const handleGenerateSecret = async () => { + if (!password) 
return; + try { + const data = await generateSecretMutation.mutateAsync({ password }); + if (data) { + setGeneratedSecret(data); + setSetupStep("verify"); + setPassword(""); + } + } catch (_error) { + // handled in mutation onError + } + }; + + const handleEnableTwoFactor = async (submittedOtp?: string) => { + if (!generatedSecret || enableTwoFactorMutation.isPending) return; + const trimmedOtp = (submittedOtp ?? otp).trim(); + if (trimmedOtp.length < 6) return; + + try { + await enableTwoFactorMutation.mutateAsync({ + otp: trimmedOtp, + secret: generatedSecret.secret, + }); + closeEnableModal(); + } catch (_error) { + // handled in mutation onError + } + }; + + const handleDisableTwoFactor = async (submittedOtp?: string) => { + if (disableTwoFactorMutation.isPending) return; + const trimmedOtp = (submittedOtp ?? disableOtp).trim(); + if (trimmedOtp.length < 6) return; + try { + await disableTwoFactorMutation.mutateAsync({ otp: trimmedOtp }); + closeDisableModal(); + } catch (_error) { + // handled in mutation onError + } + }; + + const renderEnableModalContent = () => { + if (setupStep === "password") { + return ( + + {generateSecretMutation.isError && ( + + {generateSecretMutation.error?.message ?? + t`Something went wrong while generating the secret.`} + + )} + + + + Confirm your password to generate a new secret for your + authenticator app. + + + + setPassword(event.currentTarget.value)} + data-autofocus + disabled={generateSecretMutation.isPending} + /> + + + + + + ); + } + + if (!generatedSecret) { + return null; + } + + return ( + + {enableTwoFactorMutation.isError && ( + + {enableTwoFactorMutation.error?.message ?? + t`We couldn’t enable two-factor authentication. Double-check your code and try again.`} + + )} + + + Scan the QR code or copy the secret into your app. + + + + +
+ +
+ + + {generatedSecret.secret} + + + +
+
+ + + + Authenticator code + + setOtp(value)} + onComplete={(value) => handleEnableTwoFactor(value)} + inputMode="numeric" + disabled={enableTwoFactorMutation.isPending} + /> + + + Enter the current six-digit code from your authenticator app. + + + + + + + + +
+ ); + }; + + return ( + <> + + + + + + + + Two-factor authentication + + + + Keep access secure with a one-time code from your authenticator + app. Toggle two-factor authentication for this account. + + + + + + + {isLoading ? ( + + ) : ( + + )} + + + + + + + + + + Recommended apps + + + + + {AUTH_APP_LINKS.map((link) => ( + + + {link.label} + + + ))} + + + + + + Enable two-factor authentication} + size="lg" + > + {renderEnableModalContent()} + + + Disable two-factor authentication} + size="md" + > + + + Enter a valid code to turn off two-factor authentication. + + + + + Authenticator code + + setDisableOtp(value)} + onComplete={(value) => handleDisableTwoFactor(value)} + inputMode="numeric" + disabled={disableTwoFactorMutation.isPending} + /> + + + {disableTwoFactorMutation.isError && ( + + {disableTwoFactorMutation.error?.message ?? + t`We couldn’t disable two-factor authentication. Try again with a fresh code.`} + + )} + + + + + + + + + ); +}; + +const CopySecretButton = ({ secret }: { secret: string }) => { + if (!secret) { + return null; + } + + return ( + + {({ copied, copy }) => ( + + + {copied ? 
: } + + + )} + + ); +}; diff --git a/echo/frontend/src/components/settings/hooks/index.ts b/echo/frontend/src/components/settings/hooks/index.ts new file mode 100644 index 00000000..27105ed2 --- /dev/null +++ b/echo/frontend/src/components/settings/hooks/index.ts @@ -0,0 +1,78 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; +import { toast } from "@/components/common/Toaster"; +import { throwWithMessage } from "@/components/auth/utils/errorUtils"; +import { directus } from "@/lib/directus"; + +export interface GenerateTwoFactorResponse { + secret: string; + otpauth_url: string; +} + +const postDirectus = async ( + path: string, + body: Record, +) => { + try { + return await directus.request(() => ({ + body: JSON.stringify(body), + method: "POST", + path, + })); + } catch (error) { + throwWithMessage(error); + } +}; + +export const useGenerateTwoFactorMutation = () => { + return useMutation({ + mutationFn: async ({ password }: { password: string }) => { + const data = await postDirectus( + "/users/me/tfa/generate", + { password }, + ); + + return data; + }, + onError: (error: Error) => { + toast.error(error.message); + }, + }); +}; + +export const useEnableTwoFactorMutation = () => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async ({ otp, secret }: { otp: string; secret: string }) => { + await postDirectus("/users/me/tfa/enable", { otp, secret }); + }, + onError: (error: Error) => { + toast.error(error.message); + }, + onSuccess: () => { + queryClient.invalidateQueries({ + queryKey: ["users", "me"], + }); + toast.success("Two-factor authentication enabled"); + }, + }); +}; + +export const useDisableTwoFactorMutation = () => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async ({ otp }: { otp: string }) => { + await postDirectus("/users/me/tfa/disable", { otp }); + }, + onError: (error: Error) => { + toast.error(error.message); + }, + onSuccess: () => { + 
queryClient.invalidateQueries({ + queryKey: ["users", "me"], + }); + toast.success("Two-factor authentication disabled"); + }, + }); +}; diff --git a/echo/frontend/src/routes/auth/Login.tsx b/echo/frontend/src/routes/auth/Login.tsx index 9a684ca1..beedb21f 100644 --- a/echo/frontend/src/routes/auth/Login.tsx +++ b/echo/frontend/src/routes/auth/Login.tsx @@ -1,4 +1,4 @@ -import { readItems, readProviders } from "@directus/sdk"; +import { readProviders } from "@directus/sdk"; import { t } from "@lingui/core/macro"; import { Trans } from "@lingui/react/macro"; import { @@ -9,16 +9,18 @@ import { Container, Divider, PasswordInput, + PinInput, Stack, Text, TextInput, Title, } from "@mantine/core"; import { useDocumentTitle } from "@mantine/hooks"; +import { useAutoAnimate } from "@formkit/auto-animate/react"; import { IconBrandGoogle } from "@tabler/icons-react"; import { useQuery } from "@tanstack/react-query"; import type React from "react"; -import { useEffect, useState } from "react"; +import { useEffect, useRef, useState } from "react"; import { useForm } from "react-hook-form"; import { useSearchParams } from "react-router"; import { useLoginMutation } from "@/components/auth/hooks"; @@ -60,10 +62,16 @@ const LoginWithProvider = ({ export const LoginRoute = () => { useDocumentTitle(t`Login | Dembrane`); - const { register, handleSubmit } = useForm<{ + const { register, handleSubmit, setValue, getValues } = useForm<{ email: string; password: string; - }>(); + otp: string; + }>({ + defaultValues: { + otp: "", + }, + shouldUnregister: false, + }); const [searchParams, _setSearchParams] = useSearchParams(); @@ -76,12 +84,37 @@ export const LoginRoute = () => { const createProjectMutation = useCreateProjectMutation(); const [error, setError] = useState(""); + const [otpRequired, setOtpRequired] = useState(false); + const [otpValue, setOtpValue] = useState(""); + const [formParent] = useAutoAnimate(); + const pinInputRef = useRef(null); const loginMutation = 
useLoginMutation(); - const onSubmit = handleSubmit(async (data) => { + const submitLogin = async (data: { + email: string; + password: string; + otp?: string; + }) => { + if (loginMutation.isPending) return; + try { setError(""); - await loginMutation.mutateAsync([data.email, data.password]); + const trimmedOtp = data.otp?.trim(); + + if (otpRequired && (!trimmedOtp || trimmedOtp.length < 6)) { + setError(t`Enter the 6-digit code from your authenticator app.`); + return; + } + + await loginMutation.mutateAsync({ + email: data.email, + password: data.password, + otp: otpRequired ? trimmedOtp || undefined : undefined, + }); + + setOtpRequired(false); + setValue("otp", ""); + setOtpValue(""); // Auto-create first project for new users if (searchParams.get("new") === "true") { @@ -102,15 +135,37 @@ export const LoginRoute = () => { navigate("/projects"); } } catch (error) { - try { - if ((error as any).errors[0].message !== "") { - setError((error as any).errors[0].message); - } - } catch { + const errors = (error as any)?.errors; + const firstError = Array.isArray(errors) ? errors[0] : undefined; + const code = firstError?.extensions?.code; + const message = + firstError?.message && firstError.message !== "" + ? firstError.message + : undefined; + + if (code === "INVALID_OTP") { + setOtpRequired(true); + setError( + t`That code didn't work. 
Try again with a fresh code from your authenticator app.`, + ); + setValue("otp", ""); + setOtpValue(""); + return; + } + + setOtpRequired(false); + setValue("otp", ""); + setOtpValue(""); + + if (message) { + setError(message); + } else { setError(t`Something went wrong`); } } - }); + }; + + const onSubmit = handleSubmit((formData) => submitLogin(formData)); useEffect(() => { if (searchParams.get("reason") === "INVALID_CREDENTIALS") { @@ -124,6 +179,15 @@ export const LoginRoute = () => { } }, [searchParams]); + useEffect(() => { + if (otpRequired) { + const input = pinInputRef.current?.querySelector("input"); + if (input) { + input.focus(); + } + } + }, [otpRequired]); + return ( @@ -140,33 +204,81 @@ export const LoginRoute = () => { )}
- - {error && {error}} - - Email} - size="lg" - {...register("email")} - placeholder={t`Email`} - required - type="email" - /> - Password} - size="lg" - {...register("password")} - placeholder={t`Password`} - required - /> -
- - - Forgot your password? - - -
+ + + {error && !otpRequired && {error}} + + {otpRequired ? ( + + + Authenticator code + + { + setOtpValue(value); + setValue("otp", value); + }} + onComplete={(value) => { + setOtpValue(value); + setValue("otp", value); + const { email, password } = getValues(); + void submitLogin({ + email, + password, + otp: value, + }); + }} + inputMode="numeric" + name="otp" + /> + {error && ( + + {error} + + )} + + + Open your authenticator app and enter the current + six-digit code. + + + + ) : ( + <> + Email} + size="lg" + {...register("email")} + placeholder={t`Email`} + required + type="email" + /> + Password} + size="lg" + {...register("password")} + placeholder={t`Password`} + required + /> + + )} + {!otpRequired && ( +
+ + + Forgot your password? + + +
+ )}
@@ -179,7 +291,7 @@ export const LoginRoute = () => { - + {/* {providerQuery.data?.find( (provider) => provider.name === "google", ) && ( @@ -189,17 +301,7 @@ export const LoginRoute = () => { label={t`Sign in with Google`} /> )} - - - {/* {providerQuery.data?.find( - (provider) => provider.name === "outseta", - ) && ( - } - label="Login" - /> - )} */} + */}
diff --git a/echo/frontend/src/routes/settings/UserSettingsRoute.tsx b/echo/frontend/src/routes/settings/UserSettingsRoute.tsx new file mode 100644 index 00000000..e1ded6f5 --- /dev/null +++ b/echo/frontend/src/routes/settings/UserSettingsRoute.tsx @@ -0,0 +1,52 @@ +import { t } from "@lingui/core/macro"; +import { Trans } from "@lingui/react/macro"; +import { + ActionIcon, + Container, + Group, + Stack, + Text, + Title, + Divider, +} from "@mantine/core"; +import { useDocumentTitle } from "@mantine/hooks"; +import { IconShieldLock, IconArrowLeft } from "@tabler/icons-react"; +import { useCurrentUser } from "@/components/auth/hooks"; +import { TwoFactorSettingsCard } from "@/components/settings/TwoFactorSettingsCard"; +import { useI18nNavigate } from "@/hooks/useI18nNavigate"; + +export const UserSettingsRoute = () => { + useDocumentTitle(t`Settings | Dembrane`); + const { data: user, isLoading } = useCurrentUser(); + const navigate = useI18nNavigate(); + + const isTwoFactorEnabled = Boolean(user?.tfa_secret); + + return ( + + + + navigate(-1)} + aria-label={t`Go back`} + > + + + + <Trans>Settings</Trans> + + + + + + + + + + ); +}; diff --git a/echo/server/dembrane/processing_status_utils.py b/echo/server/dembrane/processing_status_utils.py index 522aeb61..103f456a 100644 --- a/echo/server/dembrane/processing_status_utils.py +++ b/echo/server/dembrane/processing_status_utils.py @@ -28,19 +28,23 @@ def add_processing_status( ) -> int: logger.info(f"{event} {message} - {duration_ms}") with directus_client_context() as client: - return client.create_item( - "processing_status", - { - "conversation_id": conversation_id, - "conversation_chunk_id": conversation_chunk_id, - "project_id": project_id, - "project_analysis_run_id": project_analysis_run_id, - "event": event, - "message": message, - "duration_ms": duration_ms, - "parent_id": parent_id, - }, - )["data"]["id"] + # add to DB only if + if duration_ms is not None or (event and "error" in event): + return 
client.create_item( + "processing_status", + { + "conversation_id": conversation_id, + "conversation_chunk_id": conversation_chunk_id, + "project_id": project_id, + "project_analysis_run_id": project_analysis_run_id, + "event": event, + "message": message, + "duration_ms": duration_ms, + "parent_id": parent_id, + }, + )["data"]["id"] + else: + return -1 def set_error_status( diff --git a/echo/server/pyproject.toml b/echo/server/pyproject.toml index fdf69005..e8e05ee7 100644 --- a/echo/server/pyproject.toml +++ b/echo/server/pyproject.toml @@ -97,21 +97,21 @@ packages = ["dembrane"] [tool.uv] package = false -# [tool.mypy] -# plugins = 'pydantic.mypy' -# exclude = ['scripts', 'tests'] -# warn_redundant_casts = true -# warn_unused_ignores = true -# check_untyped_defs = true -# no_implicit_reexport = true -# disallow_untyped_defs = true -# ignore_missing_imports = true +[tool.mypy] +plugins = 'pydantic.mypy' +exclude = ['scripts', 'tests'] +warn_redundant_casts = true +warn_unused_ignores = true +check_untyped_defs = true +no_implicit_reexport = true +disallow_untyped_defs = true +ignore_missing_imports = true -# [tool.pydantic-mypy] -# init_forbid_extra = true -# init_typed = true -# warn_required_dynamic_aliases = true -# warn_untyped_fields = true +[tool.pydantic-mypy] +init_forbid_extra = true +init_typed = true +warn_required_dynamic_aliases = true +warn_untyped_fields = true [tool.pytest.ini_options] testpaths = ["tests", "dembrane"] From 46a843e464e029f865a452a850582ab2b4ed3ff7 Mon Sep 17 00:00:00 2001 From: Usama Date: Fri, 7 Nov 2025 07:02:49 +0000 Subject: [PATCH 04/23] - refactor participant portal to separate code, child components, add routes to it - add verify feature to the participant portal ________________ Ai generated message below: - Added new components for verification process including `Verify`, `VerifySelection`, `VerifyArtefact`, and `VerifyInstructions`. 
- Introduced `VerifiedArtefactsList` and `VerifiedArtefactItem` for displaying approved artefacts. - Integrated `ParticipantConversationAudioContent` to handle audio conversation verification. - Updated routing in `Router.tsx` to accommodate new verification paths. - Refactored `ParticipantLayout` to include `ParticipantHeader` for better UI structure. - Enhanced API with mock implementation for generating verification artefacts. - Updated project schema to include verification settings and topics. --- echo/frontend/src/Router.tsx | 24 + .../components/layout/ParticipantHeader.tsx | 73 +++ .../components/layout/ParticipantLayout.tsx | 43 +- .../participant/ConversationErrorView.tsx | 68 ++ .../participant/ParticipantBody.tsx | 8 +- .../ParticipantConversationAudio.tsx | 604 ++++++------------ .../ParticipantConversationAudioContent.tsx | 44 ++ .../participant/ParticipantEchoMessages.tsx | 58 ++ .../participant/PermissionErrorModal.tsx | 78 +++ .../StopRecordingConfirmationModal.tsx | 79 +++ .../participant/verify/ArtefactModal.tsx | 41 ++ .../verify/VerifiedArtefactItem.tsx | 52 ++ .../verify/VerifiedArtefactsList.tsx | 71 ++ .../components/participant/verify/Verify.tsx | 5 + .../participant/verify/VerifyArtefact.tsx | 379 +++++++++++ .../participant/verify/VerifyInstructions.tsx | 111 ++++ .../participant/verify/VerifySelection.tsx | 109 ++++ .../participant/verify/hooks/index.ts | 92 +++ .../project/ProjectPortalEditor.tsx | 116 ++++ echo/frontend/src/lib/api.ts | 58 ++ echo/frontend/src/lib/typesDirectus.d.ts | 14 + .../src/routes/project/ProjectRoutes.tsx | 2 + echo/server/dembrane/api/participant.py | 144 ++--- echo/server/dembrane/service/project.py | 2 + 24 files changed, 1728 insertions(+), 547 deletions(-) create mode 100644 echo/frontend/src/components/layout/ParticipantHeader.tsx create mode 100644 echo/frontend/src/components/participant/ConversationErrorView.tsx create mode 100644 
echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx create mode 100644 echo/frontend/src/components/participant/ParticipantEchoMessages.tsx create mode 100644 echo/frontend/src/components/participant/PermissionErrorModal.tsx create mode 100644 echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx create mode 100644 echo/frontend/src/components/participant/verify/ArtefactModal.tsx create mode 100644 echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx create mode 100644 echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx create mode 100644 echo/frontend/src/components/participant/verify/Verify.tsx create mode 100644 echo/frontend/src/components/participant/verify/VerifyArtefact.tsx create mode 100644 echo/frontend/src/components/participant/verify/VerifyInstructions.tsx create mode 100644 echo/frontend/src/components/participant/verify/VerifySelection.tsx create mode 100644 echo/frontend/src/components/participant/verify/hooks/index.ts diff --git a/echo/frontend/src/Router.tsx b/echo/frontend/src/Router.tsx index 2c2f27ce..e88f3a99 100644 --- a/echo/frontend/src/Router.tsx +++ b/echo/frontend/src/Router.tsx @@ -14,6 +14,10 @@ import { ProjectConversationLayout } from "./components/layout/ProjectConversati import { ProjectLayout } from "./components/layout/ProjectLayout"; import { ProjectLibraryLayout } from "./components/layout/ProjectLibraryLayout"; import { ProjectOverviewLayout } from "./components/layout/ProjectOverviewLayout"; +import { ParticipantConversationAudioContent } from "./components/participant/ParticipantConversationAudioContent"; +import { Verify } from "./components/participant/verify/Verify"; +import { VerifyArtefact } from "./components/participant/verify/VerifyArtefact"; +import { VerifySelection } from "./components/participant/verify/VerifySelection"; import { ParticipantConversationAudioRoute, ParticipantConversationTextRoute, @@ -267,6 +271,26 @@ 
export const participantRouter = createBrowserRouter([ path: "start", }, { + children: [ + { + element: , + index: true, + }, + { + children: [ + { + element: , + index: true, + }, + { + element: , + path: "approve", + }, + ], + element: , + path: "verify", + }, + ], element: , path: "conversation/:conversationId", }, diff --git a/echo/frontend/src/components/layout/ParticipantHeader.tsx b/echo/frontend/src/components/layout/ParticipantHeader.tsx new file mode 100644 index 00000000..85bfd358 --- /dev/null +++ b/echo/frontend/src/components/layout/ParticipantHeader.tsx @@ -0,0 +1,73 @@ +import { t } from "@lingui/core/macro"; +import { Trans } from "@lingui/react/macro"; +import { ActionIcon, Box, Button, Group } from "@mantine/core"; +import { useDisclosure } from "@mantine/hooks"; +import { IconArrowLeft, IconSettings } from "@tabler/icons-react"; +import { useLocation, useParams } from "react-router"; +import useSessionStorageState from "use-session-storage-state"; +import { useI18nNavigate } from "@/hooks/useI18nNavigate"; +import { Logo } from "../common/Logo"; +import { ParticipantSettingsModal } from "../participant/ParticipantSettingsModal"; + +export const ParticipantHeader = () => { + const [loadingFinished] = useSessionStorageState("loadingFinished", { + defaultValue: true, + }); + const { pathname } = useLocation(); + const { projectId, conversationId } = useParams(); + const navigate = useI18nNavigate(); + const [opened, { open, close }] = useDisclosure(false); + + const showBackButton = + pathname.includes("/verify") && !pathname.includes("/verify/approve"); + const hideSettingsButton = + pathname.includes("start") || pathname.includes("finish"); + + if (!loadingFinished) { + return null; + } + + const handleBack = () => { + if (projectId && conversationId) { + navigate(`/${projectId}/conversation/${conversationId}`); + } + }; + + return ( + <> + + + {showBackButton && ( + + + + )} + + + {!hideSettingsButton && ( + + + + + + )} + + ); +}; diff --git 
a/echo/frontend/src/components/layout/ParticipantLayout.tsx b/echo/frontend/src/components/layout/ParticipantLayout.tsx index 220707ab..f2f964d5 100644 --- a/echo/frontend/src/components/layout/ParticipantLayout.tsx +++ b/echo/frontend/src/components/layout/ParticipantLayout.tsx @@ -1,36 +1,10 @@ -import { t } from "@lingui/core/macro"; -import { ActionIcon, Box, Group } from "@mantine/core"; -import { useDisclosure } from "@mantine/hooks"; -import { IconSettings } from "@tabler/icons-react"; import { Outlet, useLocation } from "react-router"; -import useSessionStorageState from "use-session-storage-state"; -import { Logo } from "../common/Logo"; - -import { ParticipantSettingsModal } from "../participant/ParticipantSettingsModal"; import { I18nProvider } from "./I18nProvider"; - -const ParticipantHeader = () => { - const [loadingFinished] = useSessionStorageState("loadingFinished", { - defaultValue: true, - }); - - if (!loadingFinished) { - return null; - } - - return ( - - - - ); -}; +import { ParticipantHeader } from "./ParticipantHeader"; export const ParticipantLayout = () => { const { pathname } = useLocation(); const isReportPage = pathname.includes("report"); - const hideSettingsButton = - pathname.includes("start") || pathname.includes("finish"); - const [opened, { open, close }] = useDisclosure(false); if (isReportPage) { return ( @@ -44,24 +18,9 @@ export const ParticipantLayout = () => { return ( - -
- {!hideSettingsButton && ( - - - - - - )}
diff --git a/echo/frontend/src/components/participant/ConversationErrorView.tsx b/echo/frontend/src/components/participant/ConversationErrorView.tsx new file mode 100644 index 00000000..757a494f --- /dev/null +++ b/echo/frontend/src/components/participant/ConversationErrorView.tsx @@ -0,0 +1,68 @@ +import { Trans } from "@lingui/react/macro"; +import { Button, Group, Text } from "@mantine/core"; +import { IconPlus, IconReload } from "@tabler/icons-react"; + +type ConversationErrorViewProps = { + conversationDeletedDuringRecording: boolean; + newConversationLink: string | null; +}; + +export const ConversationErrorView = ({ + conversationDeletedDuringRecording, + newConversationLink, +}: ConversationErrorViewProps) => { + return ( +
+
+ + {conversationDeletedDuringRecording ? ( + + Conversation Ended + + ) : ( + + Something went wrong + + )} + + + {conversationDeletedDuringRecording ? ( + + It looks like the conversation was deleted while you were + recording. We've stopped the recording to prevent any issues. You + can start a new one anytime. + + ) : ( + + The conversation could not be loaded. Please try again or contact + support. + + )} + + + + {newConversationLink && ( + + )} + +
+
+ ); +}; diff --git a/echo/frontend/src/components/participant/ParticipantBody.tsx b/echo/frontend/src/components/participant/ParticipantBody.tsx index 7bfd3d90..fdf1fcd5 100644 --- a/echo/frontend/src/components/participant/ParticipantBody.tsx +++ b/echo/frontend/src/components/participant/ParticipantBody.tsx @@ -30,13 +30,13 @@ export const ParticipantBody = ({ viewResponses = false, children, interleaveMessages = true, - recordingStarted = false, + isRecording = false, }: PropsWithChildren<{ projectId: string; conversationId: string; viewResponses?: boolean; interleaveMessages?: boolean; - recordingStarted?: boolean; + isRecording?: boolean; }>) => { const [ref] = useAutoAnimate(); const [chatRef] = useAutoAnimate(); @@ -119,13 +119,13 @@ export const ParticipantBody = ({ - {!recordingStarted && ( + {!isRecording && (

Welcome

)} - {recordingStarted && ( + {isRecording && (
{ENABLE_CONVERSATION_HEALTH && ( { const { projectId, conversationId } = useParams(); + const location = useLocation(); const textModeUrl = `/${projectId}/conversation/${conversationId}/text`; const finishUrl = `/${projectId}/conversation/${conversationId}/finish`; + const verifyUrl = `/${projectId}/conversation/${conversationId}/verify`; + + // Check if we're on the verify route + const isOnVerifyRoute = location.pathname.includes("/verify"); // Get device ID from cookies for audio recording const savedDeviceId = Cookies.get("micDeviceId"); const deviceId = savedDeviceId || ""; + const { iso639_1 } = useLanguage(); const projectQuery = useParticipantProjectById(projectId ?? ""); const conversationQuery = useConversationQuery(projectId, conversationId); const chunks = useConversationChunksQuery(projectId, conversationId); - const repliesQuery = useConversationRepliesQuery(conversationId); const uploadChunkMutation = useUploadConversationChunk(); + const repliesQuery = useConversationRepliesQuery(conversationId); + + // State for Echo cooldown management + const [lastReplyTime, setLastReplyTime] = useState(null); + const [remainingCooldown, setRemainingCooldown] = useState(0); + const [showCooldownMessage, setShowCooldownMessage] = useState(false); + + // useChat hook for Echo messages + const { + messages: echoMessages, + isLoading: echoIsLoading, + status: echoStatus, + error: echoError, + handleSubmit, + } = useChat({ + api: `${API_BASE_URL}/conversations/${conversationId}/get-reply`, + body: { language: iso639_1 }, + experimental_prepareRequestBody() { + return { + language: iso639_1, + }; + }, + initialMessages: + repliesQuery.data?.map((msg) => ({ + content: msg.content_text ?? "", + id: String(msg.id), + role: msg.type === "assistant_reply" ? "assistant" : "user", + })) ?? 
[], + onError: (error) => { + console.error("onError", error); + }, + }); const onChunk = (chunk: Blob) => { uploadChunkMutation.mutate({ @@ -85,11 +117,6 @@ export const ParticipantConversationAudio = () => { threshold: 0.1, }); - const [troubleShootingGuideOpened, setTroubleShootingGuideOpened] = - useState(false); - const [lastReplyTime, setLastReplyTime] = useState(null); - const [remainingCooldown, setRemainingCooldown] = useState(0); - const [showCooldownMessage, setShowCooldownMessage] = useState(false); const [ conversationDeletedDuringRecording, setConversationDeletedDuringRecording, @@ -100,35 +127,8 @@ export const ParticipantConversationAudio = () => { const [opened, { open, close }] = useDisclosure(false); // Navigation and language const navigate = useI18nNavigate(); - const { iso639_1 } = useLanguage(); const newConversationLink = useProjectSharingLink(projectQuery.data); - // Calculate remaining cooldown time - const getRemainingCooldown = useCallback(() => { - if (!lastReplyTime) return 0; - const cooldownSeconds = DEFAULT_REPLY_COOLDOWN; - const elapsedSeconds = Math.floor( - (new Date().getTime() - lastReplyTime.getTime()) / 1000, - ); - return Math.max(0, cooldownSeconds - elapsedSeconds); - }, [lastReplyTime]); - - // Update cooldown timer - useEffect(() => { - if (!lastReplyTime) return; - - const interval = setInterval(() => { - const remaining = getRemainingCooldown(); - setRemainingCooldown(remaining); - - if (remaining <= 0) { - clearInterval(interval); - } - }, 1000); - - return () => clearInterval(interval); - }, [lastReplyTime, getRemainingCooldown]); - const audioRecorder = useChunkedAudioRecorder({ deviceId, onChunk }); useWakeLock({ obtainWakeLockOnMount: true }); @@ -157,6 +157,32 @@ export const ParticipantConversationAudio = () => { useWindowEvent("microphoneDeviceChanged", handleMicrophoneDeviceChanged); + // Calculate remaining cooldown time + const getRemainingCooldown = useCallback(() => { + if (!lastReplyTime) return 0; + 
const cooldownSeconds = DEFAULT_REPLY_COOLDOWN; + const elapsedSeconds = Math.floor( + (Date.now() - lastReplyTime.getTime()) / 1000, + ); + return Math.max(0, cooldownSeconds - elapsedSeconds); + }, [lastReplyTime]); + + // Update cooldown timer + useEffect(() => { + if (!lastReplyTime) return; + + const interval = setInterval(() => { + const remaining = getRemainingCooldown(); + setRemainingCooldown(remaining); + + if (remaining <= 0) { + clearInterval(interval); + } + }, 1000); + + return () => clearInterval(interval); + }, [lastReplyTime, getRemainingCooldown]); + // Monitor conversation status during recording - handle deletion mid-recording useEffect(() => { if (!isRecording) return; @@ -195,43 +221,34 @@ export const ParticipantConversationAudio = () => { stopRecording, ]); - const { - messages: echoMessages, - isLoading, - status, - error, - handleSubmit, - } = useChat({ - api: `${API_BASE_URL}/conversations/${conversationId}/get-reply`, - body: { language: iso639_1 }, - experimental_prepareRequestBody() { - return { - language: iso639_1, - }; - }, - initialMessages: - repliesQuery.data?.map((msg) => ({ - content: msg.content_text ?? "", - id: String(msg.id), - role: msg.type === "assistant_reply" ? "assistant" : "user", - })) ?? [], - onError: (error) => { - console.error("onError", error); - }, - }); - // Handlers - const handleCheckMicrophoneAccess = async () => { - const permissionError = await checkPermissionError(); - if (["granted", "prompt"].includes(permissionError ?? "")) { - window.location.reload(); - } else { - alert( - t`Microphone access is still denied. Please check your settings and try again.`, - ); + const handleStopRecording = () => { + if (isRecording) { + pauseRecording(); + open(); } }; + const handleConfirmFinish = async () => { + setIsStopping(true); + try { + stopRecording(); + await finishConversation(conversationId ?? 
""); + close(); + navigate(finishUrl); + } catch (error) { + console.error("Error finishing conversation:", error); + toast.error(t`Failed to finish conversation. Please try again.`); + setIsStopping(false); + } + }; + + const handleSwitchToText = () => { + stopRecording(); + close(); + navigate(textModeUrl); + }; + const handleReply = async (e: React.MouseEvent) => { const remaining = getRemainingCooldown(); if (remaining > 0) { @@ -269,27 +286,6 @@ export const ParticipantConversationAudio = () => { } }; - const handleStopRecording = () => { - if (isRecording) { - pauseRecording(); - open(); - } - }; - - const handleConfirmFinish = async () => { - setIsStopping(true); - try { - stopRecording(); - await finishConversation(conversationId ?? ""); - close(); - navigate(finishUrl); - } catch (error) { - console.error("Error finishing conversation:", error); - toast.error(t`Failed to finish conversation. Please try again.`); - setIsStopping(false); - } - }; - if (conversationQuery.isLoading || projectQuery.isLoading) { return ; } @@ -301,206 +297,38 @@ export const ParticipantConversationAudio = () => { conversationDeletedDuringRecording ) { return ( -
-
- - {conversationDeletedDuringRecording ? ( - - Conversation Ended - - ) : ( - - Something went wrong - - )} - - - {conversationDeletedDuringRecording ? ( - - It looks like the conversation was deleted while you were - recording. We've stopped the recording to prevent any issues. - You can start a new one anytime. - - ) : ( - - The conversation could not be loaded. Please try again or - contact support. - - )} - - - - {newConversationLink && ( - - )} - -
-
+ ); } return ( -
+ {/* modal for permissions error */} - true} - centered - fullScreen - radius={0} - transitionProps={{ duration: 200, transition: "fade" }} - withCloseButton={false} - > -
- -
- - Oops! It looks like microphone access was denied. No worries, - though! We've got a handy troubleshooting guide for you. Feel - free to check it out. Once you've resolved the issue, come back - and visit this page again to check if your microphone is ready. - -
- - - - -
-
-
+ {/* modal for stop recording confirmation */} - {} : close} - closeOnClickOutside={!isStopping} - closeOnEscape={!isStopping} - centered - title={ - - Finish Conversation - - } - size="sm" - radius="md" - padding="xl" - > - - - - Are you sure you want to finish the conversation? - - - - - - - - - - - {projectQuery.data && conversationQuery.data && ( - - )} - - - {echoMessages && echoMessages.length > 0 && ( - <> - {echoMessages.map((message, index) => ( - - ))} - {status !== "streaming" && status !== "ready" && !error && ( - - )} - - )} - - {error && } - + close={close} + isStopping={isStopping} + handleConfirmFinish={handleConfirmFinish} + handleResume={resumeRecording} + handleSwitchToText={handleSwitchToText} + /> + + +
@@ -511,9 +339,9 @@ export const ParticipantConversationAudio = () => { > { /> - {/* Recording time indicator */} - {isRecording && ( -
- - {isPaused ? ( - - ) : ( -
- )} - - {Math.floor(recordingTime / 3600) > 0 && ( - <> - {Math.floor(recordingTime / 3600) - .toString() - .padStart(2, "0")} - : - + + {/* Recording time indicator */} + {isRecording && ( +
+ + {isPaused ? ( + + ) : ( +
)} - {Math.floor((recordingTime % 3600) / 60) - .toString() - .padStart(2, "0")} - :{(recordingTime % 60).toString().padStart(2, "0")} - - -
- )} + + {Math.floor(recordingTime / 3600) > 0 && ( + <> + {Math.floor(recordingTime / 3600) + .toString() + .padStart(2, "0")} + : + + )} + {Math.floor((recordingTime % 3600) / 60) + .toString() + .padStart(2, "0")} + :{(recordingTime % 60).toString().padStart(2, "0")} + +
+
+ )} - {!isRecording && ( - {chunks?.data && - chunks.data.length > 0 && - !!projectQuery.data?.is_get_reply_enabled && ( - - - - )} - - +
+ ) : ( + ECHO + )} + + )} + {recordingTime >= 60 && + !isOnVerifyRoute && + projectQuery.data?.is_verify_enabled && ( + )} - - {isPaused ? ( - - ) : ( - - )} - - + )} )} -
+ ); }; diff --git a/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx b/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx new file mode 100644 index 00000000..1bc2d9bd --- /dev/null +++ b/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx @@ -0,0 +1,44 @@ +import type { Message } from "@ai-sdk/react"; +import { useOutletContext, useParams } from "react-router"; +import { useConversationQuery, useParticipantProjectById } from "./hooks"; +import { ParticipantBody } from "./ParticipantBody"; +import { ParticipantEchoMessages } from "./ParticipantEchoMessages"; +import { VerifiedArtefactsList } from "./verify/VerifiedArtefactsList"; + +type OutletContextType = { + isRecording: boolean; + echoMessages: Message[]; + echoIsLoading: boolean; + echoStatus: string; + echoError: Error | undefined; +}; + +export const ParticipantConversationAudioContent = () => { + const { projectId, conversationId } = useParams(); + const { isRecording, echoMessages, echoIsLoading, echoStatus, echoError } = + useOutletContext(); + const projectQuery = useParticipantProjectById(projectId ?? 
""); + const conversationQuery = useConversationQuery(projectId, conversationId); + + return ( + <> + {projectQuery.data && conversationQuery.data && ( + + )} + + + + + + ); +}; diff --git a/echo/frontend/src/components/participant/ParticipantEchoMessages.tsx b/echo/frontend/src/components/participant/ParticipantEchoMessages.tsx new file mode 100644 index 00000000..6189b6a9 --- /dev/null +++ b/echo/frontend/src/components/participant/ParticipantEchoMessages.tsx @@ -0,0 +1,58 @@ +import type { Message } from "@ai-sdk/react"; +import { t } from "@lingui/core/macro"; +import { Stack } from "@mantine/core"; +import { EchoErrorAlert } from "./EchoErrorAlert"; +import SpikeMessage from "./SpikeMessage"; + +type ParticipantEchoMessagesProps = { + echoMessages: Message[]; + isLoading: boolean; + status: string; + error: Error | undefined; +}; + +export const ParticipantEchoMessages = ({ + echoMessages, + isLoading, + status, + error, +}: ParticipantEchoMessagesProps) => { + return ( + + {echoMessages && echoMessages.length > 0 && ( + <> + {echoMessages.map((message, index) => ( + + ))} + {status !== "streaming" && status !== "ready" && !error && ( + + )} + + )} + + {error && } + + ); +}; diff --git a/echo/frontend/src/components/participant/PermissionErrorModal.tsx b/echo/frontend/src/components/participant/PermissionErrorModal.tsx new file mode 100644 index 00000000..1a3e9154 --- /dev/null +++ b/echo/frontend/src/components/participant/PermissionErrorModal.tsx @@ -0,0 +1,78 @@ +import { t } from "@lingui/core/macro"; +import { Trans } from "@lingui/react/macro"; +import { Button, Divider, Modal, Stack } from "@mantine/core"; +import { IconQuestionMark, IconReload } from "@tabler/icons-react"; +import { useState } from "react"; +import { checkPermissionError } from "@/lib/utils"; + +type PermissionErrorModalProps = { + permissionError: string | null | undefined; +}; + +export const PermissionErrorModal = ({ + permissionError, +}: PermissionErrorModalProps) => { + const 
[troubleShootingGuideOpened, setTroubleShootingGuideOpened] = + useState(false); + + const handleCheckMicrophoneAccess = async () => { + const permissionState = await checkPermissionError(); + if (["granted", "prompt"].includes(permissionState ?? "")) { + window.location.reload(); + } else { + alert( + t`Microphone access is still denied. Please check your settings and try again.`, + ); + } + }; + + return ( + true} + centered + fullScreen + radius={0} + transitionProps={{ duration: 200, transition: "fade" }} + withCloseButton={false} + > +
+ +
+ + Oops! It looks like microphone access was denied. No worries, + though! We've got a handy troubleshooting guide for you. Feel free + to check it out. Once you've resolved the issue, come back and + visit this page again to check if your microphone is ready. + +
+ + + + +
+
+
+ ); +}; diff --git a/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx b/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx new file mode 100644 index 00000000..47735cf7 --- /dev/null +++ b/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx @@ -0,0 +1,79 @@ +import { Trans } from "@lingui/react/macro"; +import { Anchor, Button, Group, Modal, Stack, Text } from "@mantine/core"; + +type StopRecordingConfirmationModalProps = { + opened: boolean; + close: () => void; + isStopping: boolean; + handleConfirmFinish: () => void; + handleResume: () => void; + handleSwitchToText: () => void; +}; + +export const StopRecordingConfirmationModal = ({ + opened, + close, + isStopping, + handleConfirmFinish, + handleResume, + handleSwitchToText, +}: StopRecordingConfirmationModalProps) => { + const handleClose = () => { + handleResume(); + close(); + }; + + return ( + {} : handleClose} + closeOnClickOutside={!isStopping} + closeOnEscape={!isStopping} + centered + title={ + + Recording Paused + + } + size="sm" + radius="md" + padding="xl" + > + + + + + + + Switch to text input + + + + ); +}; diff --git a/echo/frontend/src/components/participant/verify/ArtefactModal.tsx b/echo/frontend/src/components/participant/verify/ArtefactModal.tsx new file mode 100644 index 00000000..0c11ba37 --- /dev/null +++ b/echo/frontend/src/components/participant/verify/ArtefactModal.tsx @@ -0,0 +1,41 @@ +import { Box, LoadingOverlay, Modal, ScrollArea } from "@mantine/core"; +import { Markdown } from "../../common/Markdown"; + +type ArtefactModalProps = { + opened: boolean; + onClose: () => void; + onExited?: () => void; + artefact?: { + id: string; + content: string | null | undefined; + conversation_id: string | Conversation | null | undefined; + approved_at: string | null | undefined; + } | null; + isLoading?: boolean; +}; + +export const ArtefactModal = ({ + opened, + onClose, + onExited, + artefact, + isLoading = false, +}: 
ArtefactModalProps) => { + return ( + + + + + + + ); +}; diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx new file mode 100644 index 00000000..c6f7cc3d --- /dev/null +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx @@ -0,0 +1,52 @@ +import { t } from "@lingui/core/macro"; +import { ActionIcon, Box, Group, Paper, Text } from "@mantine/core"; +import { IconRosetteDiscountCheckFilled } from "@tabler/icons-react"; +import { format } from "date-fns"; +import { VERIFY_OPTIONS } from "./VerifySelection"; + +type VerifiedArtefactItemProps = { + artefact: ConversationArtefact; + onViewArtefact: (artefactId: string) => void; +}; + +export const VerifiedArtefactItem = ({ + artefact, + onViewArtefact, +}: VerifiedArtefactItemProps) => { + // Get the label from the key + const option = VERIFY_OPTIONS.find((opt) => opt.key === artefact.key); + const label = option?.label || artefact.key; + + // Format the timestamp using date-fns + const formattedDate = artefact.approved_at + ? 
format(new Date(artefact.approved_at), "h:mm a") + : ""; + + return ( + + onViewArtefact(artefact.id)} + > + + + + + + {label} + {formattedDate && ( + + {formattedDate} + + )} + + + + + ); +}; diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx new file mode 100644 index 00000000..b84500e6 --- /dev/null +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx @@ -0,0 +1,71 @@ +import { Box, Skeleton, Stack } from "@mantine/core"; +import { useDisclosure } from "@mantine/hooks"; +import { useState } from "react"; +import { ArtefactModal } from "./ArtefactModal"; +import { useConversationArtefact, useConversationArtefacts } from "./hooks"; +import { VerifiedArtefactItem } from "./VerifiedArtefactItem"; + +type VerifiedArtefactsListProps = { + conversationId: string; +}; + +export const VerifiedArtefactsList = ({ + conversationId, +}: VerifiedArtefactsListProps) => { + const { data: artefacts, isLoading } = + useConversationArtefacts(conversationId); + const [opened, { open, close }] = useDisclosure(false); + const [selectedArtefactId, setSelectedArtefactId] = useState( + null, + ); + + // Fetch the full artefact content when one is selected + const { data: selectedArtefact, isLoading: isLoadingArtefact } = + useConversationArtefact(selectedArtefactId ?? 
undefined); + + const handleViewArtefact = (artefactId: string) => { + setSelectedArtefactId(artefactId); + open(); + }; + + const handleCloseModal = () => { + close(); + }; + + const handleModalExited = () => { + setSelectedArtefactId(null); + }; + + if (isLoading) { + return ( + + + + ); + } + + if (!artefacts || artefacts.length === 0) { + return null; + } + + return ( + <> + + {artefacts.map((artefact: ConversationArtefact) => ( + + ))} + + + + ); +}; diff --git a/echo/frontend/src/components/participant/verify/Verify.tsx b/echo/frontend/src/components/participant/verify/Verify.tsx new file mode 100644 index 00000000..2bafa334 --- /dev/null +++ b/echo/frontend/src/components/participant/verify/Verify.tsx @@ -0,0 +1,5 @@ +import { Outlet } from "react-router"; + +export const Verify = () => { + return ; +}; diff --git a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx new file mode 100644 index 00000000..ff401939 --- /dev/null +++ b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx @@ -0,0 +1,379 @@ +import { t } from "@lingui/core/macro"; +import { Trans } from "@lingui/react/macro"; +import { + ActionIcon, + Button, + Group, + Paper, + ScrollArea, + Stack, + Text, + Title, +} from "@mantine/core"; +import { IconPencil, IconPlayerPause, IconVolume } from "@tabler/icons-react"; +import { memo, useEffect, useRef, useState } from "react"; +import { useParams, useSearchParams } from "react-router"; +import { useI18nNavigate } from "@/hooks/useI18nNavigate"; +import { Logo } from "../../common/Logo"; +import { Markdown } from "../../common/Markdown"; +import { MarkdownWYSIWYG } from "../../form/MarkdownWYSIWYG/MarkdownWYSIWYG"; +import { + useGenerateVerificationArtefact, + useSaveVerificationArtefact, +} from "./hooks"; +import { VerifyInstructions } from "./VerifyInstructions"; +import { VERIFY_OPTIONS } from "./VerifySelection"; + +const MemoizedMarkdownWYSIWYG 
= memo(MarkdownWYSIWYG); + +export const VerifyArtefact = () => { + const { projectId, conversationId } = useParams(); + const navigate = useI18nNavigate(); + const [searchParams] = useSearchParams(); + const saveArtefactMutation = useSaveVerificationArtefact(); + const generateArtefactMutation = useGenerateVerificationArtefact(); + + // Get selected option from URL params + const selectedOptionKey = searchParams.get("key"); + + // States + const [showInstructions, setShowInstructions] = useState(true); + const [isApproving, setIsApproving] = useState(false); + const [isRevising, setIsRevising] = useState(false); + const [artefactContent, setArtefactContent] = useState(""); + const [hasGenerated, setHasGenerated] = useState(false); + const [isEditing, setIsEditing] = useState(false); + const [editedContent, setEditedContent] = useState(""); + const [readAloudUrl, setReadAloudUrl] = useState(""); + const [isPlaying, setIsPlaying] = useState(false); + const [lastReviseTime, setLastReviseTime] = useState(null); + const [reviseTimeRemaining, setReviseTimeRemaining] = useState(0); + + // Ref for audio element + const audioRef = useRef(null); + const reviseTimerRef = useRef(null); + + const selectedOption = VERIFY_OPTIONS.find( + (opt) => opt.key === selectedOptionKey, + ); + const selectedOptionLabel = selectedOption?.label || t`verified`; + + // Redirect back if no selected option key + useEffect(() => { + if (!selectedOptionKey) { + navigate(`/${projectId}/conversation/${conversationId}/verify`, { + replace: true, + }); + } + }, [selectedOptionKey, navigate, projectId, conversationId]); + + // biome-ignore lint/correctness/useExhaustiveDependencies: we want to regenerate the artefact if the user clicks the next button + useEffect(() => { + if (!selectedOptionKey || !conversationId || hasGenerated) return; + + const generateArtefact = async () => { + try { + setHasGenerated(true); + const response = await generateArtefactMutation.mutateAsync({ + conversationId, + 
topicList: [selectedOptionKey], // only one for now + }); + + // Get the first artifact from the response + if (response && response.length > 0) { + const artifact = response[0]; + setArtefactContent(artifact.content); + // Set read aloud URL from API response + setReadAloudUrl(artifact.read_aloud_stream_url || ""); + } + } catch (error) { + console.error("Failed to generate artifact:", error); + setHasGenerated(false); // Reset on error so user can retry + } + }; + + generateArtefact(); + }, [selectedOptionKey, conversationId, hasGenerated]); + + const handleNextFromInstructions = () => { + setShowInstructions(false); + }; + + const handleApprove = async () => { + if (!conversationId || !selectedOptionKey || !artefactContent) return; + + setIsApproving(true); + try { + await saveArtefactMutation.mutateAsync({ + artefactContent, + conversationId, + key: selectedOptionKey, + }); + + // Navigate back to conversation + const conversationUrl = `/${projectId}/conversation/${conversationId}`; + navigate(conversationUrl); + } finally { + setIsApproving(false); + } + }; + + const handleRevise = async () => { + if (!conversationId || !selectedOptionKey) return; + setIsRevising(true); + setLastReviseTime(Date.now()); // Start cooldown timer + try { + // Mock API call to revise artefact (3 seconds) + await new Promise((resolve) => setTimeout(resolve, 3000)); + + const response = await generateArtefactMutation.mutateAsync({ + conversationId: conversationId, + topicList: [selectedOptionKey], // only one for now + }); + + // Get the first artifact from the response + if (response && response.length > 0) { + const artifact = response[0]; + setArtefactContent(artifact.content); + // Set read aloud URL from API response + setReadAloudUrl(artifact.read_aloud_stream_url || ""); + } + } finally { + setIsRevising(false); + } + }; + + const handleEdit = () => { + setEditedContent(artefactContent); + setIsEditing(true); + }; + + const handleCancelEdit = () => { + setIsEditing(false); + 
setEditedContent(""); + }; + + const handleSaveEdit = () => { + if (!editedContent) return; + + // Update the artefact content with edited content + setArtefactContent(editedContent); + // Exit edit mode to show Revise/Approve buttons + setIsEditing(false); + setEditedContent(""); + }; + + const handleReadAloud = () => { + if (!audioRef.current) { + audioRef.current = new Audio(readAloudUrl); + audioRef.current.addEventListener("ended", () => { + setIsPlaying(false); + }); + } + + if (isPlaying) { + audioRef.current.pause(); + setIsPlaying(false); + } else { + audioRef.current.play(); + setIsPlaying(true); + } + }; + + // Cooldown timer for revise button (2 minutes) + useEffect(() => { + if (lastReviseTime === null) return; + + const COOLDOWN_MS = 2 * 60 * 1000; // 2 minutes in milliseconds + + const updateTimer = () => { + const now = Date.now(); + const elapsed = now - lastReviseTime; + const remaining = Math.max(0, COOLDOWN_MS - elapsed); + + setReviseTimeRemaining(remaining); + + if (remaining === 0) { + if (reviseTimerRef.current) { + clearInterval(reviseTimerRef.current); + reviseTimerRef.current = null; + } + } + }; + + // Update immediately + updateTimer(); + + // Update every second + reviseTimerRef.current = setInterval(updateTimer, 1000); + + return () => { + if (reviseTimerRef.current) { + clearInterval(reviseTimerRef.current); + reviseTimerRef.current = null; + } + }; + }, [lastReviseTime]); + + // Cleanup audio on unmount + useEffect(() => { + return () => { + if (audioRef.current) { + audioRef.current.pause(); + audioRef.current = null; + } + }; + }, []); + + // step 1: show instructions while generating response from api + if (showInstructions) { + return ( + + ); + } + + // step 2: show artefact with revise/approve once user clicks next on step 1 + return ( + + + + {isRevising ? ( + +
+ +
+ + + + Regenerating the artefact + + + + + This would just take a few moments + + + +
+ ) : ( + + {/* Title with Read Aloud Button */} + + + <Trans id="participant.verify.artefact.title"> + Artefact: {selectedOptionLabel} + </Trans> + + {readAloudUrl && ( + + {isPlaying ? ( + + ) : ( + + )} + + )} + + + {/* Markdown Content or Editor */} + {isEditing ? ( + + ) : ( +
+ +
+ )} +
+ )} +
+
+ + {/* Action buttons */} + + {isEditing ? ( + <> + + + + ) : ( + <> + + + + + + + + )} + +
+ ); +}; diff --git a/echo/frontend/src/components/participant/verify/VerifyInstructions.tsx b/echo/frontend/src/components/participant/verify/VerifyInstructions.tsx new file mode 100644 index 00000000..3ed79acf --- /dev/null +++ b/echo/frontend/src/components/participant/verify/VerifyInstructions.tsx @@ -0,0 +1,111 @@ +import { Trans } from "@lingui/react/macro"; +import { Box, Button, Group, Loader, Stack, Text } from "@mantine/core"; +import { IconArrowRight } from "@tabler/icons-react"; + +type VerifyInstructionsProps = { + objectLabel: string; + isLoading?: boolean; + onNext: () => void; + buttonText?: string; +}; + +const INSTRUCTIONS = [ + { + key: "receive-artefact", + render: (objectLabel: string) => ( + + You'll soon get {objectLabel} to verify. + + ), + }, + { + key: "read-aloud", + render: (objectLabel: string) => ( + + Once you receive the {objectLabel}, read it aloud and share out loud + what you want to change, if anything. + + ), + }, + { + key: "revise-artefact", + render: (objectLabel: string) => ( + + Once you have discussed, hit "revise" to see the {objectLabel} change to + reflect your discussion. + + ), + }, + { + key: "approve-artefact", + render: (objectLabel: string) => ( + + If you are happy with the {objectLabel} click "Approve" to show you feel + heard. + + ), + }, + { + key: "approval-helps", + render: (_objectLabel: string) => ( + + Your approval helps us understand what you really think! 
+ + ), + }, +]; +export const VerifyInstructions = ({ + objectLabel, + isLoading = false, + onNext, +}: VerifyInstructionsProps) => { + return ( + + + {INSTRUCTIONS.map((instruction, index) => ( + + + + {index + 1} + + + + {instruction.render(objectLabel)} + + + ))} + + + {/* Next button */} + + + ); +}; diff --git a/echo/frontend/src/components/participant/verify/VerifySelection.tsx b/echo/frontend/src/components/participant/verify/VerifySelection.tsx new file mode 100644 index 00000000..7f71e8d8 --- /dev/null +++ b/echo/frontend/src/components/participant/verify/VerifySelection.tsx @@ -0,0 +1,109 @@ +import { t } from "@lingui/core/macro"; +import { Trans } from "@lingui/react/macro"; +import { Box, Button, Group, Stack, Title } from "@mantine/core"; +import { IconArrowRight } from "@tabler/icons-react"; +import { useState } from "react"; +import { useParams } from "react-router"; +import { useI18nNavigate } from "@/hooks/useI18nNavigate"; +import { useParticipantProjectById } from "../hooks"; + +// Verify options that match the verification_topics field +export const VERIFY_OPTIONS = [ + { + icon: "✅", + key: "agreements", + label: t`What we actually agreed on`, + }, + { + icon: "🔍", + key: "gems", + label: t`Hidden gems`, + }, + { + icon: "👀", + key: "truths", + label: t`Painful truths`, + }, + { + icon: "🚀", + key: "moments", + label: t`Breakthrough moments`, + }, + { + icon: "↗️", + key: "actions", + label: t`What we think should happen`, + }, + { + icon: "⚠️", + key: "disagreements", + label: t`Moments we agreed to disagree`, + }, +]; + +export const VerifySelection = () => { + const { projectId, conversationId } = useParams(); + const navigate = useI18nNavigate(); + const [selectedOption, setSelectedOption] = useState(null); + const projectQuery = useParticipantProjectById(projectId ?? ""); + + // Filter options based on enabled topics + const enabledTopics = projectQuery.data?.verification_topics ?? 
[]; + const availableOptions = VERIFY_OPTIONS.filter((option) => + enabledTopics.includes(option.key), + ); + + const handleNext = () => { + if (!selectedOption) return; + + // Navigate directly to approve route with URL param + navigate( + `/${projectId}/conversation/${conversationId}/verify/approve?key=${selectedOption}`, + ); + }; + + return ( + + {/* Main content */} + + + <Trans id="participant.verify.selection.title"> + What do you want to verify? + </Trans> + + + {/* Options list */} + + {availableOptions.map((option) => ( + setSelectedOption(option.key)} + className={`cursor-pointer rounded-3xl border-2 px-4 py-3 transition-all ${ + selectedOption === option.key + ? "border-blue-500 bg-blue-50" + : "border-gray-300 bg-white hover:border-gray-400" + }`} + > + + {option.icon} + {option.label} + + + ))} + + + + {/* Next button */} + + + ); +}; diff --git a/echo/frontend/src/components/participant/verify/hooks/index.ts b/echo/frontend/src/components/participant/verify/hooks/index.ts new file mode 100644 index 00000000..f523f58d --- /dev/null +++ b/echo/frontend/src/components/participant/verify/hooks/index.ts @@ -0,0 +1,92 @@ +import { createItem, readItems } from "@directus/sdk"; +import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { toast } from "@/components/common/Toaster"; +import { generateVerificationArtefact } from "@/lib/api"; +import { directus } from "@/lib/directus"; + +// Hook for generating verification artefacts +export const useGenerateVerificationArtefact = () => { + return useMutation({ + mutationFn: generateVerificationArtefact, + onError: (error) => { + console.error("Failed to generate verification artefact:", error); + toast.error("Failed to generate artefact. 
Please try again."); + }, + }); +}; + +// Hook for saving verification artefacts +export const useSaveVerificationArtefact = () => { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async ({ + conversationId, + artefactContent, + key, + }: { + conversationId: string; + artefactContent: string; + key: string; + }) => { + // await new Promise((resolve) => setTimeout(resolve, 1000)); + return directus.request( + createItem("conversation_artefact", { + approved_at: new Date().toISOString(), + content: artefactContent, + conversation_id: conversationId, + key, + }), + ); + }, + onError: (error) => { + console.error("Failed to save verification artefact:", error); + toast.error("Failed to approve artefact. Please try again."); + }, + onSuccess: (_data, variables) => { + toast.success("Artefact approved successfully!"); + queryClient.invalidateQueries({ + queryKey: ["conversations", variables.conversationId], + }); + queryClient.invalidateQueries({ + queryKey: ["conversation_artefacts", variables.conversationId], + }); + }, + }); +}; + +// Hook for fetching conversation artefacts +export const useConversationArtefacts = ( + conversationId: string | undefined, +) => { + return useQuery({ + enabled: !!conversationId, + queryFn: () => + directus.request( + readItems("conversation_artefact", { + fields: ["id", "conversation_id", "approved_at", "key"], + filter: { conversation_id: { _eq: conversationId } }, + sort: ["-approved_at"], + }), + ), + queryKey: ["conversation_artefacts", conversationId], + }); +}; + +// Hook for fetching a single artefact by ID (with aggressive caching - content never changes) +export const useConversationArtefact = (artefactId: string | undefined) => { + return useQuery({ + enabled: !!artefactId, + queryFn: () => + directus.request( + readItems("conversation_artefact", { + fields: ["id", "content", "conversation_id", "approved_at"], + filter: { id: { _eq: artefactId } }, + limit: 1, + }), + ), + queryKey: 
["conversation_artefact", artefactId], + select: (data) => (data.length > 0 ? data[0] : null), + staleTime: Number.POSITIVE_INFINITY, + }); +}; diff --git a/echo/frontend/src/components/project/ProjectPortalEditor.tsx b/echo/frontend/src/components/project/ProjectPortalEditor.tsx index 72ef3afc..a2932072 100644 --- a/echo/frontend/src/components/project/ProjectPortalEditor.tsx +++ b/echo/frontend/src/components/project/ProjectPortalEditor.tsx @@ -29,6 +29,7 @@ import { Logo } from "../common/Logo"; import { FormLabel } from "../form/FormLabel"; import { MarkdownWYSIWYG } from "../form/MarkdownWYSIWYG/MarkdownWYSIWYG"; import { SaveStatus } from "../form/SaveStatus"; +import { VERIFY_OPTIONS } from "../participant/verify/VerifySelection"; import { useUpdateProjectByIdMutation } from "./hooks"; import { useProjectSharingLink } from "./ProjectQRCode"; import { ProjectTagsInput } from "./ProjectTagsInput"; @@ -44,7 +45,9 @@ const FormSchema = z.object({ get_reply_prompt: z.string(), is_get_reply_enabled: z.boolean(), is_project_notification_subscription_allowed: z.boolean(), + is_verify_enabled: z.boolean(), language: z.enum(["en", "nl", "de", "fr", "es"]), + verification_topics: z.array(z.string()), }); type ProjectPortalFormValues = z.infer; @@ -162,7 +165,9 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ is_get_reply_enabled: project.is_get_reply_enabled ?? false, is_project_notification_subscription_allowed: project.is_project_notification_subscription_allowed ?? false, + is_verify_enabled: project.is_verify_enabled ?? false, language: (project.language ?? "en") as "en" | "nl" | "de" | "fr" | "es", + verification_topics: project.verification_topics ?? 
[], }; }, [project.id]); @@ -187,6 +192,11 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ name: "is_get_reply_enabled", }); + const watchedVerifyEnabled = useWatch({ + control, + name: "is_verify_enabled", + }); + const updateProjectMutation = useUpdateProjectByIdMutation(); const onSave = useCallback( @@ -549,6 +559,112 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ + + + + <Trans id="dashboard.dembrane.verify.title"> + Dembrane Verify + </Trans> + + + + + Experimental + + + + + + + Enable this feature to allow participants to create and + approve "verified objects" from their submissions. This + helps crystallize key ideas, concerns, or summaries. After + the conversation, you can filter for discussions with + verified objects and review them in the overview. + + + + ( + + } + checked={field.value} + onChange={(e) => + field.onChange(e.currentTarget.checked) + } + /> + )} + /> + + ( + + + + + Select which topics participants can use for + verification. + + + + {VERIFY_OPTIONS.map((topic) => ( + { + if (!watchedVerifyEnabled) return; + const newTopics = field.value.includes( + topic.key, + ) + ? 
field.value.filter((t) => t !== topic.key) + : [...field.value, topic.key]; + field.onChange(newTopics); + }} + > + + {topic.icon} + {topic.label} + + + ))} + + + )} + /> + + + + <Trans>Portal Content</Trans> diff --git a/echo/frontend/src/lib/api.ts b/echo/frontend/src/lib/api.ts index b93d2020..bafcbcb6 100644 --- a/echo/frontend/src/lib/api.ts +++ b/echo/frontend/src/lib/api.ts @@ -1063,6 +1063,64 @@ export const generateConversationSummary = async (conversationId: string) => { >(`/conversations/${conversationId}/summarize`); }; +// Mock API call for generating verification artefacts +// TODO: Replace with actual API endpoint +export const generateVerificationArtefact = async (payload: { + conversationId: string; + topicList: string[]; +}): Promise< + { + id: string; + approved_at?: string | null; + content: string; + conversation_id: string; + key: string; + read_aloud_stream_url: string; + }[] +> => { + // Simulate API delay (3 seconds) + await new Promise((resolve) => setTimeout(resolve, 2000)); + + // Mock response matching the API pattern + const mockContent = ` +### 1. Mapping, Wisdom, and Collaboration + +The participants exchanged perspectives on collective intelligence, mapping tools, and developing processes that foster group wisdom. There was mutual interest in both browser-based collaborative mind mapping (like Mindmeister and XMind) and participatory conversation tools that can capture, visualize, and refine insights. + +### 2. Influences and Foundations + +They discussed influential thinkers and frameworks that shape their approaches: + +* Tom Atlee's Wise Democracy Pattern Language, with an emphasis on collective wisdom oriented toward long-term, broad benefit. +* Doug Engelbart's concepts of dynamic knowledge repositories and networked improvement communities, emphasizing continual, systemic improvement. + +### 3. Next Steps + +Moving forward, the team agreed to: + +1. Schedule a follow-up meeting to dive deeper into specific tools +2. 
Share relevant resources and documentation +3. Explore potential collaboration opportunities`; + + return [ + { + approved_at: new Date().toISOString(), + content: mockContent, + conversation_id: payload.conversationId, + id: `artifact-${Date.now()}`, + key: `key-${Date.now()}`, + read_aloud_stream_url: + "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3", + }, + ]; + + // When ready to use real API, replace with: + // return apiNoAuth.post<GenerateVerificationArtefactRequest, GenerateVerificationArtefactResponse>( + // '/verify/generate', + // payload + // ); +}; + export const unsubscribeParticipant = async ( projectId: string, token: string, diff --git a/echo/frontend/src/lib/typesDirectus.d.ts b/echo/frontend/src/lib/typesDirectus.d.ts index 5a77af03..c2a22412 100644 --- a/echo/frontend/src/lib/typesDirectus.d.ts +++ b/echo/frontend/src/lib/typesDirectus.d.ts @@ -56,6 +56,7 @@ type AspectSegment = { }; type Conversation = { + artefacts: any[] | ConversationArtefact[]; chunks: any[] | ConversationChunk[]; conversation_segments: any[] | ConversationSegment[]; created_at?: string | null; @@ -82,6 +83,16 @@ type Conversation = { updated_at?: string | null; }; +type ConversationArtefact = { + approved_at?: string | null; + content?: string | null; + conversation_id?: string | Conversation | null; + created_at?: string | null; + id: string; + key?: string | null; + updated_at?: string | null; +}; + type ConversationChunk = { conversation_id: string | Conversation; conversation_segments: any[] | ConversationSegmentConversationChunk[]; @@ -613,6 +624,8 @@ type Project = { is_conversation_allowed: boolean; is_enhanced_audio_processing_enabled?: boolean | null; is_get_reply_enabled?: boolean | null; + is_verify_enabled?: boolean | null; + verification_topics?: string[] | null; is_project_notification_subscription_allowed?: boolean | null; language?: string | null; name?: string | null; @@ -753,6 +766,7 @@ type CustomDirectusTypes = { aspect: Aspect[]; 
aspect_segment: AspectSegment[]; conversation: Conversation[]; + conversation_artefact: ConversationArtefact[]; conversation_chunk: ConversationChunk[]; conversation_link: ConversationLink[]; conversation_project_tag: ConversationProjectTag[]; diff --git a/echo/frontend/src/routes/project/ProjectRoutes.tsx b/echo/frontend/src/routes/project/ProjectRoutes.tsx index 41230e03..3f0fc81a 100644 --- a/echo/frontend/src/routes/project/ProjectRoutes.tsx +++ b/echo/frontend/src/routes/project/ProjectRoutes.tsx @@ -88,6 +88,8 @@ export const ProjectPortalSettingsRoute = () => { "get_reply_mode", "get_reply_prompt", "is_get_reply_enabled", + "is_verify_enabled", + "verification_topics", "is_project_notification_subscription_allowed", { tags: ["id", "created_at", "text", "sort"], diff --git a/echo/server/dembrane/api/participant.py b/echo/server/dembrane/api/participant.py index 39ffa791..2d86b961 100644 --- a/echo/server/dembrane/api/participant.py +++ b/echo/server/dembrane/api/participant.py @@ -38,7 +38,9 @@ class PublicProjectSchema(BaseModel): is_conversation_allowed: bool is_get_reply_enabled: bool + is_verify_enabled: bool is_project_notification_subscription_allowed: bool + verification_topics: Optional[List[str]] = [] # onboarding default_conversation_tutorial_slug: Optional[str] = None @@ -98,7 +100,7 @@ class ConfirmUploadRequest(BaseModel): # NOTE: This is process-local and won't be shared across workers/pods. # With API_WORKERS=2 and horizontal scaling, the effective limit becomes # 10 × workers × pods instead of strict 10 req/min. -# +# # DECISION (2025-10-03): We accept this risk because: # - Users are authenticated municipal employees (paid customers) # - Normal usage: 6-10 req/min (well under distributed limit) @@ -116,20 +118,19 @@ def check_rate_limit(conversation_id: str) -> bool: Returns True if within limit, False if exceeded. 
""" now = time() - + # Clean old entries if conversation_id in _rate_limit_cache: _rate_limit_cache[conversation_id] = [ - t for t in _rate_limit_cache[conversation_id] - if now - t < _RATE_LIMIT_WINDOW + t for t in _rate_limit_cache[conversation_id] if now - t < _RATE_LIMIT_WINDOW ] else: _rate_limit_cache[conversation_id] = [] - + # Check limit if len(_rate_limit_cache[conversation_id]) >= _RATE_LIMIT_MAX_REQUESTS: return False - + # Add current request _rate_limit_cache[conversation_id].append(now) return True @@ -168,9 +169,7 @@ async def get_project( ) -> dict: try: project = await run_in_thread_pool( - project_service.get_by_id_or_raise, - project_id, - with_tags=True + project_service.get_by_id_or_raise, project_id, with_tags=True ) if project.get("is_conversation_allowed", False) is False: @@ -191,14 +190,9 @@ async def get_conversation( conversation_id: str, ) -> dict: try: - project = await run_in_thread_pool( - project_service.get_by_id_or_raise, - project_id - ) + project = await run_in_thread_pool(project_service.get_by_id_or_raise, project_id) conversation = await run_in_thread_pool( - conversation_service.get_by_id_or_raise, - conversation_id, - with_tags=True + conversation_service.get_by_id_or_raise, conversation_id, with_tags=True ) if project.get("is_conversation_allowed", False) is False: @@ -218,14 +212,9 @@ async def get_conversation_chunks( conversation_id: str, ) -> List[dict]: try: - project = await run_in_thread_pool( - project_service.get_by_id_or_raise, - project_id - ) + project = await run_in_thread_pool(project_service.get_by_id_or_raise, project_id) conversation = await run_in_thread_pool( - conversation_service.get_by_id_or_raise, - conversation_id, - with_chunks=True + conversation_service.get_by_id_or_raise, conversation_id, with_chunks=True ) if project.get("is_conversation_allowed", False) is False: @@ -246,8 +235,7 @@ async def delete_conversation_chunk( ) -> None: try: conversation = await run_in_thread_pool( - 
conversation_service.get_by_id_or_raise, - conversation_id + conversation_service.get_by_id_or_raise, conversation_id ) except ConversationNotFoundException as e: raise HTTPException(status_code=404, detail="Conversation not found") from e @@ -255,10 +243,7 @@ async def delete_conversation_chunk( if project_id != conversation.get("project_id"): raise HTTPException(status_code=404, detail="Conversation not found") - await run_in_thread_pool( - conversation_service.delete_chunk, - chunk_id - ) + await run_in_thread_pool(conversation_service.delete_chunk, chunk_id) return @@ -288,9 +273,7 @@ async def upload_conversation_text( return chunk except ConversationServiceException as e: - raise HTTPException( - status_code=400, detail=str(e) - ) from e + raise HTTPException(status_code=400, detail=str(e)) from e except ConversationNotOpenForParticipationException as e: raise HTTPException( status_code=403, detail="Conversation not open for participation" @@ -331,54 +314,51 @@ async def get_chunk_upload_url( ) -> dict: """ Generate a presigned URL for direct S3 upload. - + This endpoint is fast (<100ms) as it only generates a URL, no file transfer happens through the API. - + Rate limit: 10 requests per minute per conversation. """ - logger.info(f"Presigned URL requested for conversation {conversation_id}, filename: {body.filename}") - + logger.info( + f"Presigned URL requested for conversation {conversation_id}, filename: {body.filename}" + ) + try: # Rate limiting if not check_rate_limit(conversation_id): logger.warning(f"Rate limit exceeded for conversation {conversation_id}") raise HTTPException( status_code=429, - detail="Too many upload requests. Please wait before uploading more files." + detail="Too many upload requests. 
Please wait before uploading more files.", ) - + # Verify conversation exists and is open conversation = await run_in_thread_pool( - conversation_service.get_by_id_or_raise, - conversation_id + conversation_service.get_by_id_or_raise, conversation_id ) project = await run_in_thread_pool( - project_service.get_by_id_or_raise, - conversation["project_id"] + project_service.get_by_id_or_raise, conversation["project_id"] ) - + if not project.get("is_conversation_allowed", False): logger.warning(f"Conversation {conversation_id} not open for participation") - raise HTTPException( - status_code=403, - detail="Conversation not open for participation" - ) - + raise HTTPException(status_code=403, detail="Conversation not open for participation") + # Generate chunk ID chunk_id = generate_uuid() - + # Sanitize filename to prevent path traversal safe_filename = get_sanitized_s3_key(body.filename) - + # Create S3 key with sanitized filename file_key = f"conversation/{conversation_id}/chunks/{chunk_id}-{safe_filename}" - + logger.info(f"Generated S3 key: {file_key}") - + # Generate presigned POST from dembrane.s3 import generate_presigned_post - + presigned_data = await run_in_thread_pool( generate_presigned_post, file_name=file_key, @@ -386,34 +366,30 @@ async def get_chunk_upload_url( size_limit_mb=2048, # 2GB limit expires_in_seconds=3600, # 1 hour ) - + # Construct final file URL file_url = f"{STORAGE_S3_ENDPOINT}/{STORAGE_S3_BUCKET}/{file_key}" - + logger.info(f"Presigned URL generated successfully for chunk {chunk_id}") - + return { "chunk_id": chunk_id, "upload_url": presigned_data["url"], "fields": presigned_data["fields"], "file_url": file_url, } - + except ConversationNotFoundException as e: logger.error(f"Conversation not found: {conversation_id}") raise HTTPException(status_code=404, detail="Conversation not found") from e except ConversationNotOpenForParticipationException as e: logger.error(f"Conversation not open: {conversation_id}") raise HTTPException( - 
status_code=403, - detail="Conversation not open for participation" + status_code=403, detail="Conversation not open for participation" ) from e except Exception as e: logger.error(f"Error generating presigned URL: {e}", exc_info=True) - raise HTTPException( - status_code=500, - detail="Failed to generate upload URL" - ) from e + raise HTTPException(status_code=500, detail="Failed to generate upload URL") from e @ParticipantRouter.post( @@ -426,28 +402,27 @@ async def confirm_chunk_upload( ) -> dict: """ Confirm that a file upload completed and create the chunk record. - + This should be called after the client successfully uploads to S3 using the presigned URL. - + Includes retry logic for S3 eventual consistency. """ logger.info(f"Confirming upload for chunk {body.chunk_id}, conversation {conversation_id}") - + try: # Verify file exists in S3 with retry logic (eventual consistency) file_key = get_sanitized_s3_key(body.file_url) file_size = None max_retries = 3 retry_delays = [0.1, 0.5, 2.0] # 100ms, 500ms, 2s - + for attempt in range(max_retries): try: - file_size = await run_in_thread_pool( - get_file_size_bytes_from_s3, - file_key + file_size = await run_in_thread_pool(get_file_size_bytes_from_s3, file_key) + logger.info( + f"File verified in S3: {file_key}, size: {file_size} bytes, attempt: {attempt + 1}" ) - logger.info(f"File verified in S3: {file_key}, size: {file_size} bytes, attempt: {attempt + 1}") break except Exception as e: if attempt < max_retries - 1: @@ -461,13 +436,13 @@ async def confirm_chunk_upload( logger.error( f"File not found in S3 after {max_retries} attempts: {file_key}. " f"Upload may have failed or S3 is experiencing issues. Error: {e}", - exc_info=True + exc_info=True, ) raise HTTPException( status_code=400, - detail="File not found in S3. Upload may have failed. Please try again." + detail="File not found in S3. Upload may have failed. 
Please try again.", ) from e - + # Create chunk record (reuse existing logic) chunk = await run_in_thread_pool( conversation_service.create_chunk, @@ -478,38 +453,31 @@ async def confirm_chunk_upload( file_url=body.file_url, # Use the S3 URL directly transcript=None, ) - + logger.info( f"Chunk created successfully: {body.chunk_id}, " f"conversation: {conversation_id}, size: {file_size} bytes" ) - + return chunk - + except ConversationNotOpenForParticipationException as e: logger.error(f"Conversation not open for participation: {conversation_id}") raise HTTPException( - status_code=403, - detail="Conversation not open for participation" + status_code=403, detail="Conversation not open for participation" ) from e except ConversationNotFoundException as e: logger.error(f"Conversation not found while confirming upload: {conversation_id}") raise HTTPException(status_code=404, detail="Conversation not found") from e except ConversationServiceException as e: logger.error(f"Failed to create chunk: {e}") - raise HTTPException( - status_code=400, - detail=str(e) - ) from e + raise HTTPException(status_code=400, detail=str(e)) from e except HTTPException: # Re-raise HTTP exceptions as-is raise except Exception as e: logger.error(f"Error confirming upload: {e}", exc_info=True) - raise HTTPException( - status_code=500, - detail="Failed to confirm upload" - ) from e + raise HTTPException(status_code=500, detail="Failed to confirm upload") from e @ParticipantRouter.post( @@ -581,7 +549,7 @@ async def subscribe_notifications(data: NotificationSubscriptionRequest) -> dict await run_in_thread_pool( directus.delete_item, "project_report_notification_participants", - participant["id"] + participant["id"], ) # Create new entry with opt-in diff --git a/echo/server/dembrane/service/project.py b/echo/server/dembrane/service/project.py index cf045df0..d18573fb 100644 --- a/echo/server/dembrane/service/project.py +++ b/echo/server/dembrane/service/project.py @@ -151,6 +151,8 @@ def 
create_shallow_clone( "is_project_notification_subscription_allowed": current_project[ "is_project_notification_subscription_allowed" ], + "is_verify_enabled": current_project["is_verify_enabled"], + "verification_topics": current_project["verification_topics"], } if overrides: From e914842a5500feac027d81c4bd43954119c1222a Mon Sep 17 00:00:00 2001 From: Usama <reach.usamazafar@gmail.com> Date: Fri, 7 Nov 2025 07:31:28 +0000 Subject: [PATCH 05/23] minor fixes --- .../participant/StopRecordingConfirmationModal.tsx | 1 - .../participant/verify/VerifiedArtefactItem.tsx | 8 +++++++- .../src/components/participant/verify/VerifyArtefact.tsx | 7 +++++-- echo/frontend/src/lib/api.ts | 3 +-- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx b/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx index 47735cf7..6a1255e5 100644 --- a/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx +++ b/echo/frontend/src/components/participant/StopRecordingConfirmationModal.tsx @@ -68,7 +68,6 @@ export const StopRecordingConfirmationModal = ({ size="sm" pt="sm" ta="left" - style={{ pointerEvents: isStopping ? "none" : "auto" }} disabled={isStopping} > <Trans id="participant.link.switch.text">Switch to text input</Trans> diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx index c6f7cc3d..f04112b0 100644 --- a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx @@ -19,7 +19,13 @@ export const VerifiedArtefactItem = ({ // Format the timestamp using date-fns const formattedDate = artefact.approved_at - ? format(new Date(artefact.approved_at), "h:mm a") + ? 
(() => { + try { + return format(new Date(artefact.approved_at), "h:mm a"); + } catch { + return ""; + } + })() : ""; return ( diff --git a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx index ff401939..72e3cf1f 100644 --- a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx +++ b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx @@ -121,7 +121,6 @@ export const VerifyArtefact = () => { const handleRevise = async () => { if (!conversationId || !selectedOptionKey) return; setIsRevising(true); - setLastReviseTime(Date.now()); // Start cooldown timer try { // Mock API call to revise artefact (3 seconds) await new Promise((resolve) => setTimeout(resolve, 3000)); @@ -138,6 +137,7 @@ export const VerifyArtefact = () => { // Set read aloud URL from API response setReadAloudUrl(artifact.read_aloud_stream_url || ""); } + setLastReviseTime(Date.now()); // Start cooldown timer } finally { setIsRevising(false); } @@ -164,7 +164,10 @@ export const VerifyArtefact = () => { }; const handleReadAloud = () => { - if (!audioRef.current) { + if (!readAloudUrl) return; + + if (!audioRef.current || audioRef.current.src !== readAloudUrl) { + audioRef.current?.pause(); audioRef.current = new Audio(readAloudUrl); audioRef.current.addEventListener("ended", () => { setIsPlaying(false); diff --git a/echo/frontend/src/lib/api.ts b/echo/frontend/src/lib/api.ts index bafcbcb6..9e62f3f6 100644 --- a/echo/frontend/src/lib/api.ts +++ b/echo/frontend/src/lib/api.ts @@ -1109,8 +1109,7 @@ Moving forward, the team agreed to: conversation_id: payload.conversationId, id: `artifact-${Date.now()}`, key: `key-${Date.now()}`, - read_aloud_stream_url: - "https://www.soundhelix.com/examples/mp3/SoundHelix-Song-1.mp3", + read_aloud_stream_url: "", }, ]; From d78e799728271c52192cce1a88b8f404d3768aeb Mon Sep 17 00:00:00 2001 From: Usama <reach.usamazafar@gmail.com> Date: Fri, 7 Nov 2025 
07:47:24 +0000 Subject: [PATCH 06/23] minor refactor for VerifiedArtefactItem --- .../verify/VerifiedArtefactItem.tsx | 22 ++++++++++--------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx index f04112b0..678e2ef6 100644 --- a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx @@ -9,6 +9,16 @@ type VerifiedArtefactItemProps = { onViewArtefact: (artefactId: string) => void; }; +const formatArtefactTime = (timestamp: string | null | undefined): string => { + if (!timestamp) return ""; + + try { + return format(new Date(timestamp), "h:mm a"); + } catch { + return ""; + } +}; + export const VerifiedArtefactItem = ({ artefact, onViewArtefact, @@ -18,18 +28,10 @@ export const VerifiedArtefactItem = ({ const label = option?.label || artefact.key; // Format the timestamp using date-fns - const formattedDate = artefact.approved_at - ? 
(() => { - try { - return format(new Date(artefact.approved_at), "h:mm a"); - } catch { - return ""; - } - })() - : ""; + const formattedDate = formatArtefactTime(artefact.approved_at); return ( - <Box key={artefact.id} className="flex items-baseline justify-end"> + <Box className="flex items-baseline justify-end"> <Paper className="my-2 cursor-pointer rounded-t-xl rounded-bl-xl p-4 hover:bg-gray-50 transition-colors" onClick={() => onViewArtefact(artefact.id)} From a3c464328e536c8c9cf3b3fd878d64fb45b80131 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti <sameer@dembrane.com> Date: Fri, 7 Nov 2025 08:25:21 +0000 Subject: [PATCH 07/23] fix smells --- .../src/components/auth/hooks/index.ts | 3 +- echo/frontend/src/routes/auth/Login.tsx | 80 +++++++++---------- .../src/routes/settings/UserSettingsRoute.tsx | 8 +- 3 files changed, 40 insertions(+), 51 deletions(-) diff --git a/echo/frontend/src/components/auth/hooks/index.ts b/echo/frontend/src/components/auth/hooks/index.ts index b223d541..faa2db91 100644 --- a/echo/frontend/src/components/auth/hooks/index.ts +++ b/echo/frontend/src/components/auth/hooks/index.ts @@ -163,8 +163,7 @@ export const useLoginMutation = () => { otp?: string; }) => { return directus.login( - email, - password, + { email, password }, { otp: otp || undefined, }, diff --git a/echo/frontend/src/routes/auth/Login.tsx b/echo/frontend/src/routes/auth/Login.tsx index beedb21f..983a847a 100644 --- a/echo/frontend/src/routes/auth/Login.tsx +++ b/echo/frontend/src/routes/auth/Login.tsx @@ -1,10 +1,9 @@ -import { readProviders } from "@directus/sdk"; +import { useAutoAnimate } from "@formkit/auto-animate/react"; import { t } from "@lingui/core/macro"; import { Trans } from "@lingui/react/macro"; import { Alert, Anchor, - Box, Button, Container, Divider, @@ -16,10 +15,6 @@ import { Title, } from "@mantine/core"; import { useDocumentTitle } from "@mantine/hooks"; -import { useAutoAnimate } from "@formkit/auto-animate/react"; -import { IconBrandGoogle } 
from "@tabler/icons-react"; -import { useQuery } from "@tanstack/react-query"; -import type React from "react"; import { useEffect, useRef, useState } from "react"; import { useForm } from "react-hook-form"; import { useSearchParams } from "react-router"; @@ -27,38 +22,35 @@ import { useLoginMutation } from "@/components/auth/hooks"; import { I18nLink } from "@/components/common/i18nLink"; import { toast } from "@/components/common/Toaster"; import { useCreateProjectMutation } from "@/components/project/hooks"; -import { DIRECTUS_PUBLIC_URL } from "@/config"; import { useI18nNavigate } from "@/hooks/useI18nNavigate"; -import { useLanguage } from "@/hooks/useLanguage"; -import { directus } from "@/lib/directus"; -const LoginWithProvider = ({ - provider, - icon, - label, -}: { - provider: string; - icon: React.ReactNode; - label: string; -}) => { - const { language } = useLanguage(); - return ( - <Button - component="a" - href={`${DIRECTUS_PUBLIC_URL}/auth/login/${provider}?redirect=${encodeURIComponent( - `${window.location.origin}/${language}/projects`, - )}`} - size="lg" - c="gray" - color="gray.6" - variant="outline" - rightSection={icon} - fullWidth - > - {label} - </Button> - ); -}; +// const LoginWithProvider = ({ +// provider, +// icon, +// label, +// }: { +// provider: string; +// icon: React.ReactNode; +// label: string; +// }) => { +// const { language } = useLanguage(); +// return ( +// <Button +// component="a" +// href={`${DIRECTUS_PUBLIC_URL}/auth/login/${provider}?redirect=${encodeURIComponent( +// `${window.location.origin}/${language}/projects`, +// )}`} +// size="lg" +// c="gray" +// color="gray.6" +// variant="outline" +// rightSection={icon} +// fullWidth +// > +// {label} +// </Button> +// ); +// }; export const LoginRoute = () => { useDocumentTitle(t`Login | Dembrane`); @@ -75,11 +67,6 @@ export const LoginRoute = () => { const [searchParams, _setSearchParams] = useSearchParams(); - const providerQuery = useQuery({ - queryFn: () => 
directus.request(readProviders()), - queryKey: ["auth-providers"], - }); - const navigate = useI18nNavigate(); const createProjectMutation = useCreateProjectMutation(); @@ -108,8 +95,8 @@ export const LoginRoute = () => { await loginMutation.mutateAsync({ email: data.email, - password: data.password, otp: otpRequired ? trimmedOtp || undefined : undefined, + password: data.password, }); setOtpRequired(false); @@ -135,6 +122,7 @@ export const LoginRoute = () => { navigate("/projects"); } } catch (error) { + // biome-ignore lint/suspicious/noExplicitAny: <todo> const errors = (error as any)?.errors; const firstError = Array.isArray(errors) ? errors[0] : undefined; const code = firstError?.extensions?.code; @@ -230,8 +218,8 @@ export const LoginRoute = () => { const { email, password } = getValues(); void submitLogin({ email, - password, otp: value, + password, }); }} inputMode="numeric" @@ -278,7 +266,11 @@ export const LoginRoute = () => { </div> )} <Button size="lg" type="submit" loading={loginMutation.isPending}> - {otpRequired ? <Trans>Verify code</Trans> : <Trans>Login</Trans>} + {otpRequired ? 
( + <Trans>Verify code</Trans> + ) : ( + <Trans>Login</Trans> + )} </Button> </Stack> </form> diff --git a/echo/frontend/src/routes/settings/UserSettingsRoute.tsx b/echo/frontend/src/routes/settings/UserSettingsRoute.tsx index e1ded6f5..98ad696b 100644 --- a/echo/frontend/src/routes/settings/UserSettingsRoute.tsx +++ b/echo/frontend/src/routes/settings/UserSettingsRoute.tsx @@ -3,14 +3,13 @@ import { Trans } from "@lingui/react/macro"; import { ActionIcon, Container, + Divider, Group, Stack, - Text, Title, - Divider, } from "@mantine/core"; import { useDocumentTitle } from "@mantine/hooks"; -import { IconShieldLock, IconArrowLeft } from "@tabler/icons-react"; +import { IconArrowLeft } from "@tabler/icons-react"; import { useCurrentUser } from "@/components/auth/hooks"; import { TwoFactorSettingsCard } from "@/components/settings/TwoFactorSettingsCard"; import { useI18nNavigate } from "@/hooks/useI18nNavigate"; @@ -29,7 +28,7 @@ export const UserSettingsRoute = () => { <ActionIcon variant="subtle" color="gray" - onClick={() => navigate(-1)} + onClick={() => navigate("..")} aria-label={t`Go back`} > <IconArrowLeft size={18} /> @@ -39,7 +38,6 @@ export const UserSettingsRoute = () => {
- Date: Fri, 7 Nov 2025 09:54:21 +0000 Subject: [PATCH 08/23] feat: add verify API and seed topics --- .../directus/sync/collections/dashboards.json | 7 - echo/directus/sync/collections/flows.json | 17 + .../directus/sync/collections/operations.json | 22 + echo/directus/sync/collections/panels.json | 388 ----------- .../snapshot/collections/announcement.json | 2 +- .../collections/conversation_artifact.json | 28 + .../sync/snapshot/collections/languages.json | 2 +- .../collections/verification_topic.json | 28 + .../verification_topic_translations.json | 28 + .../conversation/conversation_artifacts.json | 28 + .../conversation_artifact/approved_at.json | 44 ++ .../fields/conversation_artifact/content.json | 44 ++ .../conversation_id.json | 49 ++ .../conversation_artifact/date_created.json | 48 ++ .../fields/conversation_artifact/id.json | 46 ++ .../fields/conversation_artifact/key.json | 44 ++ .../last_updated_at.json | 44 ++ .../read_aloud_stream_url.json | 44 ++ .../conversation_artifact/user_created.json | 48 ++ .../conversation_artifact/user_updated.json | 48 ++ .../sync/snapshot/fields/project/context.json | 2 +- ...sation_ask_for_participant_name_label.json | 2 +- .../fields/project/conversations.json | 2 +- .../project/custom_verification_topics.json | 28 + ...conversation_ask_for_participant_name.json | 2 +- .../default_conversation_description.json | 2 +- .../default_conversation_finish_text.json | 2 +- .../project/default_conversation_title.json | 2 +- ...efault_conversation_transcript_prompt.json | 2 +- .../default_conversation_tutorial_slug.json | 2 +- .../fields/project/directus_user_id.json | 2 +- .../fields/project/divider-n6xep9.json | 2 +- .../fields/project/get_reply_mode.json | 2 +- .../fields/project/get_reply_prompt.json | 2 +- .../project/image_generation_model.json | 2 +- .../project/is_conversation_allowed.json | 2 +- .../is_enhanced_audio_processing_enabled.json | 2 +- .../fields/project/is_get_reply_enabled.json | 2 +- 
...ect_notification_subscription_allowed.json | 2 +- .../fields/project/is_verify_enabled.json | 46 ++ .../fields/project/processing_status.json | 2 +- .../fields/project/project_analysis_runs.json | 2 +- .../fields/project/project_chats.json | 2 +- .../fields/project/project_reports.json | 2 +- .../selected_verification_key_list.json | 47 ++ .../sync/snapshot/fields/project/tags.json | 2 +- .../verification_topic/date_created.json | 48 ++ .../verification_topic/date_updated.json | 48 ++ .../fields/verification_topic/icon.json | 44 ++ .../fields/verification_topic/key.json | 44 ++ .../fields/verification_topic/project_id.json | 49 ++ .../fields/verification_topic/prompt.json | 44 ++ .../fields/verification_topic/sort.json | 44 ++ .../verification_topic/translations.json | 31 + .../verification_topic/user_created.json | 48 ++ .../verification_topic/user_updated.json | 48 ++ .../verification_topic_translations/id.json | 44 ++ .../label.json | 44 ++ .../languages_code.json | 44 ++ .../verification_topic_key.json | 44 ++ .../conversation_id.json | 25 + .../conversation_artifact/user_created.json | 25 + .../conversation_artifact/user_updated.json | 25 + .../verification_topic/project_id.json | 25 + .../verification_topic/user_created.json | 25 + .../verification_topic/user_updated.json | 25 + .../languages_code.json | 25 + .../verification_topic_key.json | 25 + echo/server/AGENTS.md | 47 ++ echo/server/dembrane/api/api.py | 2 + echo/server/dembrane/api/verify.py | 633 ++++++++++++++++++ echo/server/dembrane/main.py | 7 + .../generate_artifact.en.jinja | 14 + 73 files changed, 2283 insertions(+), 420 deletions(-) create mode 100644 echo/directus/sync/snapshot/collections/conversation_artifact.json create mode 100644 echo/directus/sync/snapshot/collections/verification_topic.json create mode 100644 echo/directus/sync/snapshot/collections/verification_topic_translations.json create mode 100644 echo/directus/sync/snapshot/fields/conversation/conversation_artifacts.json 
create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/approved_at.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/content.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/conversation_id.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/date_created.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/id.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/key.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/last_updated_at.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/read_aloud_stream_url.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/user_created.json create mode 100644 echo/directus/sync/snapshot/fields/conversation_artifact/user_updated.json create mode 100644 echo/directus/sync/snapshot/fields/project/custom_verification_topics.json create mode 100644 echo/directus/sync/snapshot/fields/project/is_verify_enabled.json create mode 100644 echo/directus/sync/snapshot/fields/project/selected_verification_key_list.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/date_created.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/date_updated.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/icon.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/key.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/project_id.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/prompt.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/sort.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/translations.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic/user_created.json create 
mode 100644 echo/directus/sync/snapshot/fields/verification_topic/user_updated.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic_translations/id.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic_translations/label.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic_translations/languages_code.json create mode 100644 echo/directus/sync/snapshot/fields/verification_topic_translations/verification_topic_key.json create mode 100644 echo/directus/sync/snapshot/relations/conversation_artifact/conversation_id.json create mode 100644 echo/directus/sync/snapshot/relations/conversation_artifact/user_created.json create mode 100644 echo/directus/sync/snapshot/relations/conversation_artifact/user_updated.json create mode 100644 echo/directus/sync/snapshot/relations/verification_topic/project_id.json create mode 100644 echo/directus/sync/snapshot/relations/verification_topic/user_created.json create mode 100644 echo/directus/sync/snapshot/relations/verification_topic/user_updated.json create mode 100644 echo/directus/sync/snapshot/relations/verification_topic_translations/languages_code.json create mode 100644 echo/directus/sync/snapshot/relations/verification_topic_translations/verification_topic_key.json create mode 100644 echo/server/AGENTS.md create mode 100644 echo/server/dembrane/api/verify.py create mode 100644 echo/server/prompt_templates/generate_artifact.en.jinja diff --git a/echo/directus/sync/collections/dashboards.json b/echo/directus/sync/collections/dashboards.json index f1afe9b2..b0464422 100644 --- a/echo/directus/sync/collections/dashboards.json +++ b/echo/directus/sync/collections/dashboards.json @@ -1,11 +1,4 @@ [ - { - "name": "Impact Measurements", - "icon": "space_dashboard", - "note": null, - "color": "#2ECDA7", - "_syncId": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b" - }, { "name": "Announcements", "icon": "space_dashboard", diff --git 
a/echo/directus/sync/collections/flows.json b/echo/directus/sync/collections/flows.json index 0a114cce..e37f9011 100644 --- a/echo/directus/sync/collections/flows.json +++ b/echo/directus/sync/collections/flows.json @@ -32,6 +32,23 @@ "operation": null, "_syncId": "392f4954-059e-4bf1-bdc6-11cc1b66971c" }, + { + "name": "Send Email Base", + "icon": "alternate_email", + "color": "#3CD3AD", + "description": null, + "status": "active", + "trigger": "manual", + "accountability": "activity", + "options": { + "collections": [ + "project" + ], + "requireConfirmation": true + }, + "operation": "302997e3-b03d-4305-90f6-f5ebc7a0cb3e", + "_syncId": "92b4aa37-5068-4dea-92d8-22cd6007c8b9" + }, { "name": "Send Report Emails", "icon": "mail", diff --git a/echo/directus/sync/collections/operations.json b/echo/directus/sync/collections/operations.json index 5168a13b..c169726e 100644 --- a/echo/directus/sync/collections/operations.json +++ b/echo/directus/sync/collections/operations.json @@ -179,6 +179,28 @@ "flow": "ec4e7ea5-72de-4365-b66f-d8f11b549495", "_syncId": "da5f4cce-eff0-426d-a207-d61366899d1f" }, + { + "name": "Send Email", + "key": "mail_vndml", + "type": "mail", + "position_x": 19, + "position_y": 1, + "options": { + "type": "markdown", + "to": [ + "sameer@dembrane.com" + ], + "subject": "Test", + "replyTo": [ + "usama@dembrane.com" + ], + "body": "Hello world" + }, + "resolve": null, + "reject": null, + "flow": "92b4aa37-5068-4dea-92d8-22cd6007c8b9", + "_syncId": "302997e3-b03d-4305-90f6-f5ebc7a0cb3e" + }, { "name": "Report Not Published", "key": "report_not_published", diff --git a/echo/directus/sync/collections/panels.json b/echo/directus/sync/collections/panels.json index 25620d18..50d4f6f1 100644 --- a/echo/directus/sync/collections/panels.json +++ b/echo/directus/sync/collections/panels.json @@ -1,25 +1,4 @@ [ - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Insight generation over the past week", - "icon": null, - "color": null, - 
"show_header": true, - "note": null, - "type": "time-series", - "position_x": 2, - "position_y": 87, - "width": 23, - "height": 9, - "options": { - "collection": "insight", - "function": "count", - "dateField": "created_at", - "valueField": "id", - "filter": null - }, - "_syncId": "10d105b0-55c3-4011-9e20-773f6a21ba80" - }, { "dashboard": "d88e3e89-f55a-4279-87a5-159d8efc6dbc", "name": "Announcement Readers Graph", @@ -108,61 +87,6 @@ }, "_syncId": "25a32ec7-5397-4124-a821-66508523f883" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Total Chats", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 33, - "width": 15, - "height": 9, - "options": { - "collection": "project_chat", - "field": "id", - "function": "count" - }, - "_syncId": "2644d6f3-af03-4050-b142-b0aeaa72a6ee" - }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of projects", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 65, - "width": 15, - "height": 9, - "options": { - "collection": "project", - "field": "id", - "function": "count", - "filter": { - "_and": [ - { - "_and": [ - { - "created_at": { - "_between": [ - "$NOW(-7)", - "$NOW" - ] - } - } - ] - } - ] - }, - "sortField": "id" - }, - "_syncId": "26a37f7f-a58c-489d-a2c1-c53121db7d3a" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "proj - audio", @@ -183,29 +107,6 @@ }, "_syncId": "3bc77a0e-4499-439f-b632-a3740a9bfe37" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Insight generation over the past 3 months", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "time-series", - "position_x": 2, - "position_y": 33, - "width": 23, - "height": 9, - "options": { - "collection": "insight", - "function": "count", - "dateField": "created_at", - "range": "3 months", - 
"valueField": "id", - "filter": null, - "precision": "day" - }, - "_syncId": "3f2471aa-5ddf-40a7-9b4f-15694d7043c1" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "imagen", @@ -249,37 +150,6 @@ }, "_syncId": "4cd13340-f2a5-4e81-a76b-3b4fd7a51623" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Total Chats", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 87, - "width": 15, - "height": 9, - "options": { - "collection": "project_chat", - "field": "id", - "function": "count", - "filter": { - "_and": [ - { - "date_created": { - "_between": [ - "$NOW(-7)", - "$NOW" - ] - } - } - ] - } - }, - "_syncId": "58981e36-a7b3-49c9-aa59-ae0e59f0e3d0" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "MAU", @@ -313,42 +183,6 @@ }, "_syncId": "7badae0f-0f55-40f4-99a1-6dbba630595f" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": null, - "icon": null, - "color": null, - "show_header": false, - "note": null, - "type": "label", - "position_x": 2, - "position_y": 60, - "width": 23, - "height": 3, - "options": { - "text": "Measures Weekly\n" - }, - "_syncId": "844e94da-44a5-4447-ada6-e9255157d479" - }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Total Conversations", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 22, - "width": 15, - "height": 9, - "options": { - "collection": "conversation", - "field": "id", - "function": "count" - }, - "_syncId": "85da5c87-97f7-47ba-b9a5-15c982996889" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "new projects / week", @@ -372,61 +206,6 @@ }, "_syncId": "912a84fa-549c-4275-ba47-dcb7a4c6b99e" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of insights", - "icon": null, - "color": null, - "show_header": true, - "note": null, - 
"type": "metric", - "position_x": 2, - "position_y": 44, - "width": 23, - "height": 9, - "options": { - "collection": "insight", - "field": "id", - "function": "count", - "filter": null, - "conditionalFormatting": null, - "suffix": "Insights", - "numberStyle": "unit", - "sortField": "id" - }, - "_syncId": "97cd07c5-27be-419c-8e4e-701deec03ac8" - }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Total Conversations", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 76, - "width": 15, - "height": 9, - "options": { - "collection": "conversation", - "field": "id", - "function": "count", - "filter": { - "_and": [ - { - "created_at": { - "_between": [ - "$NOW(-7)", - "$NOW" - ] - } - } - ] - } - }, - "_syncId": "a3492153-f6b8-45ee-9796-c21a3019bfcc" - }, { "dashboard": "d88e3e89-f55a-4279-87a5-159d8efc6dbc", "name": "Total Read Announcements", @@ -493,27 +272,6 @@ }, "_syncId": "b284f15f-5949-4d95-942e-da3004516ea4" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of conversations past week", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "time-series", - "position_x": 2, - "position_y": 65, - "width": 23, - "height": 20, - "options": { - "collection": "conversation", - "function": "count", - "dateField": "created_at", - "color": null, - "valueField": "id" - }, - "_syncId": "bb95bf10-c43b-401b-adb3-97ea6898f160" - }, { "dashboard": "d88e3e89-f55a-4279-87a5-159d8efc6dbc", "name": "Read By Users", @@ -534,41 +292,6 @@ }, "_syncId": "c478c29d-8855-46e3-a84a-06c5350b1e11" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of insights", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 2, - "position_y": 98, - "width": 23, - "height": 9, - "options": { - "collection": "insight", - "field": "id", - "function": "count", - 
"filter": { - "_and": [ - { - "created_at": { - "_between": [ - "$NOW(-7)", - "$NOW" - ] - } - } - ] - }, - "conditionalFormatting": null, - "suffix": "Insights", - "numberStyle": "unit", - "sortField": "id" - }, - "_syncId": "c5a01857-a208-4034-b430-a9c7893d79ae" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "chunk / NOW", @@ -591,25 +314,6 @@ }, "_syncId": "cb5eebf3-4b85-4b64-bc12-0c1a0acb6b68" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of projects", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 11, - "width": 15, - "height": 9, - "options": { - "collection": "project", - "field": "id", - "function": "count" - }, - "_syncId": "cee00ae2-95b9-49c6-b4fb-cc5666b401b2" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "chunk / day ", @@ -749,78 +453,6 @@ }, "_syncId": "e73a55b3-f505-41d2-aa84-012f7d76ad0a" }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": null, - "icon": null, - "color": null, - "show_header": false, - "note": null, - "type": "label", - "position_x": 2, - "position_y": 6, - "width": 23, - "height": 3, - "options": { - "text": "Measures Total" - }, - "_syncId": "eaf1ce6b-879c-4fd6-8e37-5b8ed1bc1f6a" - }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of conversations past 3 months", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "time-series", - "position_x": 2, - "position_y": 11, - "width": 23, - "height": 20, - "options": { - "collection": "conversation", - "function": "count", - "dateField": "created_at", - "color": null, - "precision": "day", - "range": "3 months", - "valueField": "id" - }, - "_syncId": "f7cdfc84-8f95-4824-8d14-477dab874fbd" - }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of Views generated", - "icon": null, - "color": null, - "show_header": true, - 
"note": null, - "type": "metric", - "position_x": 27, - "position_y": 98, - "width": 15, - "height": 9, - "options": { - "collection": "view", - "field": "id", - "function": "count", - "sortField": null, - "filter": { - "_and": [ - { - "created_at": { - "_between": [ - "$NOW(-7)", - "$NOW" - ] - } - } - ] - } - }, - "_syncId": "f7cf0d59-57d4-46ab-8743-4548a3745981" - }, { "dashboard": "fadfb3fb-a781-41f1-a202-ef0f92174226", "name": "chat messages / week", @@ -844,25 +476,5 @@ "missingData": "ignore" }, "_syncId": "f8188818-a2b2-451e-9962-0ba0e4aca1e8" - }, - { - "dashboard": "79cde56d-c5cd-4a4c-9903-d99d3b4b796b", - "name": "Number of Views generated", - "icon": null, - "color": null, - "show_header": true, - "note": null, - "type": "metric", - "position_x": 27, - "position_y": 44, - "width": 15, - "height": 9, - "options": { - "collection": "view", - "field": "id", - "function": "count", - "sortField": null - }, - "_syncId": "fc1cf84b-354a-4e7f-80fe-b083558d13b3" } ] diff --git a/echo/directus/sync/snapshot/collections/announcement.json b/echo/directus/sync/snapshot/collections/announcement.json index 09f88562..cbe16ee3 100644 --- a/echo/directus/sync/snapshot/collections/announcement.json +++ b/echo/directus/sync/snapshot/collections/announcement.json @@ -16,7 +16,7 @@ "note": null, "preview_url": null, "singleton": false, - "sort": 12, + "sort": 11, "sort_field": "sort", "translations": null, "unarchive_value": "draft", diff --git a/echo/directus/sync/snapshot/collections/conversation_artifact.json b/echo/directus/sync/snapshot/collections/conversation_artifact.json new file mode 100644 index 00000000..96e2501e --- /dev/null +++ b/echo/directus/sync/snapshot/collections/conversation_artifact.json @@ -0,0 +1,28 @@ +{ + "collection": "conversation_artifact", + "meta": { + "accountability": "all", + "archive_app_filter": true, + "archive_field": null, + "archive_value": null, + "collapse": "open", + "collection": "conversation_artifact", + "color": null, + 
"display_template": null, + "group": null, + "hidden": false, + "icon": null, + "item_duplication_fields": null, + "note": null, + "preview_url": null, + "singleton": false, + "sort": 13, + "sort_field": null, + "translations": null, + "unarchive_value": null, + "versioning": false + }, + "schema": { + "name": "conversation_artifact" + } +} diff --git a/echo/directus/sync/snapshot/collections/languages.json b/echo/directus/sync/snapshot/collections/languages.json index 16407be4..2da014f1 100644 --- a/echo/directus/sync/snapshot/collections/languages.json +++ b/echo/directus/sync/snapshot/collections/languages.json @@ -16,7 +16,7 @@ "note": null, "preview_url": null, "singleton": false, - "sort": 13, + "sort": 12, "sort_field": null, "translations": null, "unarchive_value": null, diff --git a/echo/directus/sync/snapshot/collections/verification_topic.json b/echo/directus/sync/snapshot/collections/verification_topic.json new file mode 100644 index 00000000..6a161fd0 --- /dev/null +++ b/echo/directus/sync/snapshot/collections/verification_topic.json @@ -0,0 +1,28 @@ +{ + "collection": "verification_topic", + "meta": { + "accountability": "all", + "archive_app_filter": true, + "archive_field": null, + "archive_value": null, + "collapse": "open", + "collection": "verification_topic", + "color": null, + "display_template": null, + "group": null, + "hidden": false, + "icon": null, + "item_duplication_fields": null, + "note": null, + "preview_url": null, + "singleton": false, + "sort": 14, + "sort_field": "sort", + "translations": null, + "unarchive_value": null, + "versioning": false + }, + "schema": { + "name": "verification_topic" + } +} diff --git a/echo/directus/sync/snapshot/collections/verification_topic_translations.json b/echo/directus/sync/snapshot/collections/verification_topic_translations.json new file mode 100644 index 00000000..6c8e48a0 --- /dev/null +++ b/echo/directus/sync/snapshot/collections/verification_topic_translations.json @@ -0,0 +1,28 @@ +{ + 
"collection": "verification_topic_translations", + "meta": { + "accountability": "all", + "archive_app_filter": true, + "archive_field": null, + "archive_value": null, + "collapse": "open", + "collection": "verification_topic_translations", + "color": null, + "display_template": null, + "group": "verification_topic", + "hidden": true, + "icon": "import_export", + "item_duplication_fields": null, + "note": null, + "preview_url": null, + "singleton": false, + "sort": 1, + "sort_field": null, + "translations": null, + "unarchive_value": null, + "versioning": false + }, + "schema": { + "name": "verification_topic_translations" + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation/conversation_artifacts.json b/echo/directus/sync/snapshot/fields/conversation/conversation_artifacts.json new file mode 100644 index 00000000..8b8d9b7c --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation/conversation_artifacts.json @@ -0,0 +1,28 @@ +{ + "collection": "conversation", + "field": "conversation_artifacts", + "type": "alias", + "meta": { + "collection": "conversation", + "conditions": null, + "display": null, + "display_options": null, + "field": "conversation_artifacts", + "group": null, + "hidden": false, + "interface": "list-o2m", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 25, + "special": [ + "o2m" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/approved_at.json b/echo/directus/sync/snapshot/fields/conversation_artifact/approved_at.json new file mode 100644 index 00000000..2189b4ac --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/approved_at.json @@ -0,0 +1,44 @@ +{ + "collection": "conversation_artifact", + "field": "approved_at", + "type": "dateTime", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + 
"display": null, + "display_options": null, + "field": "approved_at", + "group": null, + "hidden": false, + "interface": "datetime", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 7, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "approved_at", + "table": "conversation_artifact", + "data_type": "timestamp without time zone", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/content.json b/echo/directus/sync/snapshot/fields/conversation_artifact/content.json new file mode 100644 index 00000000..fc475ec5 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/content.json @@ -0,0 +1,44 @@ +{ + "collection": "conversation_artifact", + "field": "content", + "type": "text", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": null, + "display_options": null, + "field": "content", + "group": null, + "hidden": false, + "interface": "input-rich-text-md", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 6, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "content", + "table": "conversation_artifact", + "data_type": "text", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + 
"generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/conversation_id.json b/echo/directus/sync/snapshot/fields/conversation_artifact/conversation_id.json new file mode 100644 index 00000000..5b6d2633 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/conversation_id.json @@ -0,0 +1,49 @@ +{ + "collection": "conversation_artifact", + "field": "conversation_id", + "type": "uuid", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": null, + "display_options": null, + "field": "conversation_id", + "group": null, + "hidden": false, + "interface": "select-dropdown-m2o", + "note": null, + "options": { + "enableLink": true, + "template": "{{participant_name}}{{project_id.name}}" + }, + "readonly": false, + "required": false, + "searchable": true, + "sort": 10, + "special": [ + "m2o" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "conversation_id", + "table": "conversation_artifact", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "conversation", + "foreign_key_column": "id" + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/date_created.json b/echo/directus/sync/snapshot/fields/conversation_artifact/date_created.json new file mode 100644 index 00000000..400cf8ce --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/date_created.json @@ -0,0 +1,48 @@ +{ + "collection": "conversation_artifact", + "field": "date_created", + "type": "timestamp", + "meta": { + 
"collection": "conversation_artifact", + "conditions": null, + "display": "datetime", + "display_options": { + "relative": true + }, + "field": "date_created", + "group": null, + "hidden": true, + "interface": "datetime", + "note": null, + "options": null, + "readonly": true, + "required": false, + "searchable": true, + "sort": 3, + "special": [ + "date-created" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "date_created", + "table": "conversation_artifact", + "data_type": "timestamp with time zone", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/id.json b/echo/directus/sync/snapshot/fields/conversation_artifact/id.json new file mode 100644 index 00000000..583cc908 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/id.json @@ -0,0 +1,46 @@ +{ + "collection": "conversation_artifact", + "field": "id", + "type": "uuid", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": null, + "display_options": null, + "field": "id", + "group": null, + "hidden": true, + "interface": "input", + "note": null, + "options": null, + "readonly": true, + "required": false, + "searchable": true, + "sort": 1, + "special": [ + "uuid" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "id", + "table": "conversation_artifact", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": false, + "is_unique": true, + "is_indexed": 
false, + "is_primary_key": true, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/key.json b/echo/directus/sync/snapshot/fields/conversation_artifact/key.json new file mode 100644 index 00000000..0a29a13b --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/key.json @@ -0,0 +1,44 @@ +{ + "collection": "conversation_artifact", + "field": "key", + "type": "string", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": null, + "display_options": null, + "field": "key", + "group": null, + "hidden": false, + "interface": "input", + "note": "Which verification topic used to create this artifact", + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 9, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "key", + "table": "conversation_artifact", + "data_type": "character varying", + "default_value": null, + "max_length": 255, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/last_updated_at.json b/echo/directus/sync/snapshot/fields/conversation_artifact/last_updated_at.json new file mode 100644 index 00000000..2e578b6e --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/last_updated_at.json @@ -0,0 +1,44 @@ +{ + "collection": "conversation_artifact", + "field": "last_updated_at", + "type": "dateTime", + "meta": { + "collection": "conversation_artifact", + "conditions": null, 
+ "display": null, + "display_options": null, + "field": "last_updated_at", + "group": null, + "hidden": false, + "interface": "datetime", + "note": "This field is set when user (participant) updates it on the Portal UI", + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 5, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "last_updated_at", + "table": "conversation_artifact", + "data_type": "timestamp without time zone", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/read_aloud_stream_url.json b/echo/directus/sync/snapshot/fields/conversation_artifact/read_aloud_stream_url.json new file mode 100644 index 00000000..58045da8 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/read_aloud_stream_url.json @@ -0,0 +1,44 @@ +{ + "collection": "conversation_artifact", + "field": "read_aloud_stream_url", + "type": "text", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": null, + "display_options": null, + "field": "read_aloud_stream_url", + "group": null, + "hidden": false, + "interface": "input", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 8, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "read_aloud_stream_url", + "table": "conversation_artifact", + "data_type": "text", + "default_value": null, + "max_length": null, + "numeric_precision": null, + 
"numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/user_created.json b/echo/directus/sync/snapshot/fields/conversation_artifact/user_created.json new file mode 100644 index 00000000..634beabf --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/user_created.json @@ -0,0 +1,48 @@ +{ + "collection": "conversation_artifact", + "field": "user_created", + "type": "uuid", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": "user", + "display_options": null, + "field": "user_created", + "group": null, + "hidden": true, + "interface": "select-dropdown-m2o", + "note": null, + "options": { + "template": "{{avatar}} {{first_name}} {{last_name}}" + }, + "readonly": true, + "required": false, + "searchable": true, + "sort": 2, + "special": [ + "user-created" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "user_created", + "table": "conversation_artifact", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "directus_users", + "foreign_key_column": "id" + } +} diff --git a/echo/directus/sync/snapshot/fields/conversation_artifact/user_updated.json b/echo/directus/sync/snapshot/fields/conversation_artifact/user_updated.json new file mode 100644 index 00000000..e3d5ffbe --- /dev/null +++ b/echo/directus/sync/snapshot/fields/conversation_artifact/user_updated.json @@ -0,0 +1,48 @@ +{ + 
"collection": "conversation_artifact", + "field": "user_updated", + "type": "uuid", + "meta": { + "collection": "conversation_artifact", + "conditions": null, + "display": "user", + "display_options": null, + "field": "user_updated", + "group": null, + "hidden": true, + "interface": "select-dropdown-m2o", + "note": null, + "options": { + "template": "{{avatar}} {{first_name}} {{last_name}}" + }, + "readonly": true, + "required": false, + "searchable": true, + "sort": 4, + "special": [ + "user-updated" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "user_updated", + "table": "conversation_artifact", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "directus_users", + "foreign_key_column": "id" + } +} diff --git a/echo/directus/sync/snapshot/fields/project/context.json b/echo/directus/sync/snapshot/fields/project/context.json index 09f14e2e..1c673e09 100644 --- a/echo/directus/sync/snapshot/fields/project/context.json +++ b/echo/directus/sync/snapshot/fields/project/context.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 8, + "sort": 7, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json b/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json index 56862612..debcb328 100644 --- a/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json +++ b/echo/directus/sync/snapshot/fields/project/conversation_ask_for_participant_name_label.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, 
"searchable": true, - "sort": 9, + "sort": 8, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/conversations.json b/echo/directus/sync/snapshot/fields/project/conversations.json index ac8e81fa..c730aa94 100644 --- a/echo/directus/sync/snapshot/fields/project/conversations.json +++ b/echo/directus/sync/snapshot/fields/project/conversations.json @@ -26,7 +26,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 17, + "sort": 16, "special": [ "o2m" ], diff --git a/echo/directus/sync/snapshot/fields/project/custom_verification_topics.json b/echo/directus/sync/snapshot/fields/project/custom_verification_topics.json new file mode 100644 index 00000000..060a91dd --- /dev/null +++ b/echo/directus/sync/snapshot/fields/project/custom_verification_topics.json @@ -0,0 +1,28 @@ +{ + "collection": "project", + "field": "custom_verification_topics", + "type": "alias", + "meta": { + "collection": "project", + "conditions": null, + "display": null, + "display_options": null, + "field": "custom_verification_topics", + "group": null, + "hidden": false, + "interface": "list-o2m", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 30, + "special": [ + "o2m" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + } +} diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json b/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json index e8ffd19c..a587b7c0 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_ask_for_participant_name.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 10, + "sort": 9, "special": [ "cast-boolean" ], diff --git 
a/echo/directus/sync/snapshot/fields/project/default_conversation_description.json b/echo/directus/sync/snapshot/fields/project/default_conversation_description.json index 3762f13a..aef337d2 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_description.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_description.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 13, + "sort": 12, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json b/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json index 8742fffb..bf8036e9 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_finish_text.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 15, + "sort": 14, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_title.json b/echo/directus/sync/snapshot/fields/project/default_conversation_title.json index 01dca870..1ab55d9b 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_title.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_title.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 12, + "sort": 11, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json b/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json index 942fdba7..04c4e66b 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json +++ 
b/echo/directus/sync/snapshot/fields/project/default_conversation_transcript_prompt.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 14, + "sort": 13, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json b/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json index 521971a8..d7303f59 100644 --- a/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json +++ b/echo/directus/sync/snapshot/fields/project/default_conversation_tutorial_slug.json @@ -31,7 +31,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 11, + "sort": 10, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/directus_user_id.json b/echo/directus/sync/snapshot/fields/project/directus_user_id.json index 785d847f..091dd5cb 100644 --- a/echo/directus/sync/snapshot/fields/project/directus_user_id.json +++ b/echo/directus/sync/snapshot/fields/project/directus_user_id.json @@ -18,7 +18,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 19, + "sort": 18, "special": [ "m2o" ], diff --git a/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json b/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json index 5378881d..6c00c9d6 100644 --- a/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json +++ b/echo/directus/sync/snapshot/fields/project/divider-n6xep9.json @@ -19,7 +19,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 22, + "sort": 21, "special": [ "alias", "no-data" diff --git a/echo/directus/sync/snapshot/fields/project/get_reply_mode.json b/echo/directus/sync/snapshot/fields/project/get_reply_mode.json index 79ad0ce2..4ab6f721 100644 --- a/echo/directus/sync/snapshot/fields/project/get_reply_mode.json +++ 
b/echo/directus/sync/snapshot/fields/project/get_reply_mode.json @@ -58,7 +58,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 26, + "sort": 25, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json b/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json index 09f02b0e..499fad69 100644 --- a/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json +++ b/echo/directus/sync/snapshot/fields/project/get_reply_prompt.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 25, + "sort": 24, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/image_generation_model.json b/echo/directus/sync/snapshot/fields/project/image_generation_model.json index 42934681..33aa24e4 100644 --- a/echo/directus/sync/snapshot/fields/project/image_generation_model.json +++ b/echo/directus/sync/snapshot/fields/project/image_generation_model.json @@ -31,7 +31,7 @@ "readonly": false, "required": true, "searchable": true, - "sort": 7, + "sort": 6, "special": null, "translations": null, "validation": null, diff --git a/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json b/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json index cb028847..a8326ec7 100644 --- a/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json +++ b/echo/directus/sync/snapshot/fields/project/is_conversation_allowed.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 23, + "sort": 22, "special": [ "cast-boolean" ], diff --git a/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json b/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json index 22ee522e..2c5097a9 100644 --- 
a/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json +++ b/echo/directus/sync/snapshot/fields/project/is_enhanced_audio_processing_enabled.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 27, + "sort": 26, "special": [ "cast-boolean" ], diff --git a/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json b/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json index fedbf32c..2d2e0ef8 100644 --- a/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json +++ b/echo/directus/sync/snapshot/fields/project/is_get_reply_enabled.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 24, + "sort": 23, "special": [ "cast-boolean" ], diff --git a/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json b/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json index 3f2caba4..543b35dc 100644 --- a/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json +++ b/echo/directus/sync/snapshot/fields/project/is_project_notification_subscription_allowed.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 28, + "sort": 27, "special": [ "cast-boolean" ], diff --git a/echo/directus/sync/snapshot/fields/project/is_verify_enabled.json b/echo/directus/sync/snapshot/fields/project/is_verify_enabled.json new file mode 100644 index 00000000..01276f01 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/project/is_verify_enabled.json @@ -0,0 +1,46 @@ +{ + "collection": "project", + "field": "is_verify_enabled", + "type": "boolean", + "meta": { + "collection": "project", + "conditions": null, + "display": null, + "display_options": null, + "field": "is_verify_enabled", + "group": null, + "hidden": false, + "interface": "boolean", + "note": null, + "options": null, + "readonly": false, + "required": 
false, + "searchable": true, + "sort": 29, + "special": [ + "cast-boolean" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "is_verify_enabled", + "table": "project", + "data_type": "boolean", + "default_value": false, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/project/processing_status.json b/echo/directus/sync/snapshot/fields/project/processing_status.json index c4b81d17..c745544a 100644 --- a/echo/directus/sync/snapshot/fields/project/processing_status.json +++ b/echo/directus/sync/snapshot/fields/project/processing_status.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 29, + "sort": 28, "special": [ "o2m" ], diff --git a/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json b/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json index bba605b0..3e8b6be7 100644 --- a/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json +++ b/echo/directus/sync/snapshot/fields/project/project_analysis_runs.json @@ -24,7 +24,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 18, + "sort": 17, "special": [ "o2m" ], diff --git a/echo/directus/sync/snapshot/fields/project/project_chats.json b/echo/directus/sync/snapshot/fields/project/project_chats.json index 5f398a49..56d33b81 100644 --- a/echo/directus/sync/snapshot/fields/project/project_chats.json +++ b/echo/directus/sync/snapshot/fields/project/project_chats.json @@ -16,7 +16,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 20, + "sort": 19, "special": [ "o2m" ], diff --git 
a/echo/directus/sync/snapshot/fields/project/project_reports.json b/echo/directus/sync/snapshot/fields/project/project_reports.json index d629d15c..e6876257 100644 --- a/echo/directus/sync/snapshot/fields/project/project_reports.json +++ b/echo/directus/sync/snapshot/fields/project/project_reports.json @@ -21,7 +21,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 21, + "sort": 20, "special": [ "o2m" ], diff --git a/echo/directus/sync/snapshot/fields/project/selected_verification_key_list.json b/echo/directus/sync/snapshot/fields/project/selected_verification_key_list.json new file mode 100644 index 00000000..ef9722d6 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/project/selected_verification_key_list.json @@ -0,0 +1,47 @@ +{ + "collection": "project", + "field": "selected_verification_key_list", + "type": "text", + "meta": { + "collection": "project", + "conditions": null, + "display": null, + "display_options": null, + "field": "selected_verification_key_list", + "group": null, + "hidden": false, + "interface": "input-multiline", + "note": null, + "options": { + "clear": true, + "trim": true + }, + "readonly": false, + "required": false, + "searchable": true, + "sort": 31, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "selected_verification_key_list", + "table": "project", + "data_type": "text", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/project/tags.json b/echo/directus/sync/snapshot/fields/project/tags.json index fbfeb4ae..e7163eeb 100644 --- 
a/echo/directus/sync/snapshot/fields/project/tags.json +++ b/echo/directus/sync/snapshot/fields/project/tags.json @@ -20,7 +20,7 @@ "readonly": false, "required": false, "searchable": true, - "sort": 16, + "sort": 15, "special": [ "o2m" ], diff --git a/echo/directus/sync/snapshot/fields/verification_topic/date_created.json b/echo/directus/sync/snapshot/fields/verification_topic/date_created.json new file mode 100644 index 00000000..eea86166 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/date_created.json @@ -0,0 +1,48 @@ +{ + "collection": "verification_topic", + "field": "date_created", + "type": "timestamp", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": "datetime", + "display_options": { + "relative": true + }, + "field": "date_created", + "group": null, + "hidden": true, + "interface": "datetime", + "note": null, + "options": null, + "readonly": true, + "required": false, + "searchable": true, + "sort": 4, + "special": [ + "date-created" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "date_created", + "table": "verification_topic", + "data_type": "timestamp with time zone", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/date_updated.json b/echo/directus/sync/snapshot/fields/verification_topic/date_updated.json new file mode 100644 index 00000000..a9770c0e --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/date_updated.json @@ -0,0 +1,48 @@ +{ + "collection": "verification_topic", + "field": "date_updated", + "type": "timestamp", + 
"meta": { + "collection": "verification_topic", + "conditions": null, + "display": "datetime", + "display_options": { + "relative": true + }, + "field": "date_updated", + "group": null, + "hidden": true, + "interface": "datetime", + "note": null, + "options": null, + "readonly": true, + "required": false, + "searchable": true, + "sort": 6, + "special": [ + "date-updated" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "date_updated", + "table": "verification_topic", + "data_type": "timestamp with time zone", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/icon.json b/echo/directus/sync/snapshot/fields/verification_topic/icon.json new file mode 100644 index 00000000..96f57a24 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/icon.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic", + "field": "icon", + "type": "string", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": null, + "display_options": null, + "field": "icon", + "group": null, + "hidden": false, + "interface": "input", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 10, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "icon", + "table": "verification_topic", + "data_type": "character varying", + "default_value": null, + "max_length": 255, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + 
"is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/key.json b/echo/directus/sync/snapshot/fields/verification_topic/key.json new file mode 100644 index 00000000..79dd4e5b --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/key.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic", + "field": "key", + "type": "string", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": null, + "display_options": null, + "field": "key", + "group": null, + "hidden": false, + "interface": "input", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 1, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "key", + "table": "verification_topic", + "data_type": "character varying", + "default_value": null, + "max_length": 255, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": false, + "is_unique": true, + "is_indexed": false, + "is_primary_key": true, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/project_id.json b/echo/directus/sync/snapshot/fields/verification_topic/project_id.json new file mode 100644 index 00000000..80691c1f --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/project_id.json @@ -0,0 +1,49 @@ +{ + "collection": "verification_topic", + "field": "project_id", + "type": "uuid", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": null, + "display_options": null, + "field": "project_id", + "group": null, + 
"hidden": false, + "interface": "select-dropdown-m2o", + "note": null, + "options": { + "enableLink": true, + "template": "{{name}}" + }, + "readonly": false, + "required": false, + "searchable": true, + "sort": 7, + "special": [ + "m2o" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "project_id", + "table": "verification_topic", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "project", + "foreign_key_column": "id" + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/prompt.json b/echo/directus/sync/snapshot/fields/verification_topic/prompt.json new file mode 100644 index 00000000..2885317c --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/prompt.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic", + "field": "prompt", + "type": "text", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": null, + "display_options": null, + "field": "prompt", + "group": null, + "hidden": false, + "interface": "input-rich-text-md", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 9, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "prompt", + "table": "verification_topic", + "data_type": "text", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + 
"foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/sort.json b/echo/directus/sync/snapshot/fields/verification_topic/sort.json new file mode 100644 index 00000000..ad281b42 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/sort.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic", + "field": "sort", + "type": "integer", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": null, + "display_options": null, + "field": "sort", + "group": null, + "hidden": true, + "interface": "input", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 2, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "sort", + "table": "verification_topic", + "data_type": "integer", + "default_value": null, + "max_length": null, + "numeric_precision": 32, + "numeric_scale": 0, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/translations.json b/echo/directus/sync/snapshot/fields/verification_topic/translations.json new file mode 100644 index 00000000..cccf3aa3 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/translations.json @@ -0,0 +1,31 @@ +{ + "collection": "verification_topic", + "field": "translations", + "type": "alias", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": null, + "display_options": null, + "field": "translations", + "group": null, + "hidden": false, + "interface": "translations", + "note": null, + "options": { + "defaultOpenSplitView": true, + "languageField": "code" + 
}, + "readonly": false, + "required": false, + "searchable": true, + "sort": 8, + "special": [ + "translations" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/user_created.json b/echo/directus/sync/snapshot/fields/verification_topic/user_created.json new file mode 100644 index 00000000..4c02a796 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/user_created.json @@ -0,0 +1,48 @@ +{ + "collection": "verification_topic", + "field": "user_created", + "type": "uuid", + "meta": { + "collection": "verification_topic", + "conditions": null, + "display": "user", + "display_options": null, + "field": "user_created", + "group": null, + "hidden": true, + "interface": "select-dropdown-m2o", + "note": null, + "options": { + "template": "{{avatar}} {{first_name}} {{last_name}}" + }, + "readonly": true, + "required": false, + "searchable": true, + "sort": 3, + "special": [ + "user-created" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "user_created", + "table": "verification_topic", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "directus_users", + "foreign_key_column": "id" + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic/user_updated.json b/echo/directus/sync/snapshot/fields/verification_topic/user_updated.json new file mode 100644 index 00000000..2234b2fe --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic/user_updated.json @@ -0,0 +1,48 @@ +{ + "collection": "verification_topic", + "field": "user_updated", + "type": "uuid", 
+ "meta": { + "collection": "verification_topic", + "conditions": null, + "display": "user", + "display_options": null, + "field": "user_updated", + "group": null, + "hidden": true, + "interface": "select-dropdown-m2o", + "note": null, + "options": { + "template": "{{avatar}} {{first_name}} {{last_name}}" + }, + "readonly": true, + "required": false, + "searchable": true, + "sort": 5, + "special": [ + "user-updated" + ], + "translations": null, + "validation": null, + "validation_message": null, + "width": "half" + }, + "schema": { + "name": "user_updated", + "table": "verification_topic", + "data_type": "uuid", + "default_value": null, + "max_length": null, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "directus_users", + "foreign_key_column": "id" + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic_translations/id.json b/echo/directus/sync/snapshot/fields/verification_topic_translations/id.json new file mode 100644 index 00000000..88dddab2 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic_translations/id.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic_translations", + "field": "id", + "type": "integer", + "meta": { + "collection": "verification_topic_translations", + "conditions": null, + "display": null, + "display_options": null, + "field": "id", + "group": null, + "hidden": true, + "interface": null, + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 1, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "id", + "table": "verification_topic_translations", + "data_type": "integer", + "default_value": 
"nextval('verification_topic_translations_id_seq'::regclass)", + "max_length": null, + "numeric_precision": 32, + "numeric_scale": 0, + "is_nullable": false, + "is_unique": true, + "is_indexed": false, + "is_primary_key": true, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": true, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic_translations/label.json b/echo/directus/sync/snapshot/fields/verification_topic_translations/label.json new file mode 100644 index 00000000..a07be26f --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic_translations/label.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic_translations", + "field": "label", + "type": "string", + "meta": { + "collection": "verification_topic_translations", + "conditions": null, + "display": null, + "display_options": null, + "field": "label", + "group": null, + "hidden": false, + "interface": "input", + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 4, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "label", + "table": "verification_topic_translations", + "data_type": "character varying", + "default_value": null, + "max_length": 255, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": null, + "foreign_key_column": null + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic_translations/languages_code.json b/echo/directus/sync/snapshot/fields/verification_topic_translations/languages_code.json new file mode 100644 index 00000000..69cec47c --- /dev/null +++ 
b/echo/directus/sync/snapshot/fields/verification_topic_translations/languages_code.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic_translations", + "field": "languages_code", + "type": "string", + "meta": { + "collection": "verification_topic_translations", + "conditions": null, + "display": null, + "display_options": null, + "field": "languages_code", + "group": null, + "hidden": true, + "interface": null, + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 3, + "special": null, + "translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "languages_code", + "table": "verification_topic_translations", + "data_type": "character varying", + "default_value": null, + "max_length": 255, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "languages", + "foreign_key_column": "code" + } +} diff --git a/echo/directus/sync/snapshot/fields/verification_topic_translations/verification_topic_key.json b/echo/directus/sync/snapshot/fields/verification_topic_translations/verification_topic_key.json new file mode 100644 index 00000000..7c418542 --- /dev/null +++ b/echo/directus/sync/snapshot/fields/verification_topic_translations/verification_topic_key.json @@ -0,0 +1,44 @@ +{ + "collection": "verification_topic_translations", + "field": "verification_topic_key", + "type": "string", + "meta": { + "collection": "verification_topic_translations", + "conditions": null, + "display": null, + "display_options": null, + "field": "verification_topic_key", + "group": null, + "hidden": true, + "interface": null, + "note": null, + "options": null, + "readonly": false, + "required": false, + "searchable": true, + "sort": 2, + "special": null, + 
"translations": null, + "validation": null, + "validation_message": null, + "width": "full" + }, + "schema": { + "name": "verification_topic_key", + "table": "verification_topic_translations", + "data_type": "character varying", + "default_value": null, + "max_length": 255, + "numeric_precision": null, + "numeric_scale": null, + "is_nullable": true, + "is_unique": false, + "is_indexed": false, + "is_primary_key": false, + "is_generated": false, + "generation_expression": null, + "has_auto_increment": false, + "foreign_key_table": "verification_topic", + "foreign_key_column": "key" + } +} diff --git a/echo/directus/sync/snapshot/relations/conversation_artifact/conversation_id.json b/echo/directus/sync/snapshot/relations/conversation_artifact/conversation_id.json new file mode 100644 index 00000000..7ecaa7c1 --- /dev/null +++ b/echo/directus/sync/snapshot/relations/conversation_artifact/conversation_id.json @@ -0,0 +1,25 @@ +{ + "collection": "conversation_artifact", + "field": "conversation_id", + "related_collection": "conversation", + "meta": { + "junction_field": null, + "many_collection": "conversation_artifact", + "many_field": "conversation_id", + "one_allowed_collections": null, + "one_collection": "conversation", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": "conversation_artifacts", + "sort_field": null + }, + "schema": { + "table": "conversation_artifact", + "column": "conversation_id", + "foreign_key_table": "conversation", + "foreign_key_column": "id", + "constraint_name": "conversation_artifact_conversation_id_foreign", + "on_update": "NO ACTION", + "on_delete": "CASCADE" + } +} diff --git a/echo/directus/sync/snapshot/relations/conversation_artifact/user_created.json b/echo/directus/sync/snapshot/relations/conversation_artifact/user_created.json new file mode 100644 index 00000000..15df66eb --- /dev/null +++ b/echo/directus/sync/snapshot/relations/conversation_artifact/user_created.json @@ -0,0 +1,25 @@ +{ + 
"collection": "conversation_artifact", + "field": "user_created", + "related_collection": "directus_users", + "meta": { + "junction_field": null, + "many_collection": "conversation_artifact", + "many_field": "user_created", + "one_allowed_collections": null, + "one_collection": "directus_users", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": null, + "sort_field": null + }, + "schema": { + "table": "conversation_artifact", + "column": "user_created", + "foreign_key_table": "directus_users", + "foreign_key_column": "id", + "constraint_name": "conversation_artifact_user_created_foreign", + "on_update": "NO ACTION", + "on_delete": "NO ACTION" + } +} diff --git a/echo/directus/sync/snapshot/relations/conversation_artifact/user_updated.json b/echo/directus/sync/snapshot/relations/conversation_artifact/user_updated.json new file mode 100644 index 00000000..cb252122 --- /dev/null +++ b/echo/directus/sync/snapshot/relations/conversation_artifact/user_updated.json @@ -0,0 +1,25 @@ +{ + "collection": "conversation_artifact", + "field": "user_updated", + "related_collection": "directus_users", + "meta": { + "junction_field": null, + "many_collection": "conversation_artifact", + "many_field": "user_updated", + "one_allowed_collections": null, + "one_collection": "directus_users", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": null, + "sort_field": null + }, + "schema": { + "table": "conversation_artifact", + "column": "user_updated", + "foreign_key_table": "directus_users", + "foreign_key_column": "id", + "constraint_name": "conversation_artifact_user_updated_foreign", + "on_update": "NO ACTION", + "on_delete": "NO ACTION" + } +} diff --git a/echo/directus/sync/snapshot/relations/verification_topic/project_id.json b/echo/directus/sync/snapshot/relations/verification_topic/project_id.json new file mode 100644 index 00000000..58ce24ba --- /dev/null +++ 
b/echo/directus/sync/snapshot/relations/verification_topic/project_id.json @@ -0,0 +1,25 @@ +{ + "collection": "verification_topic", + "field": "project_id", + "related_collection": "project", + "meta": { + "junction_field": null, + "many_collection": "verification_topic", + "many_field": "project_id", + "one_allowed_collections": null, + "one_collection": "project", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": "custom_verification_topics", + "sort_field": null + }, + "schema": { + "table": "verification_topic", + "column": "project_id", + "foreign_key_table": "project", + "foreign_key_column": "id", + "constraint_name": "verification_topic_project_id_foreign", + "on_update": "NO ACTION", + "on_delete": "SET NULL" + } +} diff --git a/echo/directus/sync/snapshot/relations/verification_topic/user_created.json b/echo/directus/sync/snapshot/relations/verification_topic/user_created.json new file mode 100644 index 00000000..96893ca8 --- /dev/null +++ b/echo/directus/sync/snapshot/relations/verification_topic/user_created.json @@ -0,0 +1,25 @@ +{ + "collection": "verification_topic", + "field": "user_created", + "related_collection": "directus_users", + "meta": { + "junction_field": null, + "many_collection": "verification_topic", + "many_field": "user_created", + "one_allowed_collections": null, + "one_collection": "directus_users", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": null, + "sort_field": null + }, + "schema": { + "table": "verification_topic", + "column": "user_created", + "foreign_key_table": "directus_users", + "foreign_key_column": "id", + "constraint_name": "verification_topic_user_created_foreign", + "on_update": "NO ACTION", + "on_delete": "NO ACTION" + } +} diff --git a/echo/directus/sync/snapshot/relations/verification_topic/user_updated.json b/echo/directus/sync/snapshot/relations/verification_topic/user_updated.json new file mode 100644 index 00000000..d42c5d06 --- /dev/null 
+++ b/echo/directus/sync/snapshot/relations/verification_topic/user_updated.json @@ -0,0 +1,25 @@ +{ + "collection": "verification_topic", + "field": "user_updated", + "related_collection": "directus_users", + "meta": { + "junction_field": null, + "many_collection": "verification_topic", + "many_field": "user_updated", + "one_allowed_collections": null, + "one_collection": "directus_users", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": null, + "sort_field": null + }, + "schema": { + "table": "verification_topic", + "column": "user_updated", + "foreign_key_table": "directus_users", + "foreign_key_column": "id", + "constraint_name": "verification_topic_user_updated_foreign", + "on_update": "NO ACTION", + "on_delete": "NO ACTION" + } +} diff --git a/echo/directus/sync/snapshot/relations/verification_topic_translations/languages_code.json b/echo/directus/sync/snapshot/relations/verification_topic_translations/languages_code.json new file mode 100644 index 00000000..4022e5dc --- /dev/null +++ b/echo/directus/sync/snapshot/relations/verification_topic_translations/languages_code.json @@ -0,0 +1,25 @@ +{ + "collection": "verification_topic_translations", + "field": "languages_code", + "related_collection": "languages", + "meta": { + "junction_field": "verification_topic_key", + "many_collection": "verification_topic_translations", + "many_field": "languages_code", + "one_allowed_collections": null, + "one_collection": "languages", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": null, + "sort_field": null + }, + "schema": { + "table": "verification_topic_translations", + "column": "languages_code", + "foreign_key_table": "languages", + "foreign_key_column": "code", + "constraint_name": "verification_topic_translations_languages_code_foreign", + "on_update": "NO ACTION", + "on_delete": "SET NULL" + } +} diff --git 
a/echo/directus/sync/snapshot/relations/verification_topic_translations/verification_topic_key.json b/echo/directus/sync/snapshot/relations/verification_topic_translations/verification_topic_key.json new file mode 100644 index 00000000..17e1e3f3 --- /dev/null +++ b/echo/directus/sync/snapshot/relations/verification_topic_translations/verification_topic_key.json @@ -0,0 +1,25 @@ +{ + "collection": "verification_topic_translations", + "field": "verification_topic_key", + "related_collection": "verification_topic", + "meta": { + "junction_field": "languages_code", + "many_collection": "verification_topic_translations", + "many_field": "verification_topic_key", + "one_allowed_collections": null, + "one_collection": "verification_topic", + "one_collection_field": null, + "one_deselect_action": "nullify", + "one_field": "translations", + "sort_field": null + }, + "schema": { + "table": "verification_topic_translations", + "column": "verification_topic_key", + "foreign_key_table": "verification_topic", + "foreign_key_column": "key", + "constraint_name": "verification_topic_translations_verificati__34868e89_foreign", + "on_update": "NO ACTION", + "on_delete": "SET NULL" + } +} diff --git a/echo/server/AGENTS.md b/echo/server/AGENTS.md new file mode 100644 index 00000000..3cb782e2 --- /dev/null +++ b/echo/server/AGENTS.md @@ -0,0 +1,47 @@ +Last updated: 2025-11-07T08:32:55Z + +# Project Snapshot +- Dembrane ECHO server exposes a FastAPI app (`dembrane.main:app`) with async-heavy LightRAG integrations. +- Python 3.11 required; dependencies managed through `pyproject.toml` with `uv` as the package/runtime tool. +- Background work uses Dramatiq (network + cpu queues) and a scheduler module for periodic tasks. 
+ +# Build & Run +- Development API: `uv run uvicorn dembrane.main:app --port 8000 --reload --loop asyncio` +- Development scheduler: `uv run python -m dembrane.scheduler` +- Development workers: + - Network: `uv run dramatiq-gevent --watch ./dembrane --queues network --processes 2 --threads 1 dembrane.tasks` + - CPU: `uv run dramatiq --watch ./dembrane --queues cpu --processes 1 --threads 2 dembrane.tasks` +- Production API: `gunicorn dembrane.main:app --worker-class dembrane.lightrag_uvicorn_worker.LightRagUvicornWorker ...` + - Uses env vars `API_WORKERS`, `API_WORKER_TIMEOUT`, `API_WORKER_MAX_REQUESTS` +- Production workers: + - Network: `dramatiq-gevent --queues network --processes $PROCESSES --threads $THREADS dembrane.tasks` + - CPU: `dramatiq --queues cpu --processes $PROCESSES --threads $THREADS --watch . --watch-use-polling dembrane.tasks` +- Production scheduler: `python -m dembrane.scheduler` + +# Repeating Patterns +- `uv run` wraps all local entry points (uvicorn, python modules, dramatiq runners) to ensure env + dependencies stay consistent. Prefer this manager whenever spawning dev services. +- For API handlers, favor Directus queries over raw SQLAlchemy sessions when reading project/conversation data to keep behavior consistent with the admin console. + +# Change Hotspots (last 90 days) +- High-churn (watch for conflicts): `echo/server/dembrane/tasks.py`, `echo/server/dembrane/config.py`, `echo/server/dembrane/transcribe.py`, `echo/server/pyproject.toml` +- Slow movers (risk of stale assumptions): CI workflow YAMLs under `.github/workflows/`, `contributors.yml`, and `echo-user-docs` backups. + +# TODO / FIXME / HACK Inventory +- `dembrane/config.py:5` – Refactor messy config handling; consider YAML-based management. +- `dembrane/embedding.py:8` – Replace placeholder embeddings with Dembrane implementation. +- `dembrane/sentry.py:47` – Complete Sentry integration per docs. 
+- `dembrane/tasks.py:72` – Remove SSL bypass once proper certificate/VPC isolation exists. +- `dembrane/tasks.py:342` – Fetch contextual transcripts for previous segments. +- `dembrane/tasks.py:525` – Respect `use_pii_redaction` flag when available. +- `dembrane/tasks.py:724` – Handle RunPod error class explicitly. +- `dembrane/quote_utils.py:118/272/289` – Link quotes to chunks; fix sampling algorithm; adjust context limit math. +- `dembrane/service/conversation.py:101` – Validate `project_tag_id_list`. +- `dembrane/transcribe.py:179` – Replace polling with webhook approach. +- `dembrane/api/chat.py` – Multiple TODOs: fill module stub, add RAG shortcut when quotes exist, implement Directus project fetch, conversation endpoint completion, admin auth checks. +- `dembrane/api/participant.py:76` – Remove unused `pin`. + +# Gotchas & Notes +- Gunicorn uses custom LightRAG uvicorn worker; avoid uvloop to keep LightRAG compatible. +- CPU Dramatiq worker deliberately single-threaded to dodge LightRAG locking issues—respect `THREADS=1` guidance in prod. +- Watching directories (`--watch`, `--watch-use-polling`) adds overhead; keep file changes minimal when workers run locally. +- S3 audio paths used in verification/transcription flows should be loaded via the shared file service (`_get_audio_file_object`) so Gemini always receives fresh bytes—signed URLs may expire mid-request. 
diff --git a/echo/server/dembrane/api/api.py b/echo/server/dembrane/api/api.py index cc703463..8d2d491a 100644 --- a/echo/server/dembrane/api/api.py +++ b/echo/server/dembrane/api/api.py @@ -10,6 +10,7 @@ from dembrane.api.stateless import StatelessRouter from dembrane.api.participant import ParticipantRouter from dembrane.api.conversation import ConversationRouter +from dembrane.api.verify import VerifyRouter logger = getLogger("api") @@ -27,3 +28,4 @@ async def health() -> dict: api.include_router(ParticipantRouter, prefix="/participant") api.include_router(ConversationRouter, prefix="/conversations") api.include_router(StatelessRouter, prefix="/stateless") +api.include_router(VerifyRouter, prefix="/verify") diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py new file mode 100644 index 00000000..22573a53 --- /dev/null +++ b/echo/server/dembrane/api/verify.py @@ -0,0 +1,633 @@ +from __future__ import annotations + +import logging +from typing import Any, Dict, List, Optional +from datetime import datetime + +import litellm +from fastapi import APIRouter, HTTPException +from pydantic import Field, BaseModel, validator + +from dembrane.utils import generate_uuid +from dembrane.config import GCP_SA_JSON +from dembrane.prompts import render_prompt +from dembrane.directus import directus +from dembrane.transcribe import _get_audio_file_object +from dembrane.async_helpers import run_in_thread_pool +from dembrane.api.exceptions import ProjectNotFoundException, ConversationNotFoundException +from dembrane.api.dependency_auth import DependencyDirectusSession + +logger = logging.getLogger("api.verify") + +VerifyRouter = APIRouter(tags=["verify"]) + +DEFAULT_LANG = "en-US" + + +class VerificationTopicSeed(BaseModel): + key: str + prompt: str + icon: str + label: str + sort: int + + +DEFAULT_VERIFICATION_TOPICS: List[VerificationTopicSeed] = [ + VerificationTopicSeed( + key="agreements", + icon=":white_check_mark:", + label="What we actually 
agreed on", + sort=1, + prompt=( + "Extract the concrete agreements and shared understandings from this conversation. " + "Focus on points where multiple participants explicitly or implicitly aligned. " + "Include both major decisions and small points of consensus. Present these as clear, " + "unambiguous statements that all participants would recognize as accurate. Distinguish " + "between firm agreements and tentative consensus. If participants used different words " + "to express the same idea, synthesize into shared language. Format as a living document " + "of mutual understanding. Output character should be diplomatic but precise, like meeting " + "minutes with soul." + ), + ), + VerificationTopicSeed( + key="gems", + icon=":mag:", + label="Hidden gems", + sort=2, + prompt=( + "Identify the valuable insights that emerged unexpectedly or were mentioned briefly but " + "contain significant potential. Look for: throwaway comments that solve problems, questions " + "that reframe the entire discussion, metaphors that clarify complex ideas, connections between " + "seemingly unrelated points, and wisdom hiding in personal anecdotes. Present these as discoveries " + "worth preserving, explaining why each gem matters. These are the insights people might forget but " + "shouldn't. Output character should be excited and precise." + ), + ), + VerificationTopicSeed( + key="truths", + icon=":eyes:", + label="Painful truths", + sort=3, + prompt=( + "Surface the uncomfortable realities acknowledged in this conversation - the elephants in the room that " + "got named, the difficult facts accepted, the challenging feedback given or received. Include systemic " + "problems identified, personal blind spots revealed, and market realities confronted. Present these with " + "compassion but without sugar-coating. Frame them as shared recognitions that took courage to voice. " + "These truths are painful but necessary for genuine progress. 
Output character should be gentle but " + "unflinching." + ), + ), + VerificationTopicSeed( + key="moments", + icon=":rocket:", + label="Breakthrough moments", + sort=4, + prompt=( + "Capture the moments when thinking shifted, new possibilities emerged, or collective understanding jumped " + "to a new level. Identify: sudden realizations, creative solutions, perspective shifts, moments when " + "complexity became simple, and ideas that energized the group. Show both the breakthrough itself and what " + "made it possible. These are the moments when the conversation transcended its starting point. Output " + "character should be energetic and forward-looking." + ), + ), + VerificationTopicSeed( + key="actions", + icon=":arrow_upper_right:", + label="What we think should happen", + sort=5, + prompt=( + "Synthesize the group's emerging sense of direction and next steps. Include: explicit recommendations made, " + "implicit preferences expressed, priorities that emerged through discussion, and logical next actions even " + "if not explicitly stated. Distinguish between unanimous direction and majority leanings. Present as " + "provisional navigation rather than fixed commands. This is the group's best current thinking about the " + "path forward. Output character should be pragmatic but inspirational." + ), + ), + VerificationTopicSeed( + key="disagreements", + icon=":warning:", + label="Moments we agreed to disagree", + sort=6, + prompt=( + "Document the points of productive tension where different perspectives remained distinct but respected. " + "Include: fundamental differences in approach, varying priorities, different risk tolerances, and contrasting " + "interpretations of data. Frame these not as failures to agree but as valuable diversity of thought. Show how " + "each perspective has merit. These disagreements are features, not bugs - they prevent premature convergence " + "and keep important tensions alive. Output character should be respectful and balanced." 
+ ), + ), +] + + +class VerificationTopicTranslation(BaseModel): + label: str + + +class VerificationTopicMetadata(BaseModel): + key: str + prompt: Optional[str] = None + icon: Optional[str] = None + sort: Optional[int] = None + translations: Dict[str, VerificationTopicTranslation] = Field(default_factory=dict) + + +class GetVerificationTopicsResponse(BaseModel): + selected_topics: List[str] + available_topics: List[VerificationTopicMetadata] + + +class GenerateArtifactsRequest(BaseModel): + topic_list: List[str] = Field(..., min_items=1) + conversation_id: str + + @validator("topic_list") + def validate_topic_list(cls, value: List[str]) -> List[str]: + cleaned = [item.strip() for item in value if item and item.strip()] + if not cleaned: + raise ValueError("topic_list must contain at least one topic key") + return cleaned + + +class ConversationArtifactResponse(BaseModel): + id: str + key: Optional[str] = None + content: str + conversation_id: str + approved_at: Optional[str] = None + read_aloud_stream_url: str + + +class GenerateArtifactsResponse(BaseModel): + artifact_list: List[ConversationArtifactResponse] + + +def _parse_directus_datetime(value: Optional[str]) -> Optional[datetime]: + if value is None: + return None + try: + # Directus returns ISO strings that may end with 'Z' + return datetime.fromisoformat(value.replace("Z", "+00:00")) + except ValueError: + logger.warning("Unable to parse datetime value '%s'", value) + return None + + +async def seed_default_verification_topics() -> None: + """ + Ensure that the canonical verification topics exist in Directus. 
+ """ + + for topic in DEFAULT_VERIFICATION_TOPICS: + existing = await run_in_thread_pool( + directus.get_items, + "verification_topic", + { + "query": { + "filter": { + "key": {"_eq": topic.key}, + "project_id": {"_null": True}, + }, + "fields": ["key"], + "limit": 1, + } + }, + ) + + if existing: + continue + + logger.info("Seeding verification topic '%s'", topic.key) + translations_payload = [ + { + "languages_code": DEFAULT_LANG, + "label": topic.label, + } + ] + + await run_in_thread_pool( + directus.create_item, + "verification_topic", + item_data={ + "key": topic.key, + "prompt": topic.prompt, + "icon": topic.icon, + "sort": topic.sort, + "translations": { + "create": translations_payload, + }, + }, + ) + + +async def _get_project(project_id: str) -> dict: + project_rows = await run_in_thread_pool( + directus.get_items, + "project", + { + "query": { + "filter": {"id": {"_eq": project_id}}, + "fields": [ + "id", + "is_verify_enabled", + "selected_verification_key_list", + "language", + "name", + ], + } + }, + ) + + if not project_rows: + raise ProjectNotFoundException + + project = project_rows[0] + if not project.get("is_verify_enabled", False): + raise HTTPException(status_code=403, detail="Verify is not enabled for this project") + + return project + + +async def _get_verification_topics_for_project(project_id: str) -> List[VerificationTopicMetadata]: + topic_rows = await run_in_thread_pool( + directus.get_items, + "verification_topic", + { + "query": { + "filter": { + "_or": [ + {"project_id": {"_null": True}}, + {"project_id": {"_eq": project_id}}, + ] + }, + "fields": [ + "key", + "prompt", + "icon", + "sort", + "project_id", + "translations.languages_code", + "translations.label", + ], + "limit": -1, + "sort": ["sort", "key"], + } + }, + ) + + topics: List[VerificationTopicMetadata] = [] + for raw_topic in topic_rows: + translations_map: Dict[str, VerificationTopicTranslation] = {} + for translation in raw_topic.get("translations", []) or []: + code = 
translation.get("languages_code") + label = translation.get("label") + if code and label: + translations_map[code] = VerificationTopicTranslation(label=label) + + topics.append( + VerificationTopicMetadata( + key=raw_topic.get("key"), + prompt=raw_topic.get("prompt"), + icon=raw_topic.get("icon"), + sort=raw_topic.get("sort"), + translations=translations_map, + ) + ) + + topics.sort(key=lambda topic: (topic.sort or 0, topic.key)) + return topics + + +def _parse_selected_topics( + raw_value: Optional[str], all_topics: List[VerificationTopicMetadata] +) -> List[str]: + if raw_value: + selected = [topic_key.strip() for topic_key in raw_value.split(",") if topic_key.strip()] + else: + selected = [] + + available_keys = {topic.key for topic in all_topics if topic.key} + filtered = [key for key in selected if key in available_keys] + + if filtered: + return filtered + return [topic.key for topic in all_topics if topic.key] + + +@VerifyRouter.get("/topics/{project_id}", response_model=GetVerificationTopicsResponse) +async def get_verification_topics( + project_id: str, + auth: DependencyDirectusSession, # noqa: ARG001 - reserved for future use +) -> GetVerificationTopicsResponse: + project = await _get_project(project_id) + topics = await _get_verification_topics_for_project(project_id) + selected_topics = _parse_selected_topics(project.get("selected_verification_key_list"), topics) + + return GetVerificationTopicsResponse(selected_topics=selected_topics, available_topics=topics) + + +async def _get_conversation_with_project(conversation_id: str) -> dict: + conversation_rows = await run_in_thread_pool( + directus.get_items, + "conversation", + { + "query": { + "filter": {"id": {"_eq": conversation_id}}, + "fields": [ + "id", + "participant_name", + "participant_email", + "project_id.id", + "project_id.language", + "project_id.name", + "project_id.is_verify_enabled", + ], + } + }, + ) + + if not conversation_rows: + raise ConversationNotFoundException + + conversation = 
conversation_rows[0] + project = conversation.get("project_id") or {} + if not project.get("is_verify_enabled", False): + raise HTTPException(status_code=403, detail="Verify is not enabled for this project") + + return conversation + + +async def _get_conversation_artifacts(conversation_id: str) -> List[dict]: + artifacts = await run_in_thread_pool( + directus.get_items, + "conversation_artifact", + { + "query": { + "filter": {"conversation_id": {"_eq": conversation_id}}, + "fields": [ + "id", + "key", + "content", + "date_created", + "approved_at", + "read_aloud_stream_url", + ], + "limit": -1, + "sort": ["date_created"], + } + }, + ) + return artifacts or [] + + +def _format_previous_artifacts(artifacts: List[dict]) -> str: + if not artifacts: + return "Previous artifacts: None\n" + + lines = ["Previous artifacts:"] + for artifact in artifacts: + created = artifact.get("date_created") or "unknown" + key = artifact.get("key") or "unknown key" + content = artifact.get("content") or "" + lines.append(f"- [{created}] ({key}) {content}") + lines.append("") + return "\n".join(lines) + + +async def _get_conversation_chunks(conversation_id: str) -> List[dict]: + chunk_rows = await run_in_thread_pool( + directus.get_items, + "conversation_chunk", + { + "query": { + "filter": {"conversation_id": {"_eq": conversation_id}}, + "fields": ["id", "timestamp", "transcript", "path"], + "sort": "timestamp", + "limit": 1500, + } + }, + ) + + chunks: List[dict] = [] + for row in chunk_rows or []: + chunks.append( + { + "id": row.get("id"), + "timestamp": _parse_directus_datetime(row.get("timestamp")), + "transcript": row.get("transcript"), + "path": row.get("path"), + } + ) + + return chunks + + +def _build_transcript_text(chunks: List[dict]) -> str: + transcripts: List[str] = [] + for chunk in chunks: + transcript = (chunk.get("transcript") or "").strip() + if transcript: + transcripts.append(transcript) + return "\n".join(transcripts) + + +def _select_audio_chunks( + chunks: 
List[dict], + last_artifact_time: Optional[datetime], +) -> List[dict]: + selected: List[dict] = [] + for chunk in chunks: + transcript = (chunk.get("transcript") or "").strip() + if transcript: + continue + + timestamp = chunk.get("timestamp") + if last_artifact_time and isinstance(timestamp, datetime): + if timestamp <= last_artifact_time: + continue + + if chunk.get("path"): + selected.append(chunk) + + return selected + + +def _format_audio_summary(audio_chunks: List[dict]) -> str: + if not audio_chunks: + return "Audio attachments: None." + + lines = ["Audio attachments for chunks without transcripts after the last artifact:"] + for chunk in audio_chunks: + timestamp = chunk.get("timestamp") + ts_value = timestamp.isoformat() if isinstance(timestamp, datetime) else "unknown" + lines.append(f"- chunk_id={chunk.get('id')} timestamp={ts_value}") + return "\n".join(lines) + + +def _build_user_message_content( + conversation: dict, + artifacts: List[dict], + transcript_text: str, + audio_summary: str, +) -> str: + project = conversation.get("project_id") or {} + parts = [ + f"Project: {project.get('name') or project.get('id')}", + f"Conversation ID: {conversation.get('id')}", + ] + + participant_name = conversation.get("participant_name") + if participant_name: + parts.append(f"Participant name: {participant_name}") + participant_email = conversation.get("participant_email") + if participant_email: + parts.append(f"Participant email: {participant_email}") + + parts.append("") # spacer + parts.append(_format_previous_artifacts(artifacts)) + parts.append("Conversation transcript:") + if transcript_text: + parts.append(transcript_text) + else: + parts.append("No transcript available.") + parts.append("") + parts.append(audio_summary) + return "\n".join(parts) + + +def _extract_response_text(response: Any) -> str: + """ + Normalize LiteLLM response content into a plain string. 
+ """ + choice = response.choices[0].message + content = choice.get("content") + if isinstance(content, str): + return content + if isinstance(content, list): + texts = [item.get("text", "") for item in content if isinstance(item, dict)] + return "\n".join(filter(None, texts)) + raise ValueError("Unexpected response format from completion call") + + +async def _create_conversation_artifact( + conversation_id: str, + key: str, + content: str, +) -> dict: + artifact_payload = { + "id": generate_uuid(), + "conversation_id": conversation_id, + "key": key, + "content": content, + "read_aloud_stream_url": "", + } + + created = await run_in_thread_pool( + directus.create_item, + "conversation_artifact", + item_data=artifact_payload, + ) + return created.get("data", artifact_payload) + + +@VerifyRouter.post("/generate", response_model=GenerateArtifactsResponse) +async def generate_verification_artifacts( + body: GenerateArtifactsRequest, + auth: DependencyDirectusSession, # noqa: ARG001 - reserved for future use +) -> GenerateArtifactsResponse: + if not GCP_SA_JSON: + raise HTTPException(status_code=500, detail="GCP credentials are not configured") + + conversation = await _get_conversation_with_project(body.conversation_id) + project_id = (conversation.get("project_id") or {}).get("id") + if not project_id: + raise HTTPException(status_code=400, detail="Conversation is missing project information") + + topics = await _get_verification_topics_for_project(project_id) + topic_map = {topic.key: topic for topic in topics if topic.key} + + target_topic_key = body.topic_list[0] + target_topic = topic_map.get(target_topic_key) + if not target_topic or not target_topic.prompt: + raise HTTPException( + status_code=400, detail=f"Verification topic '{target_topic_key}' not found" + ) + + artifacts = await _get_conversation_artifacts(body.conversation_id) + last_artifact_time = None + if artifacts: + last_artifact_time = _parse_directus_datetime(artifacts[-1].get("date_created")) + + 
chunks = await _get_conversation_chunks(body.conversation_id) + transcript_text = _build_transcript_text(chunks) + audio_chunks = _select_audio_chunks(chunks, last_artifact_time) + audio_summary = _format_audio_summary(audio_chunks) + + user_text = _build_user_message_content(conversation, artifacts, transcript_text, audio_summary) + message_content = [{"type": "text", "text": user_text}] + + for chunk in audio_chunks: + timestamp = chunk.get("timestamp") + ts_value = timestamp.isoformat() if isinstance(timestamp, datetime) else "unknown" + chunk_id = chunk.get("id") + message_content.append( + { + "type": "text", + "text": f"Audio chunk {chunk_id} captured at {ts_value}", + } + ) + path = chunk.get("path") + if path: + try: + message_content.append(_get_audio_file_object(path)) + except Exception as exc: + logger.warning("Failed to attach audio chunk %s: %s", chunk_id, exc) + + system_prompt = render_prompt( + "generate_artifact", + "en", + { + "prompt": target_topic.prompt, + }, + ) + + try: + response = litellm.completion( + model="vertex_ai/gemini-2.5-flash", + vertex_credentials=GCP_SA_JSON, + messages=[ + { + "role": "system", + "content": [ + { + "type": "text", + "text": system_prompt, + } + ], + }, + { + "role": "user", + "content": message_content, + }, + ], + ) + except Exception as exc: + logger.error("Gemini completion failed: %s", exc, exc_info=True) + raise HTTPException( + status_code=500, detail="Failed to generate verification artifact" + ) from exc + + generated_text = _extract_response_text(response) + artifact_record = await _create_conversation_artifact( + body.conversation_id, target_topic_key, generated_text + ) + + artifact_response = ConversationArtifactResponse( + id=artifact_record.get("id"), + key=artifact_record.get("key"), + content=artifact_record.get("content", ""), + conversation_id=artifact_record.get("conversation_id", body.conversation_id), + approved_at=artifact_record.get("approved_at"), + 
read_aloud_stream_url=artifact_record.get("read_aloud_stream_url") or "", + ) + + return GenerateArtifactsResponse(artifact_list=[artifact_response]) diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index fb3d7286..3e18e2e3 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -27,6 +27,7 @@ ) from dembrane.sentry import init_sentry from dembrane.api.api import api +from dembrane.api.verify import seed_default_verification_topics from dembrane.postgresdb_manager import PostgresDBManager # from lightrag.llm.azure_openai import azure_openai_complete @@ -85,6 +86,12 @@ async def initialize_database() -> bool: ) # This function is called during FASTAPI lifespan for each worker. logger.info("RAG object has been initialized") + try: + await seed_default_verification_topics() + logger.info("Verification topics seeded") + except Exception: # pragma: no cover - startup logging only + logger.exception("Failed to seed verification topics during startup") + yield # shutdown logger.info("shutting down server") diff --git a/echo/server/prompt_templates/generate_artifact.en.jinja b/echo/server/prompt_templates/generate_artifact.en.jinja new file mode 100644 index 00000000..5858ce19 --- /dev/null +++ b/echo/server/prompt_templates/generate_artifact.en.jinja @@ -0,0 +1,14 @@ +You will receive a conversation transcript (and optionally an audio fragment of the last part of a conversation). You are a conversation crystallizer turning these vague transcripts into super concrete wisdom nuggets we call outcomes. + +Specific Ask: +{{ prompt }} + +General guidelines: +Do not introduce yourself or write a preamble: go directly to writing the outcome. +Use "we/our" language to reinforce collective ownership. +Be concise enough to review quickly but complete enough to stand alone. +Include enough context that someone reading it weeks later understands. +Feel worth signing - important enough to formalize but not overwrought. 
+Acknowledge uncertainty where it exists. +Make participants feel heard and understood. +The magic is turning messy human dialogue into something people want to put their name on - not because it's perfect, but because it's true. From 2b3b04f9144cdb685675ce2fdf96b0c7996e80d4 Mon Sep 17 00:00:00 2001 From: Usama Date: Fri, 7 Nov 2025 10:36:57 +0000 Subject: [PATCH 09/23] - add verified icon in conversation accordians - show "Verified Artifacts" for individual conversations in dashbaoard --- .../conversation/ConversationAccordion.tsx | 20 +++ .../conversation/VerifiedArtefactsSection.tsx | 117 ++++++++++++++++++ .../components/conversation/hooks/index.ts | 3 + echo/frontend/src/lib/typesDirectus.d.ts | 1 + .../ProjectConversationOverview.tsx | 6 + 5 files changed, 147 insertions(+) create mode 100644 echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx diff --git a/echo/frontend/src/components/conversation/ConversationAccordion.tsx b/echo/frontend/src/components/conversation/ConversationAccordion.tsx index 74d2fa11..0e04eafb 100644 --- a/echo/frontend/src/components/conversation/ConversationAccordion.tsx +++ b/echo/frontend/src/components/conversation/ConversationAccordion.tsx @@ -38,6 +38,7 @@ import { IconArrowsUpDown, IconChevronDown, IconChevronUp, + IconRosetteDiscountCheckFilled, IconSearch, IconTags, IconX, @@ -466,6 +467,12 @@ const ConversationAccordionItem = ({ const isAutoSelectEnabled = chatContextQuery.data?.auto_select_bool ?? false; + // Check if conversation has approved artefacts + const hasVerifiedArtefacts = + conversation?.artefacts && + conversation?.artefacts?.length > 0 && + conversation?.artefacts?.some((artefact) => artefact.approved_at); + return ( {conversation.participant_email ?? 
conversation.participant_name} + {hasVerifiedArtefacts && ( + + + + + + )} { + if (!timestamp) return ""; + + try { + return format(new Date(timestamp), "MMM d, yyyy 'at' h:mm a"); + } catch { + return ""; + } +}; + +export const VerifiedArtefactsSection = ({ + conversationId, +}: VerifiedArtefactsSectionProps) => { + // Fetch all artefacts with content for display + const { data: artefacts, isLoading } = useQuery({ + enabled: !!conversationId, + queryFn: () => + directus.request( + readItems("conversation_artefact", { + fields: [ + "id", + "conversation_id", + "approved_at", + "key", + "content", + "title", + ], + filter: { conversation_id: { _eq: conversationId } }, + sort: ["-approved_at"], + }), + ), + queryKey: ["conversation_artefacts_full", conversationId], + }); + + // Don't show the section if there are no artefacts + if (!isLoading && (!artefacts || artefacts.length === 0)) { + return null; + } + + return ( + + + + <Trans>Verified Artefacts</Trans> + + + + + + + {isLoading && ( + + + + + )} + + {!isLoading && artefacts && artefacts.length > 0 && ( + + {artefacts.map((artefact) => { + const formattedDate = formatArtefactTime(artefact.approved_at); + + return ( + + + + + {artefact.title ?? 
artefact.key} + {formattedDate && ( + + + Approved + {" "} + {formattedDate} + + )} + + + + + + + + ); + })} + + )} + + ); +}; diff --git a/echo/frontend/src/components/conversation/hooks/index.ts b/echo/frontend/src/components/conversation/hooks/index.ts index 1dbfb0f8..6cc367c1 100644 --- a/echo/frontend/src/components/conversation/hooks/index.ts +++ b/echo/frontend/src/components/conversation/hooks/index.ts @@ -903,6 +903,9 @@ export const useInfiniteConversationsByProjectId = ( "error", ], }, + { + artefacts: ["id", "approved_at"], + }, ], filter: { chunks: { diff --git a/echo/frontend/src/lib/typesDirectus.d.ts b/echo/frontend/src/lib/typesDirectus.d.ts index c2a22412..90ce0658 100644 --- a/echo/frontend/src/lib/typesDirectus.d.ts +++ b/echo/frontend/src/lib/typesDirectus.d.ts @@ -90,6 +90,7 @@ type ConversationArtefact = { created_at?: string | null; id: string; key?: string | null; + title?: string | null; updated_at?: string | null; }; diff --git a/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx b/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx index cb924f16..3b7b4bd7 100644 --- a/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx +++ b/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx @@ -24,6 +24,7 @@ import { useConversationById, useConversationChunks, } from "@/components/conversation/hooks"; +import { VerifiedArtefactsSection } from "@/components/conversation/VerifiedArtefactsSection"; import { useProjectById } from "@/components/project/hooks"; import { ENABLE_DISPLAY_CONVERSATION_LINKS } from "@/config"; import { generateConversationSummary } from "@/lib/api"; @@ -159,6 +160,11 @@ export const ProjectConversationOverviewRoute = () => { )} + {/* Verified artefacts */} + {conversationId && ( + + )} + {conversationQuery.data && projectQuery.data && ( <> From 3c165b214c2be51cb9277606c9bbf49e40ac64fb Mon Sep 17 00:00:00 2001 From: Usama 
Date: Fri, 7 Nov 2025 10:43:55 +0000 Subject: [PATCH 10/23] - comment title for now --- .../src/components/conversation/VerifiedArtefactsSection.tsx | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx b/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx index fcc3852d..97daa587 100644 --- a/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx +++ b/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx @@ -45,7 +45,7 @@ export const VerifiedArtefactsSection = ({ "approved_at", "key", "content", - "title", + // "title", ], filter: { conversation_id: { _eq: conversationId } }, sort: ["-approved_at"], @@ -92,7 +92,8 @@ export const VerifiedArtefactsSection = ({ - {artefact.title ?? artefact.key} + {/* {artefact.title ?? artefact.key} */} + {artefact.key || ""} {formattedDate && ( From 5a74bb4a90212356d42e594b6822f6de69efbd61 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Fri, 7 Nov 2025 11:01:28 +0000 Subject: [PATCH 11/23] feat: wire verify endpoints into frontend --- .../ParticipantConversationAudioContent.tsx | 6 +- .../verify/VerifiedArtefactItem.tsx | 20 +- .../verify/VerifiedArtefactsList.tsx | 41 ++- .../participant/verify/VerifyArtefact.tsx | 152 ++++++++++-- .../participant/verify/VerifySelection.tsx | 124 ++++++---- .../participant/verify/hooks/index.ts | 24 +- .../project/ProjectPortalEditor.tsx | 234 ++++++++++++++---- .../src/components/project/hooks/index.ts | 36 +++ echo/frontend/src/lib/api.ts | 99 ++++---- .../src/routes/project/ProjectRoutes.tsx | 21 +- echo/server/dembrane/api/verify.py | 34 +++ .../generate_artifact.en.jinja | 1 + 12 files changed, 608 insertions(+), 184 deletions(-) diff --git a/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx b/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx index 1bc2d9bd..50a03e42 100644 --- 
a/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx +++ b/echo/frontend/src/components/participant/ParticipantConversationAudioContent.tsx @@ -31,7 +31,11 @@ export const ParticipantConversationAudioContent = () => { /> )} - + void; }; @@ -19,14 +19,7 @@ const formatArtefactTime = (timestamp: string | null | undefined): string => { } }; -export const VerifiedArtefactItem = ({ - artefact, - onViewArtefact, -}: VerifiedArtefactItemProps) => { - // Get the label from the key - const option = VERIFY_OPTIONS.find((opt) => opt.key === artefact.key); - const label = option?.label || artefact.key; - +export const VerifiedArtefactItem = ({ artefact, label, icon, onViewArtefact }: VerifiedArtefactItemProps) => { // Format the timestamp using date-fns const formattedDate = formatArtefactTime(artefact.approved_at); @@ -40,13 +33,16 @@ export const VerifiedArtefactItem = ({ - {label} + + {icon ? {icon} : null} + {label} + {formattedDate && ( {formattedDate} diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx index b84500e6..7b060e6c 100644 --- a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx @@ -2,15 +2,24 @@ import { Box, Skeleton, Stack } from "@mantine/core"; import { useDisclosure } from "@mantine/hooks"; import { useState } from "react"; import { ArtefactModal } from "./ArtefactModal"; -import { useConversationArtefact, useConversationArtefacts } from "./hooks"; +import { + useConversationArtefact, + useConversationArtefacts, + useVerificationTopics, +} from "./hooks"; +import { TOPIC_ICON_MAP } from "./VerifySelection"; import { VerifiedArtefactItem } from "./VerifiedArtefactItem"; type VerifiedArtefactsListProps = { conversationId: string; + projectId: string; + projectLanguage?: string | null; }; export const 
VerifiedArtefactsList = ({ conversationId, + projectId, + projectLanguage, }: VerifiedArtefactsListProps) => { const { data: artefacts, isLoading } = useConversationArtefacts(conversationId); @@ -22,6 +31,34 @@ export const VerifiedArtefactsList = ({ // Fetch the full artefact content when one is selected const { data: selectedArtefact, isLoading: isLoadingArtefact } = useConversationArtefact(selectedArtefactId ?? undefined); + const topicsQuery = useVerificationTopics(projectId); + + const LANGUAGE_TO_LOCALE: Record = { + de: "de-DE", + en: "en-US", + es: "es-ES", + fr: "fr-FR", + nl: "nl-NL", + }; + + const locale = + LANGUAGE_TO_LOCALE[projectLanguage ?? "en"] ?? LANGUAGE_TO_LOCALE.en; + + const availableTopics = topicsQuery.data?.available_topics ?? []; + const topicMetadataMap = new Map( + availableTopics.map((topic) => [ + topic.key, + { + label: + topic.translations?.[locale]?.label ?? + topic.translations?.["en-US"]?.label ?? + topic.key, + icon: + TOPIC_ICON_MAP[topic.key] ?? + (topic.icon && !topic.icon.startsWith(":") ? 
topic.icon : undefined), + }, + ]), + ); const handleViewArtefact = (artefactId: string) => { setSelectedArtefactId(artefactId); @@ -55,6 +92,8 @@ export const VerifiedArtefactsList = ({ ))} diff --git a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx index 72e3cf1f..2a047b33 100644 --- a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx +++ b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx @@ -17,12 +17,21 @@ import { useI18nNavigate } from "@/hooks/useI18nNavigate"; import { Logo } from "../../common/Logo"; import { Markdown } from "../../common/Markdown"; import { MarkdownWYSIWYG } from "../../form/MarkdownWYSIWYG/MarkdownWYSIWYG"; +import { useParticipantProjectById } from "../hooks"; import { useGenerateVerificationArtefact, useSaveVerificationArtefact, + useVerificationTopics, } from "./hooks"; import { VerifyInstructions } from "./VerifyInstructions"; -import { VERIFY_OPTIONS } from "./VerifySelection"; + +const LANGUAGE_TO_LOCALE: Record = { + de: "de-DE", + en: "en-US", + es: "es-ES", + fr: "fr-FR", + nl: "nl-NL", +}; const MemoizedMarkdownWYSIWYG = memo(MarkdownWYSIWYG); @@ -32,6 +41,8 @@ export const VerifyArtefact = () => { const [searchParams] = useSearchParams(); const saveArtefactMutation = useSaveVerificationArtefact(); const generateArtefactMutation = useGenerateVerificationArtefact(); + const projectQuery = useParticipantProjectById(projectId ?? 
""); + const topicsQuery = useVerificationTopics(projectId); // Get selected option from URL params const selectedOptionKey = searchParams.get("key"); @@ -48,32 +59,67 @@ export const VerifyArtefact = () => { const [isPlaying, setIsPlaying] = useState(false); const [lastReviseTime, setLastReviseTime] = useState(null); const [reviseTimeRemaining, setReviseTimeRemaining] = useState(0); + const [generatedArtifactId, setGeneratedArtifactId] = useState( + null, + ); // Ref for audio element const audioRef = useRef(null); const reviseTimerRef = useRef(null); - const selectedOption = VERIFY_OPTIONS.find( - (opt) => opt.key === selectedOptionKey, + const projectLanguage = projectQuery.data?.language ?? "en"; + const languageLocale = + LANGUAGE_TO_LOCALE[projectLanguage] ?? LANGUAGE_TO_LOCALE.en; + + const availableTopics = topicsQuery.data?.available_topics ?? []; + const selectedTopics = topicsQuery.data?.selected_topics ?? []; + + const selectedTopic = availableTopics.find( + (topic) => topic.key === selectedOptionKey, ); - const selectedOptionLabel = selectedOption?.label || t`verified`; + + const selectedOptionLabel = + selectedTopic?.translations?.[languageLocale]?.label ?? + selectedTopic?.translations?.["en-US"]?.label ?? + selectedTopic?.key ?? 
+ t`verified`; // Redirect back if no selected option key useEffect(() => { - if (!selectedOptionKey) { + if ( + !selectedOptionKey || + (topicsQuery.isSuccess && + !selectedTopics.includes(selectedOptionKey)) + ) { navigate(`/${projectId}/conversation/${conversationId}/verify`, { replace: true, }); } - }, [selectedOptionKey, navigate, projectId, conversationId]); + }, [ + selectedOptionKey, + selectedTopics, + topicsQuery.isSuccess, + navigate, + projectId, + conversationId, + ]); // biome-ignore lint/correctness/useExhaustiveDependencies: we want to regenerate the artefact if the user clicks the next button useEffect(() => { - if (!selectedOptionKey || !conversationId || hasGenerated) return; + if ( + !selectedOptionKey || + !conversationId || + hasGenerated || + topicsQuery.isLoading || + !selectedTopics.includes(selectedOptionKey) + ) + return; const generateArtefact = async () => { try { setHasGenerated(true); + setGeneratedArtifactId(null); + setReadAloudUrl(""); const response = await generateArtefactMutation.mutateAsync({ conversationId, topicList: [selectedOptionKey], // only one for now @@ -83,6 +129,7 @@ export const VerifyArtefact = () => { if (response && response.length > 0) { const artifact = response[0]; setArtefactContent(artifact.content); + setGeneratedArtifactId(artifact.id); // Set read aloud URL from API response setReadAloudUrl(artifact.read_aloud_stream_url || ""); } @@ -93,21 +140,34 @@ export const VerifyArtefact = () => { }; generateArtefact(); - }, [selectedOptionKey, conversationId, hasGenerated]); + }, [ + selectedOptionKey, + conversationId, + hasGenerated, + topicsQuery.isLoading, + selectedTopics, + generateArtefactMutation, + ]); const handleNextFromInstructions = () => { setShowInstructions(false); }; const handleApprove = async () => { - if (!conversationId || !selectedOptionKey || !artefactContent) return; + if ( + !conversationId || + !selectedOptionKey || + !artefactContent || + !generatedArtifactId + ) + return; 
setIsApproving(true); try { await saveArtefactMutation.mutateAsync({ + artefactId: generatedArtifactId, artefactContent, conversationId, - key: selectedOptionKey, }); // Navigate back to conversation @@ -122,19 +182,17 @@ export const VerifyArtefact = () => { if (!conversationId || !selectedOptionKey) return; setIsRevising(true); try { - // Mock API call to revise artefact (3 seconds) - await new Promise((resolve) => setTimeout(resolve, 3000)); - + setGeneratedArtifactId(null); + setReadAloudUrl(""); const response = await generateArtefactMutation.mutateAsync({ conversationId: conversationId, topicList: [selectedOptionKey], // only one for now }); - // Get the first artifact from the response if (response && response.length > 0) { const artifact = response[0]; setArtefactContent(artifact.content); - // Set read aloud URL from API response + setGeneratedArtifactId(artifact.id); setReadAloudUrl(artifact.read_aloud_stream_url || ""); } setLastReviseTime(Date.now()); // Start cooldown timer @@ -228,12 +286,18 @@ export const VerifyArtefact = () => { }; }, []); + const isInitialLoading = + topicsQuery.isLoading || + projectQuery.isLoading || + generateArtefactMutation.isPending || + !generatedArtifactId; + // step 1: show instructions while generating response from api if (showInstructions) { return ( ); @@ -272,6 +336,9 @@ export const VerifyArtefact = () => { {/* Title with Read Aloud Button */} + {selectedOptionIcon ? ( + <span className="mr-2">{selectedOptionIcon}</span> + ) : null} <Trans id="participant.verify.artefact.title"> Artefact: {selectedOptionLabel} </Trans> @@ -340,7 +407,13 @@ export const VerifyArtefact = () => { variant="default" className="flex-1" onClick={handleRevise} - disabled={isRevising || isApproving || reviseTimeRemaining > 0} + disabled={ + isInitialLoading || + isRevising || + isApproving || + reviseTimeRemaining > 0 || + !selectedOptionKey + } > {reviseTimeRemaining > 0 ? 
( <>{Math.ceil(reviseTimeRemaining / 1000)}s</> @@ -356,7 +429,12 @@ export const VerifyArtefact = () => { variant="default" onClick={handleEdit} px="sm" - disabled={isRevising || isApproving} + disabled={ + isRevising || + isApproving || + isInitialLoading || + !generatedArtifactId + } > <IconPencil size={20} /> </Button> @@ -368,9 +446,15 @@ export const VerifyArtefact = () => { className="flex-1" onClick={handleApprove} loading={isApproving} - disabled={isApproving || isRevising} + disabled={ + isApproving || + isRevising || + isInitialLoading || + !generatedArtifactId || + !artefactContent + } > - <Trans id="participant.verify.action.button.aprrove"> + <Trans id="participant.verify.action.button.approve"> Approve </Trans> </Button> @@ -380,3 +464,31 @@ export const VerifyArtefact = () => { </Stack> ); }; + const selectedOptionIcon = + (selectedTopic && + (TOPIC_ICON_MAP[selectedTopic.key] ?? + (selectedTopic.icon && !selectedTopic.icon.startsWith(":") + ? selectedTopic.icon + : undefined))) ?? + undefined; + if (projectQuery.isError || topicsQuery.isError) { + return ( + <Stack gap="md" align="center" justify="center" className="h-full"> + <Text c="red"> + <Trans> + Something went wrong while preparing the verification experience. 
+ </Trans> + </Text> + <Button + variant="subtle" + onClick={() => + navigate(`/${projectId}/conversation/${conversationId}/verify`, { + replace: true, + }) + } + > + <Trans>Go back</Trans> + </Button> + </Stack> + ); + } diff --git a/echo/frontend/src/components/participant/verify/VerifySelection.tsx b/echo/frontend/src/components/participant/verify/VerifySelection.tsx index 7f71e8d8..a63ceca2 100644 --- a/echo/frontend/src/components/participant/verify/VerifySelection.tsx +++ b/echo/frontend/src/components/participant/verify/VerifySelection.tsx @@ -1,57 +1,75 @@ -import { t } from "@lingui/core/macro"; import { Trans } from "@lingui/react/macro"; -import { Box, Button, Group, Stack, Title } from "@mantine/core"; +import { Box, Button, Group, Stack, Text, Title } from "@mantine/core"; import { IconArrowRight } from "@tabler/icons-react"; -import { useState } from "react"; +import { useEffect, useState } from "react"; import { useParams } from "react-router"; import { useI18nNavigate } from "@/hooks/useI18nNavigate"; import { useParticipantProjectById } from "../hooks"; +import { useVerificationTopics } from "./hooks"; -// Verify options that match the verification_topics field -export const VERIFY_OPTIONS = [ - { - icon: "✅", - key: "agreements", - label: t`What we actually agreed on`, - }, - { - icon: "🔍", - key: "gems", - label: t`Hidden gems`, - }, - { - icon: "👀", - key: "truths", - label: t`Painful truths`, - }, - { - icon: "🚀", - key: "moments", - label: t`Breakthrough moments`, - }, - { - icon: "↗️", - key: "actions", - label: t`What we think should happen`, - }, - { - icon: "⚠️", - key: "disagreements", - label: t`Moments we agreed to disagree`, - }, -]; +const LANGUAGE_TO_LOCALE: Record<string, string> = { + de: "de-DE", + en: "en-US", + es: "es-ES", + fr: "fr-FR", + nl: "nl-NL", +}; + +export const TOPIC_ICON_MAP: Record<string, string> = { + actions: "↗️", + agreements: "✅", + disagreements: "⚠️", + gems: "🔍", + moments: "🚀", + truths: "👀", +}; export 
const VerifySelection = () => { const { projectId, conversationId } = useParams(); const navigate = useI18nNavigate(); const [selectedOption, setSelectedOption] = useState<string | null>(null); const projectQuery = useParticipantProjectById(projectId ?? ""); + const topicsQuery = useVerificationTopics(projectId); - // Filter options based on enabled topics - const enabledTopics = projectQuery.data?.verification_topics ?? []; - const availableOptions = VERIFY_OPTIONS.filter((option) => - enabledTopics.includes(option.key), - ); + const projectLanguage = projectQuery.data?.language ?? "en"; + const languageLocale = + LANGUAGE_TO_LOCALE[projectLanguage] ?? LANGUAGE_TO_LOCALE.en; + + const selectedTopics = topicsQuery.data?.selected_topics ?? []; + const availableTopics = topicsQuery.data?.available_topics ?? []; + + const availableOptions = availableTopics + .filter((topic) => selectedTopics.includes(topic.key)) + .map((topic) => { + const translations = topic.translations ?? {}; + const localizedLabel = + translations[languageLocale]?.label ?? + translations["en-US"]?.label ?? + topic.key; + + const icon = + TOPIC_ICON_MAP[topic.key] ?? + (topic.icon && !topic.icon.startsWith(":") ? topic.icon : undefined) ?? 
+ "•"; + + return { + key: topic.key, + label: localizedLabel, + icon, + }; + }); + + const isLoading = projectQuery.isLoading || topicsQuery.isLoading; + + useEffect(() => { + if ( + selectedOption && + selectedTopics.length > 0 && + !selectedTopics.includes(selectedOption) + ) { + setSelectedOption(null); + } + }, [selectedOption, selectedTopics]); const handleNext = () => { if (!selectedOption) return; @@ -74,6 +92,16 @@ export const VerifySelection = () => { {/* Options list */} <Group gap="md"> + {isLoading && ( + <Text size="sm" c="dimmed"> + <Trans>Loading verification topics…</Trans> + </Text> + )} + {!isLoading && availableOptions.length === 0 && ( + <Text size="sm" c="dimmed"> + <Trans>No verification topics are configured for this project.</Trans> + </Text> + )} {availableOptions.map((option) => ( <Box key={option.key} @@ -99,10 +127,18 @@ export const VerifySelection = () => { radius="3xl" onClick={handleNext} className="w-full" - rightSection={<IconArrowRight size={20} className="ml-1" />} - disabled={!selectedOption} + rightSection={ + isLoading ? null : <IconArrowRight size={20} className="ml-1" /> + } + disabled={!selectedOption || isLoading} > - <Trans id="participant.verify.selection.button.next">Next</Trans> + {isLoading ? 
( + <Trans>Loading…</Trans> + ) : ( + <Trans id="participant.verify.selection.button.next"> + Next + </Trans> + )} </Button> </Stack> ); diff --git a/echo/frontend/src/components/participant/verify/hooks/index.ts b/echo/frontend/src/components/participant/verify/hooks/index.ts index f523f58d..d1dba5c4 100644 --- a/echo/frontend/src/components/participant/verify/hooks/index.ts +++ b/echo/frontend/src/components/participant/verify/hooks/index.ts @@ -1,9 +1,20 @@ -import { createItem, readItems } from "@directus/sdk"; +import { readItems, updateItem } from "@directus/sdk"; import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; import { toast } from "@/components/common/Toaster"; -import { generateVerificationArtefact } from "@/lib/api"; +import { + generateVerificationArtefact, + getVerificationTopics, +} from "@/lib/api"; import { directus } from "@/lib/directus"; +export const useVerificationTopics = (projectId: string | undefined) => { + return useQuery({ + enabled: !!projectId, + queryFn: () => getVerificationTopics(projectId!), + queryKey: ["verify", "topics", projectId], + }); +}; + // Hook for generating verification artefacts export const useGenerateVerificationArtefact = () => { return useMutation({ @@ -21,21 +32,18 @@ export const useSaveVerificationArtefact = () => { return useMutation({ mutationFn: async ({ + artefactId, conversationId, artefactContent, - key, }: { + artefactId: string; conversationId: string; artefactContent: string; - key: string; }) => { - // await new Promise((resolve) => setTimeout(resolve, 1000)); return directus.request( - createItem("conversation_artefact", { + updateItem("conversation_artifact", artefactId, { approved_at: new Date().toISOString(), content: artefactContent, - conversation_id: conversationId, - key, }), ); }, diff --git a/echo/frontend/src/components/project/ProjectPortalEditor.tsx b/echo/frontend/src/components/project/ProjectPortalEditor.tsx index a2932072..227a28cd 100644 --- 
a/echo/frontend/src/components/project/ProjectPortalEditor.tsx +++ b/echo/frontend/src/components/project/ProjectPortalEditor.tsx @@ -25,12 +25,16 @@ import { memo, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Controller, useForm, useWatch } from "react-hook-form"; import { z } from "zod"; import { useAutoSave } from "@/hooks/useAutoSave"; +import type { VerificationTopicsResponse } from "@/lib/api"; import { Logo } from "../common/Logo"; import { FormLabel } from "../form/FormLabel"; import { MarkdownWYSIWYG } from "../form/MarkdownWYSIWYG/MarkdownWYSIWYG"; import { SaveStatus } from "../form/SaveStatus"; -import { VERIFY_OPTIONS } from "../participant/verify/VerifySelection"; -import { useUpdateProjectByIdMutation } from "./hooks"; +import { TOPIC_ICON_MAP } from "../participant/verify/VerifySelection"; +import { + useUpdateProjectByIdMutation, + useUpdateVerificationTopicsMutation, +} from "./hooks"; import { useProjectSharingLink } from "./ProjectQRCode"; import { ProjectTagsInput } from "./ProjectTagsInput"; @@ -52,6 +56,17 @@ const FormSchema = z.object({ type ProjectPortalFormValues = z.infer<typeof FormSchema>; +const LANGUAGE_TO_LOCALE: Record<string, string> = { + de: "de-DE", + en: "en-US", + es: "es-ES", + fr: "fr-FR", + nl: "nl-NL", +}; + +const normalizeTopicList = (topics: string[]): string[] => + Array.from(new Set(topics.map((topic) => topic.trim()).filter(Boolean))).sort(); + const ProperNounInput = ({ value, onChange, @@ -137,8 +152,16 @@ const MemoizedMarkdownWYSIWYG = memo(MarkdownWYSIWYG); // Memoized ProjectTagsInput wrapper const MemoizedProjectTagsInput = memo(ProjectTagsInput); -const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ +type ProjectPortalEditorProps = { + project: Project; + verificationTopics: VerificationTopicsResponse; + isVerificationTopicsLoading?: boolean; +}; + +const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ project, + verificationTopics, + 
isVerificationTopicsLoading = false, }) => { const [showPreview, setShowPreview] = useState(false); const link = useProjectSharingLink(project); @@ -146,6 +169,37 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ const [previewWidth, setPreviewWidth] = useState(400); const [previewHeight, setPreviewHeight] = useState(300); + const projectLanguageCode = (project.language ?? "en") as + | "en" + | "nl" + | "de" + | "fr" + | "es"; + const languageLocale = + LANGUAGE_TO_LOCALE[projectLanguageCode] ?? LANGUAGE_TO_LOCALE.en; + + const availableVerifyTopics = useMemo( + () => + (verificationTopics?.available_topics ?? []).map((topic) => ({ + key: topic.key, + label: + topic.translations?.[languageLocale]?.label ?? + topic.translations?.["en-US"]?.label ?? + topic.key, + icon: + TOPIC_ICON_MAP[topic.key] ?? + (topic.icon && !topic.icon.startsWith(":") + ? topic.icon + : undefined), + })), + [verificationTopics, languageLocale], + ); + + const selectedTopicDefaults = useMemo( + () => verificationTopics?.selected_topics ?? [], + [verificationTopics], + ); + // biome-ignore lint/correctness/useExhaustiveDependencies: just a dependency issue biome catches, not an issue though const defaultValues = useMemo(() => { return { @@ -166,14 +220,14 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ is_project_notification_subscription_allowed: project.is_project_notification_subscription_allowed ?? false, is_verify_enabled: project.is_verify_enabled ?? false, - language: (project.language ?? "en") as "en" | "nl" | "de" | "fr" | "es", - verification_topics: project.verification_topics ?? 
[], + language: projectLanguageCode, + verification_topics: selectedTopicDefaults, }; - }, [project.id]); + }, [project.id, projectLanguageCode, selectedTopicDefaults]); const formResolver = useMemo(() => zodResolver(FormSchema), []); - const { control, handleSubmit, watch, formState, reset } = + const { control, handleSubmit, watch, formState, reset, setValue, getValues } = useForm<ProjectPortalFormValues>({ defaultValues, mode: "onChange", @@ -198,18 +252,50 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ }); const updateProjectMutation = useUpdateProjectByIdMutation(); + const updateVerificationTopicsMutation = + useUpdateVerificationTopicsMutation(); const onSave = useCallback( async (values: ProjectPortalFormValues) => { + const { verification_topics, ...projectPayload } = values; + await updateProjectMutation.mutateAsync({ id: project.id, - payload: values, + payload: projectPayload, }); + const normalizedNewTopics = normalizeTopicList(verification_topics); + const normalizedCurrentTopics = normalizeTopicList( + selectedTopicDefaults, + ); + const topicsChanged = + normalizedNewTopics.length !== normalizedCurrentTopics.length || + normalizedNewTopics.some( + (topic, index) => topic !== normalizedCurrentTopics[index], + ); + + if (topicsChanged) { + await updateVerificationTopicsMutation.mutateAsync({ + projectId: project.id, + topicList: normalizedNewTopics, + }); + } + // Reset the form with the current values to clear the dirty state - reset(values, { keepDirty: false, keepValues: true }); + reset( + { + ...values, + verification_topics: normalizedNewTopics, + } + ); }, - [project.id, updateProjectMutation, reset], + [ + project.id, + updateProjectMutation, + updateVerificationTopicsMutation, + reset, + selectedTopicDefaults, + ], ); const { @@ -230,6 +316,38 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ dispatchAutoSaveRef.current = dispatchAutoSave; }, [dispatchAutoSave]); + useEffect(() => { + if 
(!verificationTopics || isVerificationTopicsLoading) { + return; + } + + if (formState.dirtyFields.verification_topics) { + return; + } + + const normalizedSelected = normalizeTopicList( + verificationTopics.selected_topics ?? [], + ); + const current = normalizeTopicList(getValues("verification_topics") ?? []); + + const differs = + normalizedSelected.length !== current.length || + normalizedSelected.some((topic, index) => topic !== current[index]); + + if (differs) { + setValue("verification_topics", normalizedSelected, { + shouldDirty: false, + shouldTouch: false, + }); + } + }, [ + formState.dirtyFields.verification_topics, + getValues, + setValue, + verificationTopics, + isVerificationTopicsLoading, + ]); + useEffect(() => { const subscription = watch((values, { type }) => { if (type === "change" && values) { @@ -620,44 +738,68 @@ const ProjectPortalEditorComponent: React.FC<{ project: Project }> = ({ verification. </Trans> </Text> - <Group gap="xs"> - {VERIFY_OPTIONS.map((topic) => ( - <Badge - key={topic.key} - className={ - watchedVerifyEnabled - ? "cursor-pointer capitalize" - : "capitalize" - } - variant={ - field.value.includes(topic.key) - ? "filled" - : "default" - } - size="lg" - style={{ - cursor: watchedVerifyEnabled - ? "pointer" - : "not-allowed", - opacity: watchedVerifyEnabled ? 1 : 0.6, - }} - onClick={() => { - if (!watchedVerifyEnabled) return; - const newTopics = field.value.includes( - topic.key, - ) - ? field.value.filter((t) => t !== topic.key) - : [...field.value, topic.key]; - field.onChange(newTopics); - }} - > + {isVerificationTopicsLoading ? ( + <Text size="sm" c="dimmed"> + <Trans>Loading verification topics…</Trans> + </Text> + ) : ( + <> + {availableVerifyTopics.length === 0 ? 
( + <Text size="sm" c="dimmed"> + <Trans>No verification topics available.</Trans> + </Text> + ) : ( <Group gap="xs"> - <span>{topic.icon}</span> - <span>{topic.label}</span> + {availableVerifyTopics.map((topic) => ( + <Badge + key={topic.key} + className={ + watchedVerifyEnabled + ? "cursor-pointer capitalize" + : "capitalize" + } + variant={ + field.value.includes(topic.key) + ? "filled" + : "default" + } + size="lg" + style={{ + cursor: watchedVerifyEnabled + ? "pointer" + : "not-allowed", + opacity: watchedVerifyEnabled ? 1 : 0.6, + }} + onClick={() => { + if (!watchedVerifyEnabled) return; + const normalizedCurrent = + normalizeTopicList(field.value ?? []); + const isSelected = normalizedCurrent.includes( + topic.key, + ); + const updated = isSelected + ? normalizedCurrent.filter( + (item) => item !== topic.key, + ) + : normalizeTopicList([ + ...normalizedCurrent, + topic.key, + ]); + field.onChange(updated); + }} + > + <Group gap="xs"> + {topic.icon ? ( + <span>{topic.icon}</span> + ) : null} + <span>{topic.label}</span> + </Group> + </Badge> + ))} </Group> - </Badge> - ))} - </Group> + )} + </> + )} </Stack> )} /> diff --git a/echo/frontend/src/components/project/hooks/index.ts b/echo/frontend/src/components/project/hooks/index.ts index f96f53af..e0ba3116 100644 --- a/echo/frontend/src/components/project/hooks/index.ts +++ b/echo/frontend/src/components/project/hooks/index.ts @@ -19,6 +19,8 @@ import { api, cloneProjectById, getLatestProjectAnalysisRunByProjectId, + getVerificationTopics, + updateVerificationTopics, } from "@/lib/api"; import { directus } from "@/lib/directus"; @@ -276,3 +278,37 @@ export const useProjectById = ({ queryKey: ["projects", projectId, query], }); }; + +export const useVerificationTopicsQuery = (projectId: string | undefined) => { + return useQuery({ + enabled: !!projectId, + queryFn: () => getVerificationTopics(projectId!), + queryKey: ["verify", "topics", projectId], + }); +}; + +export const useUpdateVerificationTopicsMutation 
= () => { + const queryClient = useQueryClient(); + return useMutation({ + mutationFn: ({ + projectId, + topicList, + }: { + projectId: string; + topicList: string[]; + }) => + updateVerificationTopics({ + projectId, + topicList, + }), + onSuccess: (_data, variables) => { + queryClient.invalidateQueries({ + queryKey: ["verify", "topics", variables.projectId], + }); + }, + onError: (error) => { + console.error("Failed to update verification topics", error); + toast.error("Failed to update verification topics"); + }, + }); +}; diff --git a/echo/frontend/src/lib/api.ts b/echo/frontend/src/lib/api.ts index 9e62f3f6..b7dd1de6 100644 --- a/echo/frontend/src/lib/api.ts +++ b/echo/frontend/src/lib/api.ts @@ -1063,61 +1063,66 @@ export const generateConversationSummary = async (conversationId: string) => { >(`/conversations/${conversationId}/summarize`); }; -// Mock API call for generating verification artefacts -// TODO: Replace with actual API endpoint -export const generateVerificationArtefact = async (payload: { - conversationId: string; - topicList: string[]; -}): Promise< - { - id: string; - approved_at?: string | null; - content: string; - conversation_id: string; - key: string; - read_aloud_stream_url: string; - }[] -> => { - // Simulate API delay (3 seconds) - await new Promise((resolve) => setTimeout(resolve, 2000)); - - // Mock response matching the API pattern - const mockContent = ` -### 1. Mapping, Wisdom, and Collaboration - -The participants exchanged perspectives on collective intelligence, mapping tools, and developing processes that foster group wisdom. There was mutual interest in both browser-based collaborative mind mapping (like Mindmeister and XMind) and participatory conversation tools that can capture, visualize, and refine insights. - -### 2. 
Influences and Foundations +export type VerificationTopicTranslation = { + label: string; +}; -They discussed influential thinkers and frameworks that shape their approaches: +export type VerificationTopicMetadata = { + key: string; + prompt?: string | null; + icon?: string | null; + sort?: number | null; + translations: Record<string, VerificationTopicTranslation>; +}; -* Tom Atlee's Wise Democracy Pattern Language, with an emphasis on collective wisdom oriented toward long-term, broad benefit. -* Doug Engelbart's concepts of dynamic knowledge repositories and networked improvement communities, emphasizing continual, systemic improvement. +export type VerificationTopicsResponse = { + selected_topics: string[]; + available_topics: VerificationTopicMetadata[]; +}; -### 3. Next Steps +export const getVerificationTopics = async (projectId: string) => { + return api.get<unknown, VerificationTopicsResponse>(`/verify/topics/${projectId}`); +}; -Moving forward, the team agreed to: +export const updateVerificationTopics = async ({ + projectId, + topicList, +}: { + projectId: string; + topicList: string[]; +}) => { + return api.put<unknown, VerificationTopicsResponse>( + `/verify/topics/${projectId}`, + { + topic_list: topicList, + }, + ); +}; -1. Schedule a follow-up meeting to dive deeper into specific tools -2. Share relevant resources and documentation -3. 
Explore potential collaboration opportunities`; +export type VerificationArtifact = { + id: string; + approved_at?: string | null; + content: string; + conversation_id: string; + key: string; + read_aloud_stream_url: string; +}; - return [ +export const generateVerificationArtefact = async (payload: { + conversationId: string; + topicList: string[]; +}): Promise<VerificationArtifact[]> => { + const response = await api.post< + unknown, { - approved_at: new Date().toISOString(), - content: mockContent, - conversation_id: payload.conversationId, - id: `artifact-${Date.now()}`, - key: `key-${Date.now()}`, - read_aloud_stream_url: "", - }, - ]; + artifact_list?: VerificationArtifact[]; + } + >("/verify/generate", { + conversation_id: payload.conversationId, + topic_list: payload.topicList, + }); - // When ready to use real API, replace with: - // return apiNoAuth.post<GenerateVerificationArtefactRequest, GenerateVerificationArtefactResponse>( - // '/verify/generate', - // payload - // ); + return response?.artifact_list ?? 
[]; }; export const unsubscribeParticipant = async ( diff --git a/echo/frontend/src/routes/project/ProjectRoutes.tsx b/echo/frontend/src/routes/project/ProjectRoutes.tsx index 3f0fc81a..2102b2cf 100644 --- a/echo/frontend/src/routes/project/ProjectRoutes.tsx +++ b/echo/frontend/src/routes/project/ProjectRoutes.tsx @@ -2,7 +2,10 @@ import { Trans } from "@lingui/react/macro"; import { Alert, Divider, LoadingOverlay, Stack } from "@mantine/core"; import { useMemo } from "react"; import { useParams } from "react-router"; -import { useProjectById } from "@/components/project/hooks"; +import { + useProjectById, + useVerificationTopicsQuery, +} from "@/components/project/hooks"; import ProjectBasicEdit from "@/components/project/ProjectBasicEdit"; import { ProjectDangerZone } from "@/components/project/ProjectDangerZone"; import { ProjectExportSection } from "@/components/project/ProjectExportSection"; @@ -103,6 +106,10 @@ export const ProjectPortalSettingsRoute = () => { // @ts-expect-error tags field structure not properly typed in Directus SDK query, }); + const verificationTopicsQuery = useVerificationTopicsQuery(projectId); + + const isLoading = projectQuery.isLoading || verificationTopicsQuery.isLoading; + const isError = projectQuery.isError || verificationTopicsQuery.isError; // Memoize the project data to ensure stable reference // biome-ignore lint/correctness/useExhaustiveDependencies: needs to be fixed @@ -118,15 +125,19 @@ export const ProjectPortalSettingsRoute = () => { px={{ base: "1rem", md: "2rem" }} py={{ base: "2rem", md: "4rem" }} > - {projectQuery.isLoading && <LoadingOverlay visible />} - {projectQuery.isError && ( + {isLoading && <LoadingOverlay visible />} + {isError && ( <Alert variant="outline" color="red"> <Trans>Error loading project</Trans> </Alert> )} - {project && !projectQuery.isLoading && ( - <ProjectPortalEditor project={project} /> + {project && verificationTopicsQuery.data && !isLoading && ( + <ProjectPortalEditor + project={project} 
+ verificationTopics={verificationTopicsQuery.data} + isVerificationTopicsLoading={verificationTopicsQuery.isLoading} + /> )} </Stack> ); diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 22573a53..559c456d 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -161,6 +161,10 @@ class GenerateArtifactsResponse(BaseModel): artifact_list: List[ConversationArtifactResponse] +class UpdateVerificationTopicsRequest(BaseModel): + topic_list: List[str] = Field(default_factory=list) + + def _parse_directus_datetime(value: Optional[str]) -> Optional[datetime]: if value is None: return None @@ -325,6 +329,36 @@ async def get_verification_topics( return GetVerificationTopicsResponse(selected_topics=selected_topics, available_topics=topics) +@VerifyRouter.put("/topics/{project_id}", response_model=GetVerificationTopicsResponse) +async def update_verification_topics( + project_id: str, + body: UpdateVerificationTopicsRequest, + auth: DependencyDirectusSession, # noqa: ARG001 - reserved for future use +) -> GetVerificationTopicsResponse: + await _get_project(project_id) + topics = await _get_verification_topics_for_project(project_id) + available_keys = [topic.key for topic in topics if topic.key] + + normalized_keys = [] + for key in body.topic_list: + key = key.strip() + if key and key in available_keys and key not in normalized_keys: + normalized_keys.append(key) + + serialized_keys = ",".join(normalized_keys) + + await run_in_thread_pool( + directus.update_item, + "project", + project_id, + {"selected_verification_key_list": serialized_keys or None}, + ) + + refreshed_topics = await _get_verification_topics_for_project(project_id) + selected_topics = _parse_selected_topics(serialized_keys, refreshed_topics) + return GetVerificationTopicsResponse(selected_topics=selected_topics, available_topics=refreshed_topics) + + async def _get_conversation_with_project(conversation_id: str) -> dict: 
conversation_rows = await run_in_thread_pool( directus.get_items, diff --git a/echo/server/prompt_templates/generate_artifact.en.jinja b/echo/server/prompt_templates/generate_artifact.en.jinja index 5858ce19..70951854 100644 --- a/echo/server/prompt_templates/generate_artifact.en.jinja +++ b/echo/server/prompt_templates/generate_artifact.en.jinja @@ -9,6 +9,7 @@ Use "we/our" language to reinforce collective ownership. Be concise enough to review quickly but complete enough to stand alone. Include enough context that someone reading it weeks later understands. Feel worth signing - important enough to formalize but not overwrought. +Generate the crystalisation in the same major language that is in the transcript / audio. Acknowledge uncertainty where it exists. Make participants feel heard and understood. The magic is turning messy human dialogue into something people want to put their name on - not because it's perfect, but because it's true. From c42ee731995fa8b31edf6cbabb6dabe27f1d1a6e Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti <sameer@dembrane.com> Date: Fri, 7 Nov 2025 11:51:59 +0000 Subject: [PATCH 12/23] api integration for verify --- .../participant/verify/VerifyArtefact.tsx | 234 +++++++++++------- .../participant/verify/hooks/index.ts | 48 ++-- echo/frontend/src/lib/api.ts | 28 +++ echo/server/dembrane/api/verify.py | 195 +++++++++++++++ .../prompt_templates/revise_artifact.en.jinja | 22 ++ 5 files changed, 417 insertions(+), 110 deletions(-) create mode 100644 echo/server/prompt_templates/revise_artifact.en.jinja diff --git a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx index 2a047b33..5c52340e 100644 --- a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx +++ b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx @@ -11,20 +11,29 @@ import { Title, } from "@mantine/core"; import { IconPencil, IconPlayerPause, IconVolume } from 
"@tabler/icons-react"; -import { memo, useEffect, useRef, useState } from "react"; +import { useQueryClient } from "@tanstack/react-query"; +import { memo, useEffect, useMemo, useRef, useState } from "react"; import { useParams, useSearchParams } from "react-router"; +import { toast } from "@/components/common/Toaster"; import { useI18nNavigate } from "@/hooks/useI18nNavigate"; import { Logo } from "../../common/Logo"; import { Markdown } from "../../common/Markdown"; import { MarkdownWYSIWYG } from "../../form/MarkdownWYSIWYG/MarkdownWYSIWYG"; -import { useParticipantProjectById } from "../hooks"; +import { + useConversationChunksQuery, + useParticipantProjectById, +} from "../hooks"; import { useGenerateVerificationArtefact, - useSaveVerificationArtefact, + useUpdateVerificationArtefact, useVerificationTopics, } from "./hooks"; import { VerifyInstructions } from "./VerifyInstructions"; +type ConversationChunkLike = { + timestamp?: string | null; +}; + const LANGUAGE_TO_LOCALE: Record<string, string> = { de: "de-DE", en: "en-US", @@ -35,19 +44,38 @@ const LANGUAGE_TO_LOCALE: Record<string, string> = { const MemoizedMarkdownWYSIWYG = memo(MarkdownWYSIWYG); +const computeLatestTimestamp = ( + chunks: ConversationChunkLike[] | undefined, +): string | null => { + if (!chunks || chunks.length === 0) { + return null; + } + + let latest: string | null = null; + for (const chunk of chunks) { + if (!chunk.timestamp) continue; + const currentIso = new Date(chunk.timestamp).toISOString(); + if (!latest || new Date(currentIso) > new Date(latest)) { + latest = currentIso; + } + } + return latest; +}; + export const VerifyArtefact = () => { const { projectId, conversationId } = useParams(); const navigate = useI18nNavigate(); const [searchParams] = useSearchParams(); - const saveArtefactMutation = useSaveVerificationArtefact(); + const queryClient = useQueryClient(); + const generateArtefactMutation = useGenerateVerificationArtefact(); + const updateArtefactMutation = 
useUpdateVerificationArtefact(); const projectQuery = useParticipantProjectById(projectId ?? ""); const topicsQuery = useVerificationTopics(projectId); + const chunksQuery = useConversationChunksQuery(projectId, conversationId); - // Get selected option from URL params const selectedOptionKey = searchParams.get("key"); - // States const [showInstructions, setShowInstructions] = useState(true); const [isApproving, setIsApproving] = useState(false); const [isRevising, setIsRevising] = useState(false); @@ -62,11 +90,16 @@ export const VerifyArtefact = () => { const [generatedArtifactId, setGeneratedArtifactId] = useState<string | null>( null, ); + const [contextTimestamp, setContextTimestamp] = useState<string | null>(null); - // Ref for audio element const audioRef = useRef<HTMLAudioElement | null>(null); const reviseTimerRef = useRef<NodeJS.Timeout | null>(null); + const latestChunkTimestamp = useMemo( + () => computeLatestTimestamp(chunksQuery.data as ConversationChunkLike[]), + [chunksQuery.data], + ); + const projectLanguage = projectQuery.data?.language ?? "en"; const languageLocale = LANGUAGE_TO_LOCALE[projectLanguage] ?? LANGUAGE_TO_LOCALE.en; @@ -84,12 +117,10 @@ export const VerifyArtefact = () => { selectedTopic?.key ?? 
t`verified`; - // Redirect back if no selected option key useEffect(() => { if ( !selectedOptionKey || - (topicsQuery.isSuccess && - !selectedTopics.includes(selectedOptionKey)) + (topicsQuery.isSuccess && !selectedTopics.includes(selectedOptionKey)) ) { navigate(`/${projectId}/conversation/${conversationId}/verify`, { replace: true, @@ -104,7 +135,7 @@ export const VerifyArtefact = () => { conversationId, ]); - // biome-ignore lint/correctness/useExhaustiveDependencies: we want to regenerate the artefact if the user clicks the next button + // biome-ignore lint/correctness/useExhaustiveDependencies: regenerate only when generating first artefact useEffect(() => { if ( !selectedOptionKey || @@ -112,30 +143,33 @@ export const VerifyArtefact = () => { hasGenerated || topicsQuery.isLoading || !selectedTopics.includes(selectedOptionKey) - ) + ) { return; + } const generateArtefact = async () => { try { setHasGenerated(true); setGeneratedArtifactId(null); setReadAloudUrl(""); + const response = await generateArtefactMutation.mutateAsync({ conversationId, - topicList: [selectedOptionKey], // only one for now + topicList: [selectedOptionKey], }); - // Get the first artifact from the response if (response && response.length > 0) { const artifact = response[0]; setArtefactContent(artifact.content); setGeneratedArtifactId(artifact.id); - // Set read aloud URL from API response setReadAloudUrl(artifact.read_aloud_stream_url || ""); + if (latestChunkTimestamp) { + setContextTimestamp(latestChunkTimestamp); + } } } catch (error) { console.error("Failed to generate artifact:", error); - setHasGenerated(false); // Reset on error so user can retry + setHasGenerated(false); } }; @@ -147,6 +181,7 @@ export const VerifyArtefact = () => { topicsQuery.isLoading, selectedTopics, generateArtefactMutation, + latestChunkTimestamp, ]); const handleNextFromInstructions = () => { @@ -159,18 +194,20 @@ export const VerifyArtefact = () => { !selectedOptionKey || !artefactContent || 
!generatedArtifactId - ) + ) { return; + } setIsApproving(true); try { - await saveArtefactMutation.mutateAsync({ - artefactId: generatedArtifactId, - artefactContent, + await updateArtefactMutation.mutateAsync({ + artifactId: generatedArtifactId, conversationId, + content: artefactContent, + approvedAt: new Date().toISOString(), + successMessage: t`Artefact approved successfully!`, }); - // Navigate back to conversation const conversationUrl = `/${projectId}/conversation/${conversationId}`; navigate(conversationUrl); } finally { @@ -179,23 +216,43 @@ export const VerifyArtefact = () => { }; const handleRevise = async () => { - if (!conversationId || !selectedOptionKey) return; + if (!conversationId || !selectedOptionKey || !generatedArtifactId) { + return; + } + const timestampToUse = contextTimestamp ?? latestChunkTimestamp; + if (!timestampToUse) { + toast.error("No feedback available yet. Try again after sharing updates."); + return; + } + setIsRevising(true); try { - setGeneratedArtifactId(null); - setReadAloudUrl(""); - const response = await generateArtefactMutation.mutateAsync({ - conversationId: conversationId, - topicList: [selectedOptionKey], // only one for now + const response = await updateArtefactMutation.mutateAsync({ + artifactId: generatedArtifactId, + conversationId, + useConversation: { + conversationId, + timestamp: timestampToUse, + }, + successMessage: t`Artefact revised successfully!`, }); - if (response && response.length > 0) { - const artifact = response[0]; - setArtefactContent(artifact.content); - setGeneratedArtifactId(artifact.id); - setReadAloudUrl(artifact.read_aloud_stream_url || ""); + if (response) { + setArtefactContent(response.content); + setGeneratedArtifactId(response.id); + setReadAloudUrl(response.read_aloud_stream_url || ""); } - setLastReviseTime(Date.now()); // Start cooldown timer + + setLastReviseTime(Date.now()); + const refreshed = await chunksQuery.refetch(); + await queryClient.invalidateQueries({ + queryKey: 
["participant", "conversation_chunks", conversationId], + }); + + const updatedLatest = computeLatestTimestamp( + (refreshed.data ?? chunksQuery.data) as ConversationChunkLike[], + ); + setContextTimestamp(updatedLatest ?? timestampToUse); } finally { setIsRevising(false); } @@ -211,14 +268,27 @@ export const VerifyArtefact = () => { setEditedContent(""); }; - const handleSaveEdit = () => { - if (!editedContent) return; - - // Update the artefact content with edited content - setArtefactContent(editedContent); - // Exit edit mode to show Revise/Approve buttons - setIsEditing(false); - setEditedContent(""); + const handleSaveEdit = async () => { + if (!editedContent || !generatedArtifactId || !conversationId) { + return; + } + try { + const response = await updateArtefactMutation.mutateAsync({ + artifactId: generatedArtifactId, + conversationId, + content: editedContent, + successMessage: t`Artefact updated successfully!`, + }); + if (response) { + setArtefactContent(response.content); + } else { + setArtefactContent(editedContent); + } + setIsEditing(false); + setEditedContent(""); + } catch (error) { + console.error("Failed to update artefact content:", error); + } }; const handleReadAloud = () => { @@ -241,31 +311,23 @@ export const VerifyArtefact = () => { } }; - // Cooldown timer for revise button (2 minutes) useEffect(() => { if (lastReviseTime === null) return; - const COOLDOWN_MS = 2 * 60 * 1000; // 2 minutes in milliseconds + const COOLDOWN_MS = 2 * 60 * 1000; const updateTimer = () => { const now = Date.now(); const elapsed = now - lastReviseTime; const remaining = Math.max(0, COOLDOWN_MS - elapsed); - setReviseTimeRemaining(remaining); - - if (remaining === 0) { - if (reviseTimerRef.current) { - clearInterval(reviseTimerRef.current); - reviseTimerRef.current = null; - } + if (remaining === 0 && reviseTimerRef.current) { + clearInterval(reviseTimerRef.current); + reviseTimerRef.current = null; } }; - // Update immediately updateTimer(); - - // Update every 
second reviseTimerRef.current = setInterval(updateTimer, 1000); return () => { @@ -276,7 +338,6 @@ export const VerifyArtefact = () => { }; }, [lastReviseTime]); - // Cleanup audio on unmount useEffect(() => { return () => { if (audioRef.current) { @@ -286,13 +347,34 @@ export const VerifyArtefact = () => { }; }, []); + if (projectQuery.isError || topicsQuery.isError) { + return ( + <Stack gap="md" align="center" justify="center" className="h-full"> + <Text c="red"> + <Trans> + Something went wrong while preparing the verification experience. + </Trans> + </Text> + <Button + variant="subtle" + onClick={() => + navigate(`/${projectId}/conversation/${conversationId}/verify`, { + replace: true, + }) + } + > + <Trans>Go back</Trans> + </Button> + </Stack> + ); + } + const isInitialLoading = topicsQuery.isLoading || projectQuery.isLoading || generateArtefactMutation.isPending || - !generatedArtifactId; + (!generatedArtifactId && !artefactContent); - // step 1: show instructions while generating response from api if (showInstructions) { return ( <VerifyInstructions @@ -303,7 +385,6 @@ export const VerifyArtefact = () => { ); } - // step 2: show artefact with revise/approve once user clicks next on step 1 return ( <Stack gap="lg" className="h-full"> <ScrollArea className="flex-grow"> @@ -326,19 +407,15 @@ export const VerifyArtefact = () => { </Text> <Text size="sm" c="dimmed"> <Trans id="participant.verify.regenerating.artefact.description"> - This would just take a few moments + This will just take a few moments </Trans> </Text> </Stack> </Stack> ) : ( <Stack gap="md"> - {/* Title with Read Aloud Button */} <Group justify="space-between" align="center" wrap="nowrap"> <Title order={4} className="font-semibold"> - {selectedOptionIcon ? 
( - <span className="mr-2">{selectedOptionIcon}</span> - ) : null} <Trans id="participant.verify.artefact.title"> Artefact: {selectedOptionLabel} </Trans> @@ -360,7 +437,6 @@ export const VerifyArtefact = () => { )} </Group> - {/* Markdown Content or Editor */} {isEditing ? ( <MemoizedMarkdownWYSIWYG markdown={editedContent} @@ -376,7 +452,6 @@ export const VerifyArtefact = () => { </Paper> </ScrollArea> - {/* Action buttons */} <Group gap="md" className="w-full sticky bottom-[11%] bg-white py-2 px-1"> {isEditing ? ( <> @@ -394,6 +469,7 @@ export const VerifyArtefact = () => { radius="md" className="flex-1" onClick={handleSaveEdit} + loading={updateArtefactMutation.isPending} > <Trans id="participant.verify.action.button.save">Save</Trans> </Button> @@ -412,7 +488,7 @@ export const VerifyArtefact = () => { isRevising || isApproving || reviseTimeRemaining > 0 || - !selectedOptionKey + !generatedArtifactId } > {reviseTimeRemaining > 0 ? ( @@ -464,31 +540,3 @@ export const VerifyArtefact = () => { </Stack> ); }; - const selectedOptionIcon = - (selectedTopic && - (TOPIC_ICON_MAP[selectedTopic.key] ?? - (selectedTopic.icon && !selectedTopic.icon.startsWith(":") - ? selectedTopic.icon - : undefined))) ?? - undefined; - if (projectQuery.isError || topicsQuery.isError) { - return ( - <Stack gap="md" align="center" justify="center" className="h-full"> - <Text c="red"> - <Trans> - Something went wrong while preparing the verification experience. 
- </Trans> - </Text> - <Button - variant="subtle" - onClick={() => - navigate(`/${projectId}/conversation/${conversationId}/verify`, { - replace: true, - }) - } - > - <Trans>Go back</Trans> - </Button> - </Stack> - ); - } diff --git a/echo/frontend/src/components/participant/verify/hooks/index.ts b/echo/frontend/src/components/participant/verify/hooks/index.ts index d1dba5c4..a8906922 100644 --- a/echo/frontend/src/components/participant/verify/hooks/index.ts +++ b/echo/frontend/src/components/participant/verify/hooks/index.ts @@ -1,9 +1,11 @@ -import { readItems, updateItem } from "@directus/sdk"; +import { readItems } from "@directus/sdk"; import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; import { toast } from "@/components/common/Toaster"; import { generateVerificationArtefact, getVerificationTopics, + updateVerificationArtefact, + type UpdateVerificationArtefactPayload, } from "@/lib/api"; import { directus } from "@/lib/directus"; @@ -27,32 +29,44 @@ export const useGenerateVerificationArtefact = () => { }; // Hook for saving verification artefacts -export const useSaveVerificationArtefact = () => { +type UpdateArtefactVariables = { + artifactId: string; + conversationId: string; + useConversation?: { + conversationId: string; + timestamp: string; + }; + content?: string; + approvedAt?: string; + successMessage?: string; +}; + +export const useUpdateVerificationArtefact = () => { const queryClient = useQueryClient(); return useMutation({ mutationFn: async ({ - artefactId, - conversationId, - artefactContent, - }: { - artefactId: string; - conversationId: string; - artefactContent: string; - }) => { - return directus.request( - updateItem("conversation_artifact", artefactId, { - approved_at: new Date().toISOString(), - content: artefactContent, - }), - ); + artifactId, + useConversation, + content, + approvedAt, + }: UpdateArtefactVariables) => { + const payload: UpdateVerificationArtefactPayload = { + artifactId, + useConversation, 
+ content, + approvedAt, + }; + return updateVerificationArtefact(payload); }, onError: (error) => { console.error("Failed to save verification artefact:", error); toast.error("Failed to approve artefact. Please try again."); }, onSuccess: (_data, variables) => { - toast.success("Artefact approved successfully!"); + toast.success( + variables?.successMessage ?? "Verification artefact updated successfully!", + ); queryClient.invalidateQueries({ queryKey: ["conversations", variables.conversationId], }); diff --git a/echo/frontend/src/lib/api.ts b/echo/frontend/src/lib/api.ts index b7dd1de6..5bf20379 100644 --- a/echo/frontend/src/lib/api.ts +++ b/echo/frontend/src/lib/api.ts @@ -1125,6 +1125,34 @@ export const generateVerificationArtefact = async (payload: { return response?.artifact_list ?? []; }; +export type UpdateVerificationArtefactPayload = { + artifactId: string; + useConversation?: { + conversationId: string; + timestamp: string; + }; + content?: string; + approvedAt?: string; +}; + +export const updateVerificationArtefact = async ({ + artifactId, + useConversation, + content, + approvedAt, +}: UpdateVerificationArtefactPayload) => { + return api.put<unknown, VerificationArtifact>(`/verify/artifact/${artifactId}`, { + use_conversation: useConversation + ? 
{ + conversationId: useConversation.conversationId, + timestamp: useConversation.timestamp, + } + : undefined, + content, + approvedAt, + }); +}; + export const unsubscribeParticipant = async ( projectId: string, token: string, diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 559c456d..819e3611 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -165,6 +165,25 @@ class UpdateVerificationTopicsRequest(BaseModel): topic_list: List[str] = Field(default_factory=list) +class UseConversationPayload(BaseModel): + conversation_id: str = Field(..., alias="conversationId") + timestamp: datetime + + class Config: + allow_population_by_field_name = True + + +class UpdateArtifactRequest(BaseModel): + use_conversation: Optional[UseConversationPayload] = Field( + None, alias="useConversation" + ) + content: Optional[str] = None + approved_at: Optional[datetime] = Field(None, alias="approvedAt") + + class Config: + allow_population_by_field_name = True + + def _parse_directus_datetime(value: Optional[str]) -> Optional[datetime]: if value is None: return None @@ -359,6 +378,32 @@ async def update_verification_topics( return GetVerificationTopicsResponse(selected_topics=selected_topics, available_topics=refreshed_topics) +async def _get_artifact_or_404(artifact_id: str) -> dict: + artifact_rows = await run_in_thread_pool( + directus.get_items, + "conversation_artifact", + { + "query": { + "filter": {"id": {"_eq": artifact_id}}, + "fields": [ + "id", + "conversation_id", + "content", + "key", + "approved_at", + "read_aloud_stream_url", + ], + "limit": 1, + } + }, + ) + + if not artifact_rows: + raise HTTPException(status_code=404, detail="Artifact not found") + + return artifact_rows[0] + + async def _get_conversation_with_project(conversation_id: str) -> dict: conversation_rows = await run_in_thread_pool( directus.get_items, @@ -464,6 +509,17 @@ def _build_transcript_text(chunks: List[dict]) -> str: 
return "\n".join(transcripts) +def _build_feedback_text(chunks: List[dict], reference_time: datetime) -> str: + feedback_segments: List[str] = [] + for chunk in chunks: + timestamp = chunk.get("timestamp") + if timestamp and isinstance(timestamp, datetime) and timestamp > reference_time: + transcript = (chunk.get("transcript") or "").strip() + if transcript: + feedback_segments.append(f"[{timestamp.isoformat()}] {transcript}") + return "\n".join(feedback_segments) + + def _select_audio_chunks( chunks: List[dict], last_artifact_time: Optional[datetime], @@ -665,3 +721,142 @@ async def generate_verification_artifacts( ) return GenerateArtifactsResponse(artifact_list=[artifact_response]) + + +@VerifyRouter.put("/artifact/{artifact_id}", response_model=ConversationArtifactResponse) +async def update_verification_artifact( + artifact_id: str, + body: UpdateArtifactRequest, + auth: DependencyDirectusSession, # noqa: ARG001 - reserved for future use +) -> ConversationArtifactResponse: + if not (body.use_conversation or body.content is not None or body.approved_at is not None): + raise HTTPException(status_code=400, detail="No updates provided") + + if body.use_conversation and body.content is not None: + raise HTTPException( + status_code=400, + detail="Provide either useConversation or content, not both", + ) + + artifact = await _get_artifact_or_404(artifact_id) + conversation_id = artifact.get("conversation_id") + + updates: Dict[str, Any] = {} + + if body.approved_at is not None: + updates["approved_at"] = body.approved_at.isoformat() + + generated_text = None + + if body.use_conversation: + if not GCP_SA_JSON: + raise HTTPException(status_code=500, detail="GCP credentials are not configured") + + reference_conversation_id = body.use_conversation.conversation_id + reference_timestamp = body.use_conversation.timestamp + + conversation = await _get_conversation_with_project(reference_conversation_id) + chunks = await _get_conversation_chunks(reference_conversation_id) + 
+ conversation_transcript = _build_transcript_text(chunks) + feedback_text = _build_feedback_text(chunks, reference_timestamp) + + if not feedback_text and not any( + chunk.get("path") + for chunk in chunks + if chunk.get("timestamp") + and isinstance(chunk.get("timestamp"), datetime) + and chunk.get("timestamp") > reference_timestamp + and not (chunk.get("transcript") or "").strip() + ): + raise HTTPException( + status_code=400, + detail="No new feedback found since provided timestamp", + ) + + audio_chunks = _select_audio_chunks(chunks, reference_timestamp) + + system_prompt = render_prompt( + "revise_artifact", + "en", + { + "transcript": conversation_transcript or "No transcript available.", + "outcome": artifact.get("content") or "", + "feedback": feedback_text or "No textual feedback available.", + }, + ) + + message_content: List[Dict[str, Any]] = [ + { + "type": "text", + "text": "Please revise the outcome using the feedback provided. Audio clips accompany segments without transcripts.", + } + ] + + for chunk in audio_chunks: + timestamp = chunk.get("timestamp") + ts_value = timestamp.isoformat() if isinstance(timestamp, datetime) else "unknown" + chunk_id = chunk.get("id") + message_content.append( + { + "type": "text", + "text": f"Audio chunk {chunk_id} captured at {ts_value}", + } + ) + path = chunk.get("path") + if path: + try: + message_content.append(_get_audio_file_object(path)) + except Exception as exc: # pragma: no cover - logging side effect + logger.warning("Failed to attach audio chunk %s: %s", chunk_id, exc) + + try: + response = litellm.completion( + model="vertex_ai/gemini-2.5-flash", + vertex_credentials=GCP_SA_JSON, + messages=[ + { + "role": "system", + "content": [ + { + "type": "text", + "text": system_prompt, + } + ], + }, + { + "role": "user", + "content": message_content, + }, + ], + ) + except Exception as exc: # pragma: no cover - external failure + logger.error("Gemini revision failed: %s", exc, exc_info=True) + raise 
HTTPException(status_code=500, detail="Failed to revise verification artifact") from exc + + generated_text = _extract_response_text(response) + updates["content"] = generated_text + elif body.content is not None: + updates["content"] = body.content + + if not updates: + raise HTTPException(status_code=400, detail="No valid fields to update") + + updated_artifact = await run_in_thread_pool( + directus.update_item, + "conversation_artifact", + artifact_id, + updates, + ) + updated_data = updated_artifact.get("data", {}) + + return ConversationArtifactResponse( + id=updated_data.get("id", artifact_id), + key=updated_data.get("key"), + content=updated_data.get("content") or updates.get("content") or artifact.get("content") or "", + conversation_id=updated_data.get("conversation_id") or conversation_id or "", + approved_at=updated_data.get("approved_at") or updates.get("approved_at"), + read_aloud_stream_url=updated_data.get("read_aloud_stream_url") + or artifact.get("read_aloud_stream_url") + or "", + ) diff --git a/echo/server/prompt_templates/revise_artifact.en.jinja b/echo/server/prompt_templates/revise_artifact.en.jinja new file mode 100644 index 00000000..3c8467e3 --- /dev/null +++ b/echo/server/prompt_templates/revise_artifact.en.jinja @@ -0,0 +1,22 @@ +You are provided with: +A conversation transcript between two or more participants. +An artifact created to verify the contributions of the conversation on a wider collaborative process. +Additional participant feedback on that outcome. + +Transcript: +{{ transcript }} + +Outcome: +{{ outcome }} + +Feedback: +{{ feedback }} + +Your task is to revise the outcome based on this feedback so that it more accurately reflects the original conversation, as interpreted and according to the needs of the participants. + +Please note: +- Use participant feedback to correct errors, clarify ambiguous points, or add missing references from the original conversation. 
+- Integrate all corrections and additions coherently into the outcome’s main content, rather than simply appending meta-comments or highlighting points of contention. +- Avoid framing corrections as “negations” (e.g., “X is not Y”), unless clearly necessary for accurate understanding; instead, place each reference or influence in the most contextually appropriate place in the outcome. +- Ensure the revised outcome is clear, succinct, and consistent with both the spirit and content of the original conversation, as clarified by participant reflection. +- Do not include commentary or direct references to the feedback process itself in the final outcome. From c2d75f9ad102b229489d7b1fd8b378b9658a88af Mon Sep 17 00:00:00 2001 From: Usama <reach.usamazafar@gmail.com> Date: Fri, 7 Nov 2025 12:06:56 +0000 Subject: [PATCH 13/23] - add filter for verified conversations --- .../conversation/ConversationAccordion.tsx | 43 ++++++++++++++++--- 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/echo/frontend/src/components/conversation/ConversationAccordion.tsx b/echo/frontend/src/components/conversation/ConversationAccordion.tsx index 0e04eafb..b2a24fb7 100644 --- a/echo/frontend/src/components/conversation/ConversationAccordion.tsx +++ b/echo/frontend/src/components/conversation/ConversationAccordion.tsx @@ -663,6 +663,7 @@ export const ConversationAccordion = ({ }); const [tagSearch, setTagSearch] = useState(""); const [selectedTagIds, setSelectedTagIds] = useState<string[]>([]); + const [showOnlyVerified, setShowOnlyVerified] = useState(false); const allProjectTags = useMemo( () => (projectTags?.tags as unknown as ProjectTag[]) ?? 
[], [projectTags?.tags], @@ -698,6 +699,15 @@ export const ConversationAccordion = ({ }, }, }), + ...(showOnlyVerified && { + artefacts: { + _some: { + approved_at: { + _nnull: true, + }, + }, + }, + }), }, search: debouncedConversationSearchValue, sort: sortBy, @@ -740,18 +750,24 @@ export const ConversationAccordion = ({ () => debouncedConversationSearchValue !== "" || sortBy !== "-created_at" || - selectedTagIds.length > 0, + selectedTagIds.length > 0 || + showOnlyVerified, // Temporarily disabled source filters // sortBy !== "-created_at" || // activeFilters.length !== FILTER_OPTIONS.length, // [debouncedConversationSearchValue, sortBy, activeFilters], - [debouncedConversationSearchValue, sortBy, selectedTagIds.length], + [ + debouncedConversationSearchValue, + sortBy, + selectedTagIds.length, + showOnlyVerified, + ], ); // biome-ignore lint/correctness/useExhaustiveDependencies: <should update when sortBy or selectedTagIds.length changes> const appliedFiltersCount = useMemo(() => { - return selectedTagIds.length; - }, [sortBy, selectedTagIds.length]); + return selectedTagIds.length + (showOnlyVerified ? 1 : 0); + }, [sortBy, selectedTagIds.length, showOnlyVerified]); const [showFilterActions, setShowFilterActions] = useState(false); const [sortMenuOpened, setSortMenuOpened] = useState(false); @@ -765,6 +781,7 @@ export const ConversationAccordion = ({ setShowDuration(true); setSelectedTagIds([]); setTagSearch(""); + setShowOnlyVerified(false); // not sure why only these 2 were needed. biome seems to shut up with these 2. i tried putting all. 
will need to investigate }, [setSortBy, setShowDuration]); @@ -916,7 +933,7 @@ export const ConversationAccordion = ({ <Menu.Target> <Button variant="outline" - size="sm" + size="xs" color="gray" fw={500} leftSection={<IconArrowsUpDown size={16} />} @@ -973,7 +990,7 @@ export const ConversationAccordion = ({ <Button variant="outline" color="gray" - size="sm" + size="xs" fw={500} leftSection={<IconTags size={16} />} rightSection={ @@ -1092,6 +1109,17 @@ export const ConversationAccordion = ({ </Menu.Dropdown> </Menu> + <Button + variant={showOnlyVerified ? "filled" : "outline"} + color={showOnlyVerified ? "blue" : "gray"} + size="xs" + fw={500} + leftSection={<IconRosetteDiscountCheckFilled size={16} />} + onClick={() => setShowOnlyVerified((prev) => !prev)} + > + <Trans id="conversation.filters.verified.text">Verified</Trans> + </Button> + <Tooltip label={t`Reset to default`}> <ActionIcon variant="outline" @@ -1099,6 +1127,9 @@ export const ConversationAccordion = ({ onClick={resetEverything} aria-label={t`Reset to default`} disabled={!filterApplied} + size="md" + py={14} + ml="auto" > <IconX size={16} /> </ActionIcon> From 0cb39133d8ed6f792f1c40bdc9569960082ae3c3 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti <sameer@dembrane.com> Date: Mon, 10 Nov 2025 12:54:45 +0000 Subject: [PATCH 14/23] updates --- echo/.cursor/mcp.json | 17 - .../conversation/ConversationAccordion.tsx | 20 +- .../conversation/VerifiedArtefactsSection.tsx | 21 +- .../components/conversation/hooks/index.ts | 2 +- .../frontend/src/components/layout/Header.tsx | 2 +- .../verify/VerifiedArtefactItem.tsx | 10 +- .../verify/VerifiedArtefactsList.tsx | 34 +- .../participant/verify/VerifyArtefact.tsx | 43 +- .../participant/verify/VerifySelection.tsx | 10 +- .../participant/verify/hooks/index.ts | 57 +- .../project/ProjectPortalEditor.tsx | 95 +- .../src/components/project/hooks/index.ts | 27 - .../src/components/report/ReportTimeline.tsx | 2 - .../settings/TwoFactorSettingsCard.tsx | 90 +- 
.../src/components/settings/hooks/index.ts | 2 +- echo/frontend/src/lib/api.ts | 44 +- echo/frontend/src/lib/typesDirectus.d.ts | 1228 ++++++----------- .../src/routes/project/ProjectRoutes.tsx | 2 +- echo/server/dembrane/anthropic.py | 108 -- echo/server/dembrane/api/verify.py | 57 +- .../dembrane/audio_lightrag/__init__.py | 0 .../audio_lightrag/services/__init__.py | 1 - .../audio_lightrag/services/contextualizer.py | 102 -- .../audio_lightrag/utils/async_utils.py | 102 -- .../audio_lightrag/utils/echo_utils.py | 82 -- .../audio_lightrag/utils/lightrag_utils.py | 492 ------- .../audio_lightrag/utils/litellm_utils.py | 56 - echo/server/dembrane/main.py | 46 + echo/server/dembrane/service/project.py | 4 +- 29 files changed, 736 insertions(+), 2020 deletions(-) delete mode 100644 echo/.cursor/mcp.json delete mode 100644 echo/server/dembrane/anthropic.py delete mode 100644 echo/server/dembrane/audio_lightrag/__init__.py delete mode 100644 echo/server/dembrane/audio_lightrag/services/__init__.py delete mode 100644 echo/server/dembrane/audio_lightrag/services/contextualizer.py delete mode 100644 echo/server/dembrane/audio_lightrag/utils/async_utils.py delete mode 100644 echo/server/dembrane/audio_lightrag/utils/echo_utils.py delete mode 100644 echo/server/dembrane/audio_lightrag/utils/lightrag_utils.py delete mode 100644 echo/server/dembrane/audio_lightrag/utils/litellm_utils.py diff --git a/echo/.cursor/mcp.json b/echo/.cursor/mcp.json deleted file mode 100644 index 2d431242..00000000 --- a/echo/.cursor/mcp.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "mcpServers": { - "playwright": { - "command": "npx", - "args": [ - "-y", - "@executeautomation/playwright-mcp-server" - ] - }, - "fetch": { - "command": "uvx", - "args": [ - "mcp-server-fetch" - ] - } - } -} \ No newline at end of file diff --git a/echo/frontend/src/components/conversation/ConversationAccordion.tsx b/echo/frontend/src/components/conversation/ConversationAccordion.tsx index b2a24fb7..ceb32991 100644 --- 
a/echo/frontend/src/components/conversation/ConversationAccordion.tsx +++ b/echo/frontend/src/components/conversation/ConversationAccordion.tsx @@ -121,7 +121,9 @@ const ConversationAccordionLabelChatSelection = ({ // Check if conversation has any content const hasContent = conversation.chunks?.some( - (chunk) => chunk.transcript && chunk.transcript.trim().length > 0, + (chunk) => + (chunk as unknown as ConversationChunk).transcript && + (chunk as unknown as ConversationChunk).transcript?.trim().length > 0, ); const handleSelectChat = () => { @@ -196,7 +198,6 @@ export const MoveConversationButton = ({ enabled: opened, query: { filter: { - // @ts-expect-error not tyed _and: [{ id: { _neq: conversation.project_id } }], }, search: search, @@ -369,7 +370,10 @@ export const ConversationStatusIndicators = ({ const hasOnlyTextContent = useMemo( () => conversation.chunks?.length > 0 && - conversation.chunks?.every((chunk) => chunk.source === "PORTAL_TEXT"), + conversation.chunks?.every( + (chunk) => + (chunk as unknown as ConversationChunk).source === "PORTAL_TEXT", + ), [conversation.chunks], ); @@ -469,9 +473,11 @@ const ConversationAccordionItem = ({ // Check if conversation has approved artefacts const hasVerifiedArtefacts = - conversation?.artefacts && - conversation?.artefacts?.length > 0 && - conversation?.artefacts?.some((artefact) => artefact.approved_at); + conversation?.conversation_artifacts && + conversation?.conversation_artifacts?.length > 0 && + conversation?.conversation_artifacts?.some( + (artefact) => artefact.approved_at, + ); return ( <NavigationButton @@ -700,7 +706,7 @@ export const ConversationAccordion = ({ }, }), ...(showOnlyVerified && { - artefacts: { + conversation_artifacts: { _some: { approved_at: { _nnull: true, diff --git a/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx b/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx index 97daa587..f4b526d1 100644 --- 
a/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx +++ b/echo/frontend/src/components/conversation/VerifiedArtefactsSection.tsx @@ -1,4 +1,3 @@ -import { readItems } from "@directus/sdk"; import { t } from "@lingui/core/macro"; import { Trans } from "@lingui/react/macro"; import { @@ -14,7 +13,7 @@ import { IconRosetteDiscountCheckFilled } from "@tabler/icons-react"; import { useQuery } from "@tanstack/react-query"; import { format } from "date-fns"; import { Markdown } from "@/components/common/Markdown"; -import { directus } from "@/lib/directus"; +import { getVerificationArtefacts } from "@/lib/api"; type VerifiedArtefactsSectionProps = { conversationId: string; @@ -36,22 +35,8 @@ export const VerifiedArtefactsSection = ({ // Fetch all artefacts with content for display const { data: artefacts, isLoading } = useQuery({ enabled: !!conversationId, - queryFn: () => - directus.request( - readItems("conversation_artefact", { - fields: [ - "id", - "conversation_id", - "approved_at", - "key", - "content", - // "title", - ], - filter: { conversation_id: { _eq: conversationId } }, - sort: ["-approved_at"], - }), - ), - queryKey: ["conversation_artefacts_full", conversationId], + queryFn: () => getVerificationArtefacts(conversationId), + queryKey: ["verify", "conversation_artifacts", conversationId], }); // Don't show the section if there are no artefacts diff --git a/echo/frontend/src/components/conversation/hooks/index.ts b/echo/frontend/src/components/conversation/hooks/index.ts index 6cc367c1..8eb9f0df 100644 --- a/echo/frontend/src/components/conversation/hooks/index.ts +++ b/echo/frontend/src/components/conversation/hooks/index.ts @@ -904,7 +904,7 @@ export const useInfiniteConversationsByProjectId = ( ], }, { - artefacts: ["id", "approved_at"], + conversation_artifacts: ["id", "approved_at"], }, ], filter: { diff --git a/echo/frontend/src/components/layout/Header.tsx b/echo/frontend/src/components/layout/Header.tsx index 39daef3a..4aceb890 
100644 --- a/echo/frontend/src/components/layout/Header.tsx +++ b/echo/frontend/src/components/layout/Header.tsx @@ -17,12 +17,12 @@ import { } from "@/components/auth/hooks"; import { I18nLink } from "@/components/common/i18nLink"; import { ENABLE_ANNOUNCEMENTS } from "@/config"; +import { useI18nNavigate } from "@/hooks/useI18nNavigate"; import { AnnouncementIcon } from "../announcement/AnnouncementIcon"; import { Announcements } from "../announcement/Announcements"; import { TopAnnouncementBar } from "../announcement/TopAnnouncementBar"; import { Logo } from "../common/Logo"; import { LanguagePicker } from "../language/LanguagePicker"; -import { useI18nNavigate } from "@/hooks/useI18nNavigate"; const User = ({ name, email }: { name: string; email: string }) => ( <div diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx index cc68776e..e72255a7 100644 --- a/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactItem.tsx @@ -1,9 +1,10 @@ import { ActionIcon, Box, Group, Paper, Text } from "@mantine/core"; import { IconRosetteDiscountCheckFilled } from "@tabler/icons-react"; import { format } from "date-fns"; +import type { VerificationArtifact } from "@/lib/api"; type VerifiedArtefactItemProps = { - artefact: ConversationArtefact; + artefact: VerificationArtifact; label: string; icon?: string; onViewArtefact: (artefactId: string) => void; @@ -19,7 +20,12 @@ const formatArtefactTime = (timestamp: string | null | undefined): string => { } }; -export const VerifiedArtefactItem = ({ artefact, label, icon, onViewArtefact }: VerifiedArtefactItemProps) => { +export const VerifiedArtefactItem = ({ + artefact, + label, + icon, + onViewArtefact, +}: VerifiedArtefactItemProps) => { // Format the timestamp using date-fns const formattedDate = formatArtefactTime(artefact.approved_at); 
diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx index 7b060e6c..4757d806 100644 --- a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx @@ -1,14 +1,11 @@ import { Box, Skeleton, Stack } from "@mantine/core"; import { useDisclosure } from "@mantine/hooks"; import { useState } from "react"; +import type { VerificationArtifact } from "@/lib/api"; import { ArtefactModal } from "./ArtefactModal"; -import { - useConversationArtefact, - useConversationArtefacts, - useVerificationTopics, -} from "./hooks"; -import { TOPIC_ICON_MAP } from "./VerifySelection"; +import { useConversationArtefacts, useVerificationTopics } from "./hooks"; import { VerifiedArtefactItem } from "./VerifiedArtefactItem"; +import { TOPIC_ICON_MAP } from "./VerifySelection"; type VerifiedArtefactsListProps = { conversationId: string; @@ -27,10 +24,6 @@ export const VerifiedArtefactsList = ({ const [selectedArtefactId, setSelectedArtefactId] = useState<string | null>( null, ); - - // Fetch the full artefact content when one is selected - const { data: selectedArtefact, isLoading: isLoadingArtefact } = - useConversationArtefact(selectedArtefactId ?? undefined); const topicsQuery = useVerificationTopics(projectId); const LANGUAGE_TO_LOCALE: Record<string, string> = { @@ -49,13 +42,13 @@ export const VerifiedArtefactsList = ({ availableTopics.map((topic) => [ topic.key, { + icon: + TOPIC_ICON_MAP[topic.key] ?? + (topic.icon && !topic.icon.startsWith(":") ? topic.icon : undefined), label: topic.translations?.[locale]?.label ?? topic.translations?.["en-US"]?.label ?? topic.key, - icon: - TOPIC_ICON_MAP[topic.key] ?? - (topic.icon && !topic.icon.startsWith(":") ? 
topic.icon : undefined), }, ]), ); @@ -73,6 +66,8 @@ export const VerifiedArtefactsList = ({ setSelectedArtefactId(null); }; + const artefactList: VerificationArtifact[] = artefacts ?? []; + if (isLoading) { return ( <Stack gap="sm" align="flex-end"> @@ -81,14 +76,14 @@ export const VerifiedArtefactsList = ({ ); } - if (!artefacts || artefacts.length === 0) { + if (artefactList.length === 0) { return null; } return ( <> <Box> - {artefacts.map((artefact: ConversationArtefact) => ( + {artefactList.map((artefact) => ( <VerifiedArtefactItem key={artefact.id} artefact={artefact} @@ -102,8 +97,13 @@ export const VerifiedArtefactsList = ({ opened={opened} onClose={handleCloseModal} onExited={handleModalExited} - isLoading={isLoadingArtefact} - artefact={selectedArtefact} + isLoading={false} + artefact={ + selectedArtefactId + ? (artefactList.find((item) => item.id === selectedArtefactId) ?? + null) + : null + } /> </> ); diff --git a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx index 5c52340e..534ad60a 100644 --- a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx +++ b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx @@ -80,7 +80,6 @@ export const VerifyArtefact = () => { const [isApproving, setIsApproving] = useState(false); const [isRevising, setIsRevising] = useState(false); const [artefactContent, setArtefactContent] = useState<string>(""); - const [hasGenerated, setHasGenerated] = useState(false); const [isEditing, setIsEditing] = useState(false); const [editedContent, setEditedContent] = useState<string>(""); const [readAloudUrl, setReadAloudUrl] = useState<string>(""); @@ -94,6 +93,7 @@ export const VerifyArtefact = () => { const audioRef = useRef<HTMLAudioElement | null>(null); const reviseTimerRef = useRef<NodeJS.Timeout | null>(null); + const generationKeyRef = useRef<string | null>(null); const latestChunkTimestamp = useMemo( () => 
computeLatestTimestamp(chunksQuery.data as ConversationChunkLike[]), @@ -135,30 +135,37 @@ export const VerifyArtefact = () => { conversationId, ]); - // biome-ignore lint/correctness/useExhaustiveDependencies: regenerate only when generating first artefact + // biome-ignore lint/correctness/useExhaustiveDependencies: generation guard handled via ref useEffect(() => { if ( !selectedOptionKey || !conversationId || - hasGenerated || topicsQuery.isLoading || !selectedTopics.includes(selectedOptionKey) ) { return; } + const generationKey = `${conversationId}:${selectedOptionKey}`; + if (generationKeyRef.current === generationKey) { + return; + } + + let isCancelled = false; + generationKeyRef.current = generationKey; + setGeneratedArtifactId(null); + setReadAloudUrl(""); + setArtefactContent(""); + setContextTimestamp(null); + const generateArtefact = async () => { try { - setHasGenerated(true); - setGeneratedArtifactId(null); - setReadAloudUrl(""); - const response = await generateArtefactMutation.mutateAsync({ conversationId, topicList: [selectedOptionKey], }); - if (response && response.length > 0) { + if (!isCancelled && response && response.length > 0) { const artifact = response[0]; setArtefactContent(artifact.content); setGeneratedArtifactId(artifact.id); @@ -169,15 +176,19 @@ export const VerifyArtefact = () => { } } catch (error) { console.error("Failed to generate artifact:", error); - setHasGenerated(false); + if (!isCancelled) { + generationKeyRef.current = null; + } } }; generateArtefact(); + return () => { + isCancelled = true; + }; }, [ selectedOptionKey, conversationId, - hasGenerated, topicsQuery.isLoading, selectedTopics, generateArtefactMutation, @@ -201,10 +212,10 @@ export const VerifyArtefact = () => { setIsApproving(true); try { await updateArtefactMutation.mutateAsync({ + approvedAt: new Date().toISOString(), artifactId: generatedArtifactId, - conversationId, content: artefactContent, - approvedAt: new Date().toISOString(), + conversationId, 
successMessage: t`Artefact approved successfully!`, }); @@ -221,7 +232,9 @@ export const VerifyArtefact = () => { } const timestampToUse = contextTimestamp ?? latestChunkTimestamp; if (!timestampToUse) { - toast.error("No feedback available yet. Try again after sharing updates."); + toast.error( + "No feedback available yet. Try again after sharing updates.", + ); return; } @@ -230,11 +243,11 @@ export const VerifyArtefact = () => { const response = await updateArtefactMutation.mutateAsync({ artifactId: generatedArtifactId, conversationId, + successMessage: t`Artefact revised successfully!`, useConversation: { conversationId, timestamp: timestampToUse, }, - successMessage: t`Artefact revised successfully!`, }); if (response) { @@ -275,8 +288,8 @@ export const VerifyArtefact = () => { try { const response = await updateArtefactMutation.mutateAsync({ artifactId: generatedArtifactId, - conversationId, content: editedContent, + conversationId, successMessage: t`Artefact updated successfully!`, }); if (response) { diff --git a/echo/frontend/src/components/participant/verify/VerifySelection.tsx b/echo/frontend/src/components/participant/verify/VerifySelection.tsx index a63ceca2..47e2124a 100644 --- a/echo/frontend/src/components/participant/verify/VerifySelection.tsx +++ b/echo/frontend/src/components/participant/verify/VerifySelection.tsx @@ -53,9 +53,9 @@ export const VerifySelection = () => { "•"; return { + icon, key: topic.key, label: localizedLabel, - icon, }; }); @@ -99,7 +99,9 @@ export const VerifySelection = () => { )} {!isLoading && availableOptions.length === 0 && ( <Text size="sm" c="dimmed"> - <Trans>No verification topics are configured for this project.</Trans> + <Trans> + No verification topics are configured for this project. + </Trans> </Text> )} {availableOptions.map((option) => ( @@ -135,9 +137,7 @@ export const VerifySelection = () => { {isLoading ? 
( <Trans>Loading…</Trans> ) : ( - <Trans id="participant.verify.selection.button.next"> - Next - </Trans> + <Trans id="participant.verify.selection.button.next">Next</Trans> )} </Button> </Stack> diff --git a/echo/frontend/src/components/participant/verify/hooks/index.ts b/echo/frontend/src/components/participant/verify/hooks/index.ts index a8906922..ee9a5da0 100644 --- a/echo/frontend/src/components/participant/verify/hooks/index.ts +++ b/echo/frontend/src/components/participant/verify/hooks/index.ts @@ -1,13 +1,12 @@ -import { readItems } from "@directus/sdk"; import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; import { toast } from "@/components/common/Toaster"; import { generateVerificationArtefact, + getVerificationArtefacts, getVerificationTopics, - updateVerificationArtefact, type UpdateVerificationArtefactPayload, + updateVerificationArtefact, } from "@/lib/api"; -import { directus } from "@/lib/directus"; export const useVerificationTopics = (projectId: string | undefined) => { return useQuery({ @@ -19,12 +18,22 @@ export const useVerificationTopics = (projectId: string | undefined) => { // Hook for generating verification artefacts export const useGenerateVerificationArtefact = () => { + const queryClient = useQueryClient(); return useMutation({ mutationFn: generateVerificationArtefact, onError: (error) => { console.error("Failed to generate verification artefact:", error); toast.error("Failed to generate artefact. 
Please try again."); }, + onSuccess: (_data, variables) => { + queryClient.invalidateQueries({ + queryKey: [ + "verify", + "conversation_artifacts", + variables.conversationId, + ], + }); + }, }); }; @@ -52,10 +61,10 @@ export const useUpdateVerificationArtefact = () => { approvedAt, }: UpdateArtefactVariables) => { const payload: UpdateVerificationArtefactPayload = { + approvedAt, artifactId, - useConversation, content, - approvedAt, + useConversation, }; return updateVerificationArtefact(payload); }, @@ -65,13 +74,18 @@ export const useUpdateVerificationArtefact = () => { }, onSuccess: (_data, variables) => { toast.success( - variables?.successMessage ?? "Verification artefact updated successfully!", + variables?.successMessage ?? + "Verification artefact updated successfully!", ); queryClient.invalidateQueries({ queryKey: ["conversations", variables.conversationId], }); queryClient.invalidateQueries({ - queryKey: ["conversation_artefacts", variables.conversationId], + queryKey: [ + "verify", + "conversation_artifacts", + variables.conversationId, + ], }); }, }); @@ -83,32 +97,7 @@ export const useConversationArtefacts = ( ) => { return useQuery({ enabled: !!conversationId, - queryFn: () => - directus.request( - readItems("conversation_artefact", { - fields: ["id", "conversation_id", "approved_at", "key"], - filter: { conversation_id: { _eq: conversationId } }, - sort: ["-approved_at"], - }), - ), - queryKey: ["conversation_artefacts", conversationId], - }); -}; - -// Hook for fetching a single artefact by ID (with aggressive caching - content never changes) -export const useConversationArtefact = (artefactId: string | undefined) => { - return useQuery({ - enabled: !!artefactId, - queryFn: () => - directus.request( - readItems("conversation_artefact", { - fields: ["id", "content", "conversation_id", "approved_at"], - filter: { id: { _eq: artefactId } }, - limit: 1, - }), - ), - queryKey: ["conversation_artefact", artefactId], - select: (data) => (data.length > 0 
? data[0] : null), - staleTime: Number.POSITIVE_INFINITY, + queryFn: () => getVerificationArtefacts(conversationId!), + queryKey: ["verify", "conversation_artifacts", conversationId], }); }; diff --git a/echo/frontend/src/components/project/ProjectPortalEditor.tsx b/echo/frontend/src/components/project/ProjectPortalEditor.tsx index 227a28cd..dee419d7 100644 --- a/echo/frontend/src/components/project/ProjectPortalEditor.tsx +++ b/echo/frontend/src/components/project/ProjectPortalEditor.tsx @@ -20,6 +20,7 @@ import { Title, } from "@mantine/core"; import { IconEye, IconEyeOff, IconRefresh } from "@tabler/icons-react"; +import { useQueryClient } from "@tanstack/react-query"; import { Resizable } from "re-resizable"; import { memo, useCallback, useEffect, useMemo, useRef, useState } from "react"; import { Controller, useForm, useWatch } from "react-hook-form"; @@ -31,10 +32,7 @@ import { FormLabel } from "../form/FormLabel"; import { MarkdownWYSIWYG } from "../form/MarkdownWYSIWYG/MarkdownWYSIWYG"; import { SaveStatus } from "../form/SaveStatus"; import { TOPIC_ICON_MAP } from "../participant/verify/VerifySelection"; -import { - useUpdateProjectByIdMutation, - useUpdateVerificationTopicsMutation, -} from "./hooks"; +import { useUpdateProjectByIdMutation } from "./hooks"; import { useProjectSharingLink } from "./ProjectQRCode"; import { ProjectTagsInput } from "./ProjectTagsInput"; @@ -65,7 +63,9 @@ const LANGUAGE_TO_LOCALE: Record<string, string> = { }; const normalizeTopicList = (topics: string[]): string[] => - Array.from(new Set(topics.map((topic) => topic.trim()).filter(Boolean))).sort(); + Array.from( + new Set(topics.map((topic) => topic.trim()).filter(Boolean)), + ).sort(); const ProperNounInput = ({ value, @@ -163,6 +163,7 @@ const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ verificationTopics, isVerificationTopicsLoading = false, }) => { + const queryClient = useQueryClient(); const [showPreview, setShowPreview] = useState(false); 
const link = useProjectSharingLink(project); const [previewKey, setPreviewKey] = useState(0); @@ -181,16 +182,14 @@ const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ const availableVerifyTopics = useMemo( () => (verificationTopics?.available_topics ?? []).map((topic) => ({ + icon: + TOPIC_ICON_MAP[topic.key] ?? + (topic.icon && !topic.icon.startsWith(":") ? topic.icon : undefined), key: topic.key, label: topic.translations?.[languageLocale]?.label ?? topic.translations?.["en-US"]?.label ?? topic.key, - icon: - TOPIC_ICON_MAP[topic.key] ?? - (topic.icon && !topic.icon.startsWith(":") - ? topic.icon - : undefined), })), [verificationTopics, languageLocale], ); @@ -227,14 +226,21 @@ const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ const formResolver = useMemo(() => zodResolver(FormSchema), []); - const { control, handleSubmit, watch, formState, reset, setValue, getValues } = - useForm<ProjectPortalFormValues>({ - defaultValues, - mode: "onChange", - // for validation - resolver: formResolver, - reValidateMode: "onChange", - }); + const { + control, + handleSubmit, + watch, + formState, + reset, + setValue, + getValues, + } = useForm<ProjectPortalFormValues>({ + defaultValues, + mode: "onChange", + // for validation + resolver: formResolver, + reValidateMode: "onChange", + }); const watchedReplyMode = useWatch({ control, @@ -252,50 +258,32 @@ const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ }); const updateProjectMutation = useUpdateProjectByIdMutation(); - const updateVerificationTopicsMutation = - useUpdateVerificationTopicsMutation(); const onSave = useCallback( async (values: ProjectPortalFormValues) => { const { verification_topics, ...projectPayload } = values; + const normalizedTopics = normalizeTopicList(verification_topics); + const serializedTopics = + normalizedTopics.length > 0 ? 
normalizedTopics.join(",") : null; await updateProjectMutation.mutateAsync({ id: project.id, - payload: projectPayload, + payload: { + ...projectPayload, + selected_verification_key_list: serializedTopics, + }, }); - const normalizedNewTopics = normalizeTopicList(verification_topics); - const normalizedCurrentTopics = normalizeTopicList( - selectedTopicDefaults, - ); - const topicsChanged = - normalizedNewTopics.length !== normalizedCurrentTopics.length || - normalizedNewTopics.some( - (topic, index) => topic !== normalizedCurrentTopics[index], - ); - - if (topicsChanged) { - await updateVerificationTopicsMutation.mutateAsync({ - projectId: project.id, - topicList: normalizedNewTopics, - }); - } + await queryClient.invalidateQueries({ + queryKey: ["verify", "topics", project.id], + }); - // Reset the form with the current values to clear the dirty state - reset( - { - ...values, - verification_topics: normalizedNewTopics, - } - ); + reset({ + ...values, + verification_topics: normalizedTopics, + }); }, - [ - project.id, - updateProjectMutation, - updateVerificationTopicsMutation, - reset, - selectedTopicDefaults, - ], + [project.id, updateProjectMutation, reset, queryClient], ); const { @@ -774,9 +762,8 @@ const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ if (!watchedVerifyEnabled) return; const normalizedCurrent = normalizeTopicList(field.value ?? []); - const isSelected = normalizedCurrent.includes( - topic.key, - ); + const isSelected = + normalizedCurrent.includes(topic.key); const updated = isSelected ? 
normalizedCurrent.filter( (item) => item !== topic.key, diff --git a/echo/frontend/src/components/project/hooks/index.ts b/echo/frontend/src/components/project/hooks/index.ts index e0ba3116..e19b126a 100644 --- a/echo/frontend/src/components/project/hooks/index.ts +++ b/echo/frontend/src/components/project/hooks/index.ts @@ -20,7 +20,6 @@ import { cloneProjectById, getLatestProjectAnalysisRunByProjectId, getVerificationTopics, - updateVerificationTopics, } from "@/lib/api"; import { directus } from "@/lib/directus"; @@ -286,29 +285,3 @@ export const useVerificationTopicsQuery = (projectId: string | undefined) => { queryKey: ["verify", "topics", projectId], }); }; - -export const useUpdateVerificationTopicsMutation = () => { - const queryClient = useQueryClient(); - return useMutation({ - mutationFn: ({ - projectId, - topicList, - }: { - projectId: string; - topicList: string[]; - }) => - updateVerificationTopics({ - projectId, - topicList, - }), - onSuccess: (_data, variables) => { - queryClient.invalidateQueries({ - queryKey: ["verify", "topics", variables.projectId], - }); - }, - onError: (error) => { - console.error("Failed to update verification topics", error); - toast.error("Failed to update verification topics"); - }, - }); -}; diff --git a/echo/frontend/src/components/report/ReportTimeline.tsx b/echo/frontend/src/components/report/ReportTimeline.tsx index 8e81126f..ac8f33b0 100644 --- a/echo/frontend/src/components/report/ReportTimeline.tsx +++ b/echo/frontend/src/components/report/ReportTimeline.tsx @@ -185,8 +185,6 @@ export function ReportTimeline({ ...data.allReports.map((r) => new Date(r.createdAt!).getTime()), ]; - console.log(ticks); - return ( <ResponsiveContainer width="100%" minWidth={300} height={200}> <AreaChart diff --git a/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx b/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx index 0a8174f3..97409d69 100644 --- 
a/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx +++ b/echo/frontend/src/components/settings/TwoFactorSettingsCard.tsx @@ -2,8 +2,8 @@ import { t } from "@lingui/core/macro"; import { Trans } from "@lingui/react/macro"; import { ActionIcon, - Anchor, Alert, + Anchor, Badge, Button, CopyButton, @@ -11,15 +11,15 @@ import { Group, List, Modal, - PasswordInput, Paper, + PasswordInput, + PinInput, Skeleton, Stack, Switch, Text, TextInput, Tooltip, - PinInput, } from "@mantine/core"; import { useDisclosure } from "@mantine/hooks"; import { @@ -65,8 +65,10 @@ export const TwoFactorSettingsCard = ({ isLoading, isTwoFactorEnabled, }: TwoFactorSettingsCardProps) => { - const [enableModalOpened, { close: closeEnableModal, open: openEnableModal }] = - useDisclosure(false); + const [ + enableModalOpened, + { close: closeEnableModal, open: openEnableModal }, + ] = useDisclosure(false); const [ disableModalOpened, { close: closeDisableModal, open: openDisableModal }, @@ -100,21 +102,14 @@ export const TwoFactorSettingsCard = ({ resetGenerateSecret(); resetEnableTwoFactor(); } - }, [ - enableModalOpened, - resetEnableTwoFactor, - resetGenerateSecret, - ]); + }, [enableModalOpened, resetEnableTwoFactor, resetGenerateSecret]); useEffect(() => { if (!disableModalOpened) { setDisableOtp(""); resetDisableTwoFactor(); } - }, [ - disableModalOpened, - resetDisableTwoFactor, - ]); + }, [disableModalOpened, resetDisableTwoFactor]); const handleToggle = () => { if (isTwoFactorEnabled) { @@ -226,7 +221,7 @@ export const TwoFactorSettingsCard = ({ <Paper withBorder p="md" radius="md"> <Stack gap="sm" align="center"> <div className="h-[200px] w-[200px]"> - <QRCode value={generatedSecret.otpauth_url} /> + <QRCode value={generatedSecret.otpauth_url} /> </div> <Group align="center" gap="xs"> <Text fw={600} size="lg"> @@ -294,8 +289,9 @@ export const TwoFactorSettingsCard = ({ </Text> <Text size="sm" c="dimmed" maw={520}> <Trans> - Keep access secure with a one-time code from 
your authenticator - app. Toggle two-factor authentication for this account. + Keep access secure with a one-time code from your + authenticator app. Toggle two-factor authentication for this + account. </Trans> </Text> </Stack> @@ -356,38 +352,40 @@ export const TwoFactorSettingsCard = ({ > <Stack gap="lg"> <Text> - <Trans>Enter a valid code to turn off two-factor authentication.</Trans> + <Trans> + Enter a valid code to turn off two-factor authentication. + </Trans> </Text> - <Stack gap="xs"> - <Text fw={500} size="sm"> - <Trans>Authenticator code</Trans> - </Text> - <PinInput - length={6} - type="number" - size="md" - oneTimeCode - value={disableOtp} - onChange={(value) => setDisableOtp(value)} - onComplete={(value) => handleDisableTwoFactor(value)} - inputMode="numeric" - disabled={disableTwoFactorMutation.isPending} - /> - </Stack> + <Stack gap="xs"> + <Text fw={500} size="sm"> + <Trans>Authenticator code</Trans> + </Text> + <PinInput + length={6} + type="number" + size="md" + oneTimeCode + value={disableOtp} + onChange={(value) => setDisableOtp(value)} + onComplete={(value) => handleDisableTwoFactor(value)} + inputMode="numeric" + disabled={disableTwoFactorMutation.isPending} + /> + </Stack> - {disableTwoFactorMutation.isError && ( - <Alert color="red" variant="light"> - {disableTwoFactorMutation.error?.message ?? - t`We couldn’t disable two-factor authentication. Try again with a fresh code.`} - </Alert> - )} - - <Group justify="flex-end"> - <Button variant="subtle" onClick={closeDisableModal}> - <Trans>Cancel</Trans> - </Button> - <Button + {disableTwoFactorMutation.isError && ( + <Alert color="red" variant="light"> + {disableTwoFactorMutation.error?.message ?? + t`We couldn’t disable two-factor authentication. 
Try again with a fresh code.`} + </Alert> + )} + + <Group justify="flex-end"> + <Button variant="subtle" onClick={closeDisableModal}> + <Trans>Cancel</Trans> + </Button> + <Button color="red" onClick={() => handleDisableTwoFactor()} loading={disableTwoFactorMutation.isPending} diff --git a/echo/frontend/src/components/settings/hooks/index.ts b/echo/frontend/src/components/settings/hooks/index.ts index 27105ed2..da8f6dd1 100644 --- a/echo/frontend/src/components/settings/hooks/index.ts +++ b/echo/frontend/src/components/settings/hooks/index.ts @@ -1,6 +1,6 @@ import { useMutation, useQueryClient } from "@tanstack/react-query"; -import { toast } from "@/components/common/Toaster"; import { throwWithMessage } from "@/components/auth/utils/errorUtils"; +import { toast } from "@/components/common/Toaster"; import { directus } from "@/lib/directus"; export interface GenerateTwoFactorResponse { diff --git a/echo/frontend/src/lib/api.ts b/echo/frontend/src/lib/api.ts index 5bf20379..00ed2386 100644 --- a/echo/frontend/src/lib/api.ts +++ b/echo/frontend/src/lib/api.ts @@ -1081,21 +1081,8 @@ export type VerificationTopicsResponse = { }; export const getVerificationTopics = async (projectId: string) => { - return api.get<unknown, VerificationTopicsResponse>(`/verify/topics/${projectId}`); -}; - -export const updateVerificationTopics = async ({ - projectId, - topicList, -}: { - projectId: string; - topicList: string[]; -}) => { - return api.put<unknown, VerificationTopicsResponse>( + return api.get<unknown, VerificationTopicsResponse>( `/verify/topics/${projectId}`, - { - topic_list: topicList, - }, ); }; @@ -1141,16 +1128,25 @@ export const updateVerificationArtefact = async ({ content, approvedAt, }: UpdateVerificationArtefactPayload) => { - return api.put<unknown, VerificationArtifact>(`/verify/artifact/${artifactId}`, { - use_conversation: useConversation - ? 
{ - conversationId: useConversation.conversationId, - timestamp: useConversation.timestamp, - } - : undefined, - content, - approvedAt, - }); + return api.put<unknown, VerificationArtifact>( + `/verify/artifact/${artifactId}`, + { + approvedAt, + content, + use_conversation: useConversation + ? { + conversationId: useConversation.conversationId, + timestamp: useConversation.timestamp, + } + : undefined, + }, + ); +}; + +export const getVerificationArtefacts = async (conversationId: string) => { + return api.get<unknown, VerificationArtifact[]>( + `/verify/artifacts/${conversationId}`, + ); }; export const unsubscribeParticipant = async ( diff --git a/echo/frontend/src/lib/typesDirectus.d.ts b/echo/frontend/src/lib/typesDirectus.d.ts index 90ce0658..f9ca91c1 100644 --- a/echo/frontend/src/lib/typesDirectus.d.ts +++ b/echo/frontend/src/lib/typesDirectus.d.ts @@ -1,809 +1,22 @@ // biome-ignore-all lint: doesnt need interference -type Announcement = { - activity: any[] | AnnouncementActivity[]; - created_at?: string | null; - expires_at?: string | null; - id: string; - level?: string | null; - sort?: number | null; - translations: any[] | AnnouncementTranslations[]; - updated_at?: string | null; - user_created?: string | DirectusUsers | null; - user_updated?: string | DirectusUsers | null; -}; - -type AnnouncementActivity = { - announcement_activity?: string | Announcement | null; - created_at?: string | null; - id: string; - read?: boolean | null; - sort?: number | null; - updated_at?: string | null; - user_created?: string | DirectusUsers | null; - user_id?: string | null; - user_updated?: string | DirectusUsers | null; -}; - -type AnnouncementTranslations = { - announcement_id?: string | Announcement | null; - id: number; - languages_code?: string | Languages | null; - message?: string | null; - title?: string | null; -}; - -type Aspect = { - aspect_segment: any[] | AspectSegment[]; - created_at?: string | null; - description?: string | null; - id: string; - 
image_url?: string | null; - long_summary?: string | null; - name?: string | null; - short_summary?: string | null; - updated_at?: string | null; - view_id?: string | View | null; -}; - -type AspectSegment = { - aspect?: string | Aspect | null; - description?: string | null; - id: string; - relevant_index?: string | null; - segment?: number | ConversationSegment | null; - verbatim_transcript?: string | null; -}; - -type Conversation = { - artefacts: any[] | ConversationArtefact[]; - chunks: any[] | ConversationChunk[]; - conversation_segments: any[] | ConversationSegment[]; - created_at?: string | null; - duration?: number | null; - id: string; - is_all_chunks_transcribed?: boolean | null; - is_audio_processing_finished?: boolean | null; - is_finished?: boolean | null; - linked_conversations: any[] | ConversationLink[]; - linking_conversations: any[] | ConversationLink[]; - merged_audio_path?: string | null; - merged_transcript?: string | null; - participant_email?: string | null; - participant_name?: string | null; - participant_user_agent?: string | null; - processing_status: any[] | ProcessingStatus[]; - project_chat_messages: any[] | ProjectChatMessageConversation[]; - project_chats: any[] | ProjectChatConversation[]; - project_id: string | Project; - replies: any[] | ConversationReply[]; - source?: string | null; - summary?: string | null; - tags: any[] | ConversationProjectTag[]; - updated_at?: string | null; -}; - -type ConversationArtefact = { - approved_at?: string | null; - content?: string | null; - conversation_id?: string | Conversation | null; - created_at?: string | null; - id: string; - key?: string | null; - title?: string | null; - updated_at?: string | null; -}; - -type ConversationChunk = { - conversation_id: string | Conversation; - conversation_segments: any[] | ConversationSegmentConversationChunk[]; - created_at?: string | null; - cross_talk_instances?: number | null; - desired_language?: string | null; - detected_language?: string | null; - 
detected_language_confidence?: number | null; - diarization?: unknown | null; - error?: string | null; - hallucination_reason?: string | null; - hallucination_score?: number | null; - id: string; - noise_ratio?: number | null; - path?: string | null; - processing_status: any[] | ProcessingStatus[]; - raw_transcript?: string | null; - runpod_job_status_link?: string | null; - runpod_request_count?: number | null; - silence_ratio?: number | null; - source?: string | null; - timestamp: string; - transcript?: string | null; - translation_error?: string | null; - updated_at?: string | null; -}; - -type ConversationLink = { - date_created?: string | null; - date_updated?: string | null; - id: number; - link_type?: string | null; - source_conversation_id?: string | Conversation | null; - target_conversation_id?: string | Conversation | null; -}; - -type ConversationProjectTag = { - conversation_id?: string | Conversation | null; - id: number; - project_tag_id?: string | ProjectTag | null; -}; - -type ConversationReply = { - content_text?: string | null; - conversation_id?: string | null; - date_created?: string | null; - id: string; - reply?: string | Conversation | null; - sort?: number | null; - type?: string | null; -}; - -type ConversationSegment = { - chunks: any[] | ConversationSegmentConversationChunk[]; - config_id?: string | null; - contextual_transcript?: string | null; - conversation_id?: string | Conversation | null; - counter?: number | null; - id: number; - lightrag_flag?: boolean | null; - path?: string | null; - transcript?: string | null; -}; - -type ConversationSegmentConversationChunk = { - conversation_chunk_id?: string | ConversationChunk | null; - conversation_segment_id?: number | ConversationSegment | null; - id: number; -}; - -type DirectusAccess = { - id: string; - policy: string | DirectusPolicies; - role?: string | DirectusRoles | null; - sort?: number | null; - user?: string | DirectusUsers | null; -}; - -type DirectusActivity = { - action: 
string; - collection: string; - id: number; - ip?: string | null; - item: string; - origin?: string | null; - revisions: any[] | DirectusRevisions[]; - timestamp: string; - user?: string | DirectusUsers | null; - user_agent?: string | null; -}; - -type DirectusCollections = { - accountability?: string | null; - archive_app_filter: boolean; - archive_field?: string | null; - archive_value?: string | null; - collapse: string; - collection: string; - color?: string | null; - display_template?: string | null; - group?: string | DirectusCollections | null; - hidden: boolean; - icon?: string | null; - item_duplication_fields?: unknown | null; - note?: string | null; - preview_url?: string | null; - singleton: boolean; - sort?: number | null; - sort_field?: string | null; - translations?: unknown | null; - unarchive_value?: string | null; - versioning: boolean; -}; - -type DirectusComments = { - collection: string | DirectusCollections; - comment: string; - date_created?: string | null; - date_updated?: string | null; - id: string; - item: string; - user_created?: string | DirectusUsers | null; - user_updated?: string | DirectusUsers | null; -}; - -type DirectusDashboards = { - color?: string | null; - date_created?: string | null; - icon: string; - id: string; - name: string; - note?: string | null; - panels: any[] | DirectusPanels[]; - user_created?: string | DirectusUsers | null; -}; - -type DirectusExtensions = { - bundle?: string | null; - enabled: boolean; - folder: string; - id: string; - source: string; -}; - -type DirectusFields = { - collection: string | DirectusCollections; - conditions?: unknown | null; - display?: string | null; - display_options?: unknown | null; - field: string; - group?: string | DirectusFields | null; - hidden: boolean; - id: number; - interface?: string | null; - note?: string | null; - options?: unknown | null; - readonly: boolean; - required?: boolean | null; - sort?: number | null; - special?: unknown | null; - translations?: unknown 
| null; - validation?: unknown | null; - validation_message?: string | null; - width?: string | null; -}; - -type DirectusFiles = { - charset?: string | null; - created_on: string; - description?: string | null; - duration?: number | null; - embed?: string | null; - filename_disk?: string | null; - filename_download: string; - filesize?: number | null; - focal_point_x?: number | null; - focal_point_y?: number | null; - folder?: string | DirectusFolders | null; - height?: number | null; - id: string; - location?: string | null; - metadata?: unknown | null; - modified_by?: string | DirectusUsers | null; - modified_on: string; - storage: string; - tags?: unknown | null; - title?: string | null; - tus_data?: unknown | null; - tus_id?: string | null; - type?: string | null; - uploaded_by?: string | DirectusUsers | null; - uploaded_on?: string | null; - width?: number | null; -}; - -type DirectusFlows = { - accountability?: string | null; - color?: string | null; - date_created?: string | null; - description?: string | null; - icon?: string | null; - id: string; - name: string; - operation?: string | DirectusOperations | null; - operations: any[] | DirectusOperations[]; - options?: unknown | null; - status: string; - trigger?: string | null; - user_created?: string | DirectusUsers | null; -}; - -type DirectusFolders = { - id: string; - name: string; - parent?: string | DirectusFolders | null; -}; - -type DirectusMigrations = { - name: string; - timestamp?: string | null; - version: string; -}; - -type DirectusNotifications = { - collection?: string | null; - id: number; - item?: string | null; - message?: string | null; - recipient: string | DirectusUsers; - sender?: string | DirectusUsers | null; - status?: string | null; - subject: string; - timestamp?: string | null; -}; - -type DirectusOperations = { - date_created?: string | null; - flow: string | DirectusFlows; - id: string; - key: string; - name?: string | null; - options?: unknown | null; - position_x: number; - 
position_y: number; - reject?: string | DirectusOperations | null; - resolve?: string | DirectusOperations | null; - type: string; - user_created?: string | DirectusUsers | null; -}; - -type DirectusPanels = { - color?: string | null; - dashboard: string | DirectusDashboards; - date_created?: string | null; - height: number; - icon?: string | null; - id: string; - name?: string | null; - note?: string | null; - options?: unknown | null; - position_x: number; - position_y: number; - show_header: boolean; - type: string; - user_created?: string | DirectusUsers | null; - width: number; -}; - -type DirectusPermissions = { - action: string; - collection: string; - fields?: unknown | null; - id: number; - permissions?: unknown | null; - policy: string | DirectusPolicies; - presets?: unknown | null; - validation?: unknown | null; -}; - -type DirectusPolicies = { - admin_access: boolean; - app_access: boolean; - description?: string | null; - enforce_tfa: boolean; - icon: string; - id: string; - ip_access?: unknown | null; - name: string; - permissions: any[] | DirectusPermissions[]; - roles: any[] | DirectusAccess[]; - users: any[] | DirectusAccess[]; -}; - -type DirectusPresets = { - bookmark?: string | null; - collection?: string | null; - color?: string | null; - filter?: unknown | null; - icon?: string | null; - id: number; - layout?: string | null; - layout_options?: unknown | null; - layout_query?: unknown | null; - refresh_interval?: number | null; - role?: string | DirectusRoles | null; - search?: string | null; - user?: string | DirectusUsers | null; -}; - -type DirectusRelations = { - id: number; - junction_field?: string | null; - many_collection: string; - many_field: string; - one_allowed_collections?: unknown | null; - one_collection?: string | null; - one_collection_field?: string | null; - one_deselect_action: string; - one_field?: string | null; - sort_field?: string | null; -}; - -type DirectusRevisions = { - activity: number | DirectusActivity; - 
collection: string; - data?: unknown | null; - delta?: unknown | null; - id: number; - item: string; - parent?: number | DirectusRevisions | null; - version?: string | DirectusVersions | null; -}; - -type DirectusRoles = { - children: any[] | DirectusRoles[]; - description?: string | null; - icon: string; - id: string; - name: string; - parent?: string | DirectusRoles | null; - policies: any[] | DirectusAccess[]; - users: any[] | DirectusUsers[]; - users_group: string; -}; - -type DirectusSessions = { - expires: string; - ip?: string | null; - next_token?: string | null; - origin?: string | null; - share?: string | DirectusShares | null; - token: string; - user?: string | DirectusUsers | null; - user_agent?: string | null; -}; - -type DirectusSettings = { - auth_login_attempts?: number | null; - auth_password_policy?: string | null; - basemaps?: unknown | null; - custom_aspect_ratios?: unknown | null; - custom_css?: string | null; - default_appearance: string; - default_language: string; - default_theme_dark?: string | null; - default_theme_light?: string | null; - id: number; - mapbox_key?: string | null; - module_bar?: unknown | null; - project_color: string; - project_descriptor?: string | null; - project_logo?: string | DirectusFiles | null; - project_name: string; - project_url?: string | null; - public_background?: string | DirectusFiles | null; - public_favicon?: string | DirectusFiles | null; - public_foreground?: string | DirectusFiles | null; - public_note?: string | null; - public_registration: boolean; - public_registration_email_filter?: unknown | null; - public_registration_role?: string | DirectusRoles | null; - public_registration_verify_email: boolean; - report_bug_url?: string | null; - report_error_url?: string | null; - report_feature_url?: string | null; - storage_asset_presets?: unknown | null; - storage_asset_transform?: string | null; - storage_default_folder?: string | DirectusFolders | null; - theme_dark_overrides?: unknown | null; - 
theme_light_overrides?: unknown | null; - theming_group: string; -}; - -type DirectusShares = { - collection: string | DirectusCollections; - date_created?: string | null; - date_end?: string | null; - date_start?: string | null; - id: string; - item: string; - max_uses?: number | null; - name?: string | null; - password?: string | null; - role?: string | DirectusRoles | null; - times_used?: number | null; - user_created?: string | DirectusUsers | null; -}; - -type DirectusSyncIdMap = { - created_at?: string | null; - id: number; - local_id: string; - sync_id: string; - table: string; -}; - -type DirectusTranslations = { - id: string; - key: string; - language: string; - value: string; -}; - -type DirectusUsers = { - appearance?: string | null; - auth_data?: unknown | null; - avatar?: string | DirectusFiles | null; - description?: string | null; - disable_create_project?: boolean | null; - email?: string | null; - email_notifications?: boolean | null; - external_identifier?: string | null; - first_name?: string | null; - id: string; - language?: string | null; - last_access?: string | null; - last_name?: string | null; - last_page?: string | null; - location?: string | null; - password?: string | null; - policies: any[] | DirectusAccess[]; - projects: any[] | Project[]; - provider: string; - role?: string | DirectusRoles | null; - status: string; - tags?: unknown | null; - tfa_secret?: string | null; - theme_dark?: string | null; - theme_dark_overrides?: unknown | null; - theme_light?: string | null; - theme_light_overrides?: unknown | null; - title?: string | null; - token?: string | null; -}; - -type DirectusVersions = { - collection: string | DirectusCollections; - date_created?: string | null; - date_updated?: string | null; - delta?: unknown | null; - hash?: string | null; - id: string; - item: string; - key: string; - name?: string | null; - user_created?: string | DirectusUsers | null; - user_updated?: string | DirectusUsers | null; -}; - -type 
DirectusWebhooks = { - actions: unknown; - collections: unknown; - data: boolean; - headers?: unknown | null; - id: number; - method: string; - migrated_flow?: string | DirectusFlows | null; - name: string; - status: string; - url: string; - was_active_before_deprecation: boolean; -}; - -type Insight = { - created_at?: string | null; - id: string; - project_analysis_run_id?: string | ProjectAnalysisRun | null; - summary?: string | null; - title?: string | null; - updated_at?: string | null; -}; - -type Languages = { - code: string; - direction?: string | null; - name?: string | null; -}; - -type ProcessingStatus = { - conversation_chunk_id?: string | ConversationChunk | null; - conversation_id?: string | Conversation | null; - duration_ms?: number | null; - event?: string | null; - id: number; - message?: string | null; - parent?: number | ProcessingStatus | null; - project_analysis_run_id?: string | ProjectAnalysisRun | null; - project_id?: string | Project | null; - timestamp?: string | null; -}; - -type Project = { - conversations_count?: number | null; - context?: string | null; - conversation_ask_for_participant_name_label?: string | null; - conversations: any[] | Conversation[]; - created_at?: string | null; - default_conversation_ask_for_participant_name?: boolean | null; - default_conversation_description?: string | null; - default_conversation_finish_text?: string | null; - default_conversation_title?: string | null; - default_conversation_transcript_prompt?: string | null; - default_conversation_tutorial_slug?: string | null; - directus_user_id?: string | DirectusUsers | null; - get_reply_mode?: string | null; - get_reply_prompt?: string | null; - id: string; - image_generation_model?: string | null; - is_conversation_allowed: boolean; - is_enhanced_audio_processing_enabled?: boolean | null; - is_get_reply_enabled?: boolean | null; - is_verify_enabled?: boolean | null; - verification_topics?: string[] | null; - 
is_project_notification_subscription_allowed?: boolean | null; - language?: string | null; - name?: string | null; - processing_status: any[] | ProcessingStatus[]; - project_analysis_runs: any[] | ProjectAnalysisRun[]; - project_chats: any[] | ProjectChat[]; - project_reports: any[] | ProjectReport[]; - tags: any[] | ProjectTag[]; - updated_at?: string | null; -}; - -type ProjectAnalysisRun = { - created_at?: string | null; - id: string; - insights: any[] | Insight[]; - processing_status: any[] | ProcessingStatus[]; - project_id?: string | Project | null; - updated_at?: string | null; - views: any[] | View[]; -}; - -type ProjectChat = { - auto_select?: boolean | null; - date_created?: string | null; - date_updated?: string | null; - id: string; - name?: string | null; - project_chat_messages: any[] | ProjectChatMessage[]; - project_id?: string | Project | null; - used_conversations: any[] | ProjectChatConversation[]; - user_created?: string | DirectusUsers | null; - user_updated?: string | DirectusUsers | null; -}; - -type ProjectChatConversation = { - conversation_id?: string | Conversation | null; - id: number; - project_chat_id?: string | ProjectChat | null; -}; - -type ProjectChatMessage = { - added_conversations: any[] | ProjectChatMessageConversation1[]; - chat_message_metadata: any[] | ProjectChatMessageMetadata[]; - date_created?: string | null; - date_updated?: string | null; - id: string; - message_from?: string | null; - project_chat_id?: string | ProjectChat | null; - template_key?: string | null; - text?: string | null; - tokens_count?: number | null; - used_conversations: any[] | ProjectChatMessageConversation[]; -}; - -type ProjectChatMessageConversation = { - conversation_id?: string | Conversation | null; - id: number; - project_chat_message_id?: string | ProjectChatMessage | null; -}; - -type ProjectChatMessageConversation1 = { - conversation_id?: string | Conversation | null; - id: number; - project_chat_message_id?: string | ProjectChatMessage | 
null; -}; - -type ProjectChatMessageMetadata = { - conversation?: string | Conversation | null; - date_created?: string | null; - id: string; - message_metadata?: string | ProjectChatMessage | null; - ratio?: number | null; - reference_text?: string | null; - type?: string | null; -}; - -type ProjectReport = { - content?: string | null; - date_created?: string | null; - date_updated?: string | null; - error_code?: string | null; - id: number; - language?: string | null; - project_id?: string | Project | null; - show_portal_link?: boolean | null; - status: string; -}; - -type ProjectReportMetric = { - date_created?: string | null; - date_updated?: string | null; - id: number; - ip?: string | null; - project_report_id?: number | ProjectReport | null; - type?: string | null; -}; - -type ProjectReportNotificationParticipants = { - conversation_id?: string | Conversation | null; - date_submitted?: string | null; - date_updated?: string | null; - email?: string | null; - email_opt_in?: boolean | null; - email_opt_out_token?: string | null; - id: string; - project_id?: string | null; - sort?: number | null; -}; - -type ProjectTag = { - conversations: any[] | ConversationProjectTag[]; - created_at?: string | null; - id: string; - project_id: string | Project; - sort?: number | null; - text?: string | null; - updated_at?: string | null; -}; - -type View = { - aspects: any[] | Aspect[]; - created_at?: string | null; - description?: string | null; - id: string; - language?: string | null; - name?: string | null; - project_analysis_run_id?: string | ProjectAnalysisRun | null; - summary?: string | null; - updated_at?: string | null; - user_input?: string | null; - user_input_description?: string | null; -}; - -type CustomDirectusTypes = { +interface Schema { announcement: Announcement[]; announcement_activity: AnnouncementActivity[]; - announcement_translations: AnnouncementTranslations[]; + announcement_translations: AnnouncementTranslation[]; aspect: Aspect[]; aspect_segment: 
AspectSegment[]; conversation: Conversation[]; - conversation_artefact: ConversationArtefact[]; + conversation_artifact: ConversationArtifact[]; conversation_chunk: ConversationChunk[]; conversation_link: ConversationLink[]; conversation_project_tag: ConversationProjectTag[]; conversation_reply: ConversationReply[]; conversation_segment: ConversationSegment[]; conversation_segment_conversation_chunk: ConversationSegmentConversationChunk[]; - directus_access: DirectusAccess[]; - directus_activity: DirectusActivity[]; - directus_collections: DirectusCollections[]; - directus_comments: DirectusComments[]; - directus_dashboards: DirectusDashboards[]; - directus_extensions: DirectusExtensions[]; - directus_fields: DirectusFields[]; - directus_files: DirectusFiles[]; - directus_flows: DirectusFlows[]; - directus_folders: DirectusFolders[]; - directus_migrations: DirectusMigrations[]; - directus_notifications: DirectusNotifications[]; - directus_operations: DirectusOperations[]; - directus_panels: DirectusPanels[]; - directus_permissions: DirectusPermissions[]; - directus_policies: DirectusPolicies[]; - directus_presets: DirectusPresets[]; - directus_relations: DirectusRelations[]; - directus_revisions: DirectusRevisions[]; - directus_roles: DirectusRoles[]; - directus_sessions: DirectusSessions[]; - directus_settings: DirectusSettings; - directus_shares: DirectusShares[]; directus_sync_id_map: DirectusSyncIdMap[]; - directus_translations: DirectusTranslations[]; - directus_users: DirectusUsers[]; - directus_versions: DirectusVersions[]; - directus_webhooks: DirectusWebhooks[]; insight: Insight[]; - languages: Languages[]; + languages: Language[]; processing_status: ProcessingStatus[]; project: Project[]; project_analysis_run: ProjectAnalysisRun[]; @@ -815,7 +28,436 @@ type CustomDirectusTypes = { project_chat_message_metadata: ProjectChatMessageMetadata[]; project_report: ProjectReport[]; project_report_metric: ProjectReportMetric[]; - 
project_report_notification_participants: ProjectReportNotificationParticipants[]; + project_report_notification_participants: ProjectReportNotificationParticipant[]; project_tag: ProjectTag[]; + verification_topic: VerificationTopic[]; + verification_topic_translations: VerificationTopicTranslation[]; view: View[]; -}; + directus_users: CustomDirectusUser; +} + +interface Announcement { + created_at: string | null; + expires_at: string | null; + id: string; + level: "info" | "urgent" | null; + sort: number | null; + updated_at: string | null; + user_created: string | DirectusUser<Schema> | null; + user_updated: string | DirectusUser<Schema> | null; + activity: string[] | AnnouncementActivity[]; + translations: string[] | AnnouncementTranslation[]; +} + +interface AnnouncementActivity { + announcement_activity: string | Announcement | null; + created_at: string | null; + id: string; + read: boolean | null; + sort: number | null; + updated_at: string | null; + user_created: string | DirectusUser<Schema> | null; + user_id: string | null; + user_updated: string | DirectusUser<Schema> | null; +} + +interface AnnouncementTranslation { + announcement_id: string | Announcement | null; + id: number; + languages_code: string | Language | null; + message: string | null; + title: string | null; +} + +interface Aspect { + created_at: string | null; + description: string | null; + id: string; + image_url: string | null; + long_summary: string | null; + name: string | null; + short_summary: string | null; + updated_at: string | null; + view_id: string | View | null; + aspect_segment: string[] | AspectSegment[]; +} + +interface AspectSegment { + aspect: string | Aspect | null; + description: string | null; + id: string; + relevant_index: string | null; + segment: number | ConversationSegment | null; + verbatim_transcript: string | null; +} + +interface Conversation { + created_at: string | null; + duration: number | null; + id: string; + is_all_chunks_transcribed: boolean | null; 
+ is_audio_processing_finished: boolean | null; + is_finished: boolean | null; + merged_audio_path: string | null; + merged_transcript: string | null; + participant_email: string | null; + participant_name: string | null; + participant_user_agent: string | null; + project_id: string | Project; + source: "DASHBOARD_UPLOAD" | "CLONE" | null; + summary: string | null; + updated_at: string | null; + chunks: string[] | ConversationChunk[]; + conversation_segments: string[] | ConversationSegment[]; + linked_conversations: string[] | ConversationLink[]; + linking_conversations: string[] | ConversationLink[]; + processing_status: string[] | ProcessingStatus[]; + project_chat_messages: string[] | ProjectChatMessageConversation[]; + project_chats: string[] | ProjectChatConversation[]; + replies: string[] | ConversationReply[]; + tags: string[] | ConversationProjectTag[]; + conversation_artifacts: string[] | ConversationArtifact[]; +} + +interface ConversationArtifact { + id: string; + user_created: string | DirectusUser<Schema> | null; + date_created: string | null; + user_updated: string | DirectusUser<Schema> | null; + last_updated_at: string | null; + content: string | null; + approved_at: string | null; + read_aloud_stream_url: string | null; + key: string | null; + conversation_id: string | Conversation | null; +} + +interface ConversationChunk { + conversation_id: string | Conversation; + created_at: string | null; + cross_talk_instances: number | null; + desired_language: string | null; + detected_language: string | null; + detected_language_confidence: number | null; + diarization: unknown | null; + error: string | null; + hallucination_reason: string | null; + hallucination_score: number | null; + id: string; + noise_ratio: number | null; + path: string | null; + raw_transcript: string | null; + runpod_job_status_link: string | null; + runpod_request_count: number | null; + silence_ratio: number | null; + source: "DASHBOARD_UPLOAD" | "PORTAL_AUDIO" | "PORTAL_TEXT" | 
"SPLIT" | null; + timestamp: string; + transcript: string | null; + translation_error: string | null; + updated_at: string | null; + conversation_segments: string[] | ConversationSegmentConversationChunk[]; + processing_status: string[] | ProcessingStatus[]; +} + +interface ConversationLink { + date_created: string | null; + date_updated: string | null; + id: number; + link_type: string | null; + source_conversation_id: string | Conversation | null; + target_conversation_id: string | Conversation | null; +} + +interface ConversationProjectTag { + conversation_id: string | Conversation | null; + id: number; + project_tag_id: string | ProjectTag | null; +} + +interface ConversationReply { + content_text: string | null; + conversation_id: string | null; + date_created: string | null; + id: string; + reply: string | Conversation | null; + sort: number | null; + type: string | null; +} + +interface ConversationSegment { + config_id: string | null; + contextual_transcript: string | null; + conversation_id: string | Conversation | null; + counter: number | null; + id: number; + lightrag_flag: boolean | null; + path: string | null; + transcript: string | null; + chunks: number[] | ConversationSegmentConversationChunk[]; +} + +interface ConversationSegmentConversationChunk { + conversation_chunk_id: string | ConversationChunk | null; + conversation_segment_id: number | ConversationSegment | null; + id: number; +} + +interface DirectusSyncIdMap { + id: number; + table: string; + sync_id: string; + local_id: string; + created_at: string | null; +} + +interface Insight { + created_at: string | null; + id: string; + project_analysis_run_id: string | ProjectAnalysisRun | null; + summary: string | null; + title: string | null; + updated_at: string | null; +} + +interface Language { + code: string; + direction: "ltr" | "rtl" | null; + name: string | null; +} + +interface ProcessingStatus { + conversation_chunk_id: string | ConversationChunk | null; + conversation_id: string | 
Conversation | null; + duration_ms: number | null; + event: string | null; + id: number; + message: string | null; + parent: number | ProcessingStatus | null; + project_analysis_run_id: string | ProjectAnalysisRun | null; + project_id: string | Project | null; + timestamp: string | null; +} + +interface Project { + context: string | null; + conversation_ask_for_participant_name_label: string | null; + created_at: string | null; + default_conversation_ask_for_participant_name: boolean | null; + default_conversation_description: string | null; + default_conversation_finish_text: string | null; + default_conversation_title: string | null; + default_conversation_transcript_prompt: string | null; + default_conversation_tutorial_slug: "None" | "basic" | "advanced" | null; + directus_user_id: string | DirectusUser<Schema> | null; + get_reply_mode: "summarize" | "brainstorm" | "custom" | null; + get_reply_prompt: string | null; + id: string; + image_generation_model: "MODEST" | "EXTRAVAGANT" | "PLACEHOLDER" | null; + is_conversation_allowed: boolean; + is_enhanced_audio_processing_enabled: boolean | null; + is_get_reply_enabled: boolean | null; + is_project_notification_subscription_allowed: boolean | null; + language: "en" | "nl" | "multi" | null; + name: string | null; + updated_at: string | null; + is_verify_enabled: boolean | null; + selected_verification_key_list: string | null; + conversations: string[] | Conversation[]; + tags: string[] | ProjectTag[]; + project_analysis_runs: string[] | ProjectAnalysisRun[]; + project_chats: string[] | ProjectChat[]; + project_reports: string[] | ProjectReport[]; + processing_status: string[] | ProcessingStatus[]; + custom_verification_topics: string[] | VerificationTopic[]; +} + +interface ProjectAnalysisRun { + created_at: string | null; + id: string; + project_id: string | Project | null; + updated_at: string | null; + insights: string[] | Insight[]; + processing_status: string[] | ProcessingStatus[]; + views: string[] | View[]; 
+} + +interface ProjectChat { + auto_select: boolean | null; + date_created: string | null; + date_updated: string | null; + id: string; + name: string | null; + project_id: string | Project | null; + user_created: string | DirectusUser<Schema> | null; + user_updated: string | DirectusUser<Schema> | null; + project_chat_messages: string[] | ProjectChatMessage[]; + used_conversations: string[] | ProjectChatConversation[]; +} + +interface ProjectChatConversation { + conversation_id: string | Conversation | null; + id: number; + project_chat_id: string | ProjectChat | null; +} + +interface ProjectChatMessage { + date_created: string | null; + date_updated: string | null; + id: string; + message_from: "User" | "assistant" | "dembrane" | null; + project_chat_id: string | ProjectChat | null; + template_key: string | null; + text: string | null; + tokens_count: number | null; + added_conversations: string[] | ProjectChatMessageConversation1[]; + chat_message_metadata: string[] | ProjectChatMessageMetadata[]; + used_conversations: string[] | ProjectChatMessageConversation[]; +} + +interface ProjectChatMessageConversation { + conversation_id: string | Conversation | null; + id: number; + project_chat_message_id: string | ProjectChatMessage | null; +} + +interface ProjectChatMessageConversation1 { + conversation_id: string | Conversation | null; + id: number; + project_chat_message_id: string | ProjectChatMessage | null; +} + +interface ProjectChatMessageMetadata { + conversation: string | Conversation | null; + date_created: string | null; + id: string; + message_metadata: string | ProjectChatMessage | null; + ratio: number | null; + reference_text: string | null; + type: "reference" | "citation" | null; +} + +interface ProjectReport { + content: string | null; + date_created: string | null; + date_updated: string | null; + error_code: string | null; + id: number; + language: string | null; + project_id: string | Project | null; + show_portal_link: boolean | null; + status: 
"error" | "archived" | "published"; +} + +interface ProjectReportMetric { + date_created: string | null; + date_updated: string | null; + id: number; + ip: string | null; + project_report_id: number | ProjectReport | null; + type: "view" | null; +} + +interface ProjectReportNotificationParticipant { + conversation_id: string | Conversation | null; + date_submitted: string | null; + date_updated: string | null; + email: string | null; + email_opt_in: boolean | null; + email_opt_out_token: string | null; + id: string; + project_id: string | null; + sort: number | null; +} + +interface ProjectTag { + created_at: string | null; + id: string; + project_id: string | Project; + sort: number | null; + text: string | null; + updated_at: string | null; + conversations: string[] | ConversationProjectTag[]; +} + +interface VerificationTopic { + key: string; + sort: number | null; + user_created: string | DirectusUser<Schema> | null; + date_created: string | null; + user_updated: string | DirectusUser<Schema> | null; + date_updated: string | null; + project_id: string | Project | null; + prompt: string | null; + icon: string | null; + translations: string[] | VerificationTopicTranslation[]; +} + +interface VerificationTopicTranslation { + id: number; + verification_topic_key: string | VerificationTopic | null; + languages_code: string | Language | null; + label: string | null; +} + +interface View { + created_at: string | null; + description: string | null; + id: string; + language: string | null; + name: string | null; + project_analysis_run_id: string | ProjectAnalysisRun | null; + summary: string | null; + updated_at: string | null; + user_input: string | null; + user_input_description: string | null; + aspects: string[] | Aspect[]; +} + +interface CustomDirectusUser { + disable_create_project: boolean | null; + projects: string[] | Project[]; +} + +// GeoJSON Types + +interface GeoJSONPoint { + type: "Point"; + coordinates: [number, number]; +} + +interface 
GeoJSONLineString { + type: "LineString"; + coordinates: Array<[number, number]>; +} + +interface GeoJSONPolygon { + type: "Polygon"; + coordinates: Array<Array<[number, number]>>; +} + +interface GeoJSONMultiPoint { + type: "MultiPoint"; + coordinates: Array<[number, number]>; +} + +interface GeoJSONMultiLineString { + type: "MultiLineString"; + coordinates: Array<Array<[number, number]>>; +} + +interface GeoJSONMultiPolygon { + type: "MultiPolygon"; + coordinates: Array<Array<Array<[number, number]>>>; +} + +interface GeoJSONGeometryCollection { + type: "GeometryCollection"; + geometries: Array< + | GeoJSONPoint + | GeoJSONLineString + | GeoJSONPolygon + | GeoJSONMultiPoint + | GeoJSONMultiLineString + | GeoJSONMultiPolygon + >; +} diff --git a/echo/frontend/src/routes/project/ProjectRoutes.tsx b/echo/frontend/src/routes/project/ProjectRoutes.tsx index 2102b2cf..6bc4e3dd 100644 --- a/echo/frontend/src/routes/project/ProjectRoutes.tsx +++ b/echo/frontend/src/routes/project/ProjectRoutes.tsx @@ -92,7 +92,7 @@ export const ProjectPortalSettingsRoute = () => { "get_reply_prompt", "is_get_reply_enabled", "is_verify_enabled", - "verification_topics", + "selected_verification_key_list", "is_project_notification_subscription_allowed", { tags: ["id", "created_at", "text", "sort"], diff --git a/echo/server/dembrane/anthropic.py b/echo/server/dembrane/anthropic.py deleted file mode 100644 index f8e4a0b6..00000000 --- a/echo/server/dembrane/anthropic.py +++ /dev/null @@ -1,108 +0,0 @@ -import json -from typing import Any, Dict, List, Optional, Generator - -from anthropic import Anthropic, AsyncAnthropic - -from dembrane.config import ANTHROPIC_API_KEY - -anthropic_client = Anthropic( - api_key=ANTHROPIC_API_KEY, -) - -async_anthropic_client = AsyncAnthropic( - api_key=ANTHROPIC_API_KEY, -) - - -def count_tokens_anthropic(text: str) -> int: - return anthropic_client.beta.messages.count_tokens( - model="claude-3-5-sonnet-20241022", - messages=[{"role": "user", "content": 
text}], - ).input_tokens - - -def stream_anthropic_chat_response( - system: List[Dict[str, Any]], messages: List[Dict[str, Any]], protocol: str = "data" -) -> Generator[str, None, None]: - """ - Generates response from Anthropic - and returns openAI like stream response - """ - stream = anthropic_client.messages.create( - model="claude-3-5-sonnet-20241022", - system=system, # type:ignore - messages=messages, # type:ignore - max_tokens=2048, - stream=True, - ) - - finish_reason = "unknown" - usage = {"promptTokens": 0, "completionTokens": 0} - tool_call_content_blocks = {} - - for chunk in stream: - if chunk.type == "ping": # type:ignore - continue - - elif chunk.type == "content_block_start": # type:ignore - if chunk.content_block.type == "text": # type:ignore - continue - elif chunk.content_block.type == "tool_use": # type:ignore - tool_call_content_blocks[chunk.index] = { # type:ignore - "tool_call_id": chunk.content_block.id, # type:ignore - "tool_name": chunk.content_block.name, # type:ignore - "json_text": "", - } - if protocol == "data": - yield f"b:{json.dumps({'toolCallId': chunk.content_block.id, 'toolName': chunk.content_block.name})}\n" # type:ignore - - elif chunk.type == "content_block_stop": # type:ignore - if chunk.index in tool_call_content_blocks: # type:ignore - content_block = tool_call_content_blocks[chunk.index] # type:ignore - if protocol == "data": - yield f"9:{json.dumps({'toolCallId': content_block['tool_call_id'], 'toolName': content_block['tool_name'], 'args': json.loads(content_block['json_text'])})}\n" - del tool_call_content_blocks[chunk.index] # type:ignore - - elif chunk.type == "content_block_delta": # type:ignore - if chunk.delta.type == "text_delta": # type:ignore - if protocol == "text": - yield chunk.delta.text # type:ignore - elif protocol == "data": - yield f"0:{json.dumps(chunk.delta.text)}\n" # type:ignore - elif chunk.delta.type == "input_json_delta": # type:ignore - content_block = tool_call_content_blocks[chunk.index] # 
type:ignore - if protocol == "data": - yield f"c:{json.dumps({'toolCallId': content_block['tool_call_id'], 'argsTextDelta': chunk.delta.partial_json})}\n" # type:ignore - content_block["json_text"] += chunk.delta.partial_json # type:ignore - - elif chunk.type == "message_start": # type:ignore - usage["promptTokens"] = chunk.message.usage.input_tokens # type:ignore - usage["completionTokens"] = chunk.message.usage.output_tokens # type:ignore - if protocol == "data": - yield f"2:{json.dumps([{'id': chunk.message.id, 'modelId': chunk.message.model}])}\n" # type:ignore - - elif chunk.type == "message_delta": # type:ignore - usage["completionTokens"] = chunk.usage.output_tokens # type:ignore - if chunk.delta.stop_reason: # type:ignore - finish_reason = map_anthropic_stop_reason(chunk.delta.stop_reason) # type:ignore - - elif chunk.type == "message_stop": # type:ignore - if protocol == "data": - yield f"d:{json.dumps({'finishReason': finish_reason, 'usage': usage})}\n" - - elif chunk.type == "error": # type:ignore - if protocol == "data": - yield f"3:{json.dumps(chunk.error)}\n" # type:ignore - else: - yield f"Error: {chunk.error}" # type:ignore - - -def map_anthropic_stop_reason(finish_reason: Optional[str]) -> str: - if finish_reason in ["end_turn", "stop_sequence"]: - return "stop" - elif finish_reason == "tool_use": - return "tool-calls" - elif finish_reason == "max_tokens": - return "length" - else: - return "unknown" diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 819e3611..6f6a8112 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -174,9 +174,7 @@ class Config: class UpdateArtifactRequest(BaseModel): - use_conversation: Optional[UseConversationPayload] = Field( - None, alias="useConversation" - ) + use_conversation: Optional[UseConversationPayload] = Field(None, alias="useConversation") content: Optional[str] = None approved_at: Optional[datetime] = Field(None, alias="approvedAt") 
@@ -264,8 +262,6 @@ async def _get_project(project_id: str) -> dict: raise ProjectNotFoundException project = project_rows[0] - if not project.get("is_verify_enabled", False): - raise HTTPException(status_code=403, detail="Verify is not enabled for this project") return project @@ -375,7 +371,44 @@ async def update_verification_topics( refreshed_topics = await _get_verification_topics_for_project(project_id) selected_topics = _parse_selected_topics(serialized_keys, refreshed_topics) - return GetVerificationTopicsResponse(selected_topics=selected_topics, available_topics=refreshed_topics) + return GetVerificationTopicsResponse( + selected_topics=selected_topics, available_topics=refreshed_topics + ) + + +@VerifyRouter.get("/artifacts/{conversation_id}", response_model=List[ConversationArtifactResponse]) +async def list_verification_artifacts( + conversation_id: str, + auth: DependencyDirectusSession, # noqa: ARG001 - reserved for future use +) -> List[ConversationArtifactResponse]: + await _get_conversation_with_project(conversation_id) + artifacts = await _get_conversation_artifacts(conversation_id) + + def _sort_key(item: dict) -> tuple[bool, str]: + approved = item.get("approved_at") + created = item.get("date_created") + if approved: + return (False, approved) + if created: + return (True, created) + return (True, "") + + artifacts.sort(key=_sort_key, reverse=True) + + response: List[ConversationArtifactResponse] = [] + for artifact in artifacts: + response.append( + ConversationArtifactResponse( + id=artifact.get("id"), + key=artifact.get("key"), + content=artifact.get("content") or "", + conversation_id=artifact.get("conversation_id") or conversation_id, + approved_at=artifact.get("approved_at"), + read_aloud_stream_url=artifact.get("read_aloud_stream_url") or "", + ) + ) + + return response async def _get_artifact_or_404(artifact_id: str) -> dict: @@ -428,9 +461,6 @@ async def _get_conversation_with_project(conversation_id: str) -> dict: raise 
ConversationNotFoundException conversation = conversation_rows[0] - project = conversation.get("project_id") or {} - if not project.get("is_verify_enabled", False): - raise HTTPException(status_code=403, detail="Verify is not enabled for this project") return conversation @@ -832,7 +862,9 @@ async def update_verification_artifact( ) except Exception as exc: # pragma: no cover - external failure logger.error("Gemini revision failed: %s", exc, exc_info=True) - raise HTTPException(status_code=500, detail="Failed to revise verification artifact") from exc + raise HTTPException( + status_code=500, detail="Failed to revise verification artifact" + ) from exc generated_text = _extract_response_text(response) updates["content"] = generated_text @@ -853,7 +885,10 @@ async def update_verification_artifact( return ConversationArtifactResponse( id=updated_data.get("id", artifact_id), key=updated_data.get("key"), - content=updated_data.get("content") or updates.get("content") or artifact.get("content") or "", + content=updated_data.get("content") + or updates.get("content") + or artifact.get("content") + or "", conversation_id=updated_data.get("conversation_id") or conversation_id or "", approved_at=updated_data.get("approved_at") or updates.get("approved_at"), read_aloud_stream_url=updated_data.get("read_aloud_stream_url") diff --git a/echo/server/dembrane/audio_lightrag/__init__.py b/echo/server/dembrane/audio_lightrag/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/echo/server/dembrane/audio_lightrag/services/__init__.py b/echo/server/dembrane/audio_lightrag/services/__init__.py deleted file mode 100644 index 7b2141dd..00000000 --- a/echo/server/dembrane/audio_lightrag/services/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Services for audio_lightrag diff --git a/echo/server/dembrane/audio_lightrag/services/contextualizer.py b/echo/server/dembrane/audio_lightrag/services/contextualizer.py deleted file mode 100644 index e1151adf..00000000 --- 
a/echo/server/dembrane/audio_lightrag/services/contextualizer.py +++ /dev/null @@ -1,102 +0,0 @@ -import json -import logging - -from dembrane.prompts import render_prompt -from dembrane.audio_lightrag.utils.litellm_utils import llm_model_func - -logger = logging.getLogger(__name__) - - -class ConversationContextualizer: - """ - Rich contextualization of conversation transcripts using Claude. - - THE PIVOT: Uses existing transcripts (no audio processing!). - Uses the same audio_model_system_prompt as old pipeline but skips transcription (Task 1). - """ - - async def contextualize( - self, - transcript: str, - event_text: str, - previous_conversation_text: str, - language: str = "en", - ) -> str: - """ - Contextualize a conversation transcript with project information. - - Args: - transcript: Full conversation transcript (concatenated from chunks) - event_text: Project context formatted as key:value pairs - previous_conversation_text: Previous contextual transcripts (empty for first segment) - language: Language code (default: "en") - - Returns: - Contextualized transcript for RAG insertion - """ - - if not transcript or not transcript.strip(): - logger.warning("Empty transcript provided, returning as-is") - return transcript - - try: - # Use the same prompt template as old audio pipeline - # This ensures RAG output quality remains identical to before - system_prompt = render_prompt( - "audio_model_system_prompt", - language, - { - "event_text": event_text, - "previous_conversation_text": previous_conversation_text, - } - ) - - # Build user prompt with transcript - # Note: We skip Task 1 (transcription) since we already have transcripts - # The LLM will focus on Task 2 (contextual analysis) - user_prompt = f"""Here is the conversation transcript (already transcribed): - -{transcript} - -Please provide your CONTEXTUAL ANALYSIS (Task 2 from the system prompt). 
-Since the transcript is already provided, skip Task 1 and focus entirely on the detailed contextual analysis.""" - - # Call Claude via llm_model_func (LightRAG-compatible interface) - logger.info(f"Calling Claude for contextualization (transcript length: {len(transcript)} chars)") - - response = await llm_model_func( - prompt=user_prompt, - system_prompt=system_prompt, - temperature=0.3, - ) - - # Parse JSON response to extract CONTEXTUAL_TRANSCRIPT - # Old format: {"TRANSCRIPTS": [...], "CONTEXTUAL_TRANSCRIPT": "..."} - try: - parsed = json.loads(response) - contextual_transcript = parsed.get("CONTEXTUAL_TRANSCRIPT", response) - except json.JSONDecodeError: - # If not valid JSON, use the full response as contextual transcript - logger.warning("Response not in expected JSON format, using raw response") - contextual_transcript = response - - logger.info(f"Contextualization successful (output length: {len(contextual_transcript)} chars)") - return contextual_transcript - - except Exception as e: - logger.error(f"Contextualization failed: {e}", exc_info=True) - # Fallback: return original transcript - logger.warning("Using fallback contextualization (original transcript)") - return transcript - - -# Singleton instance -_contextualizer = None - - -def get_contextualizer() -> ConversationContextualizer: - """Get or create the singleton contextualizer.""" - global _contextualizer - if _contextualizer is None: - _contextualizer = ConversationContextualizer() - return _contextualizer diff --git a/echo/server/dembrane/audio_lightrag/utils/async_utils.py b/echo/server/dembrane/audio_lightrag/utils/async_utils.py deleted file mode 100644 index cda31a68..00000000 --- a/echo/server/dembrane/audio_lightrag/utils/async_utils.py +++ /dev/null @@ -1,102 +0,0 @@ -""" -Utilities for safely executing async code from sync contexts (e.g., Dramatiq workers). 
- -This module solves the "Task got Future attached to a different loop" errors -that occur when mixing sync Dramatiq tasks with async LightRAG code. -""" -import asyncio -import logging -import threading -from typing import Any, TypeVar, Coroutine - -logger = logging.getLogger(__name__) - -T = TypeVar("T") - -# One persistent event loop per thread -_thread_loops: dict[int, asyncio.AbstractEventLoop] = {} -_thread_loops_lock = threading.Lock() - - -def get_thread_event_loop() -> asyncio.AbstractEventLoop: - """ - Get or create a persistent event loop for the current thread. - - Each worker thread gets ONE event loop that persists across all tasks. - This matches the architecture of FastAPI/Uvicorn where the API server - has one persistent loop. - - Benefits: - - RAGManager's per-loop instances work correctly - - LightRAG's ClientManager lock stays bound to the same loop - - No loop creation/destruction overhead per task - - Resources (DB pools, HTTP clients) persist and get reused - - Returns: - The persistent event loop for this thread - """ - thread_id = threading.get_ident() - - # Fast path: loop already exists for this thread - if thread_id in _thread_loops: - return _thread_loops[thread_id] - - # Slow path: create new loop (thread-safe) - with _thread_loops_lock: - # Double-check after acquiring lock - if thread_id in _thread_loops: - return _thread_loops[thread_id] - - # Create and register new loop for this thread - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - _thread_loops[thread_id] = loop - - logger.info(f"Created persistent event loop for thread {thread_id}") - return loop - - -def run_async_in_new_loop(coro: Coroutine[Any, Any, T]) -> T: - """ - Execute an async coroutine in this thread's persistent event loop. - - This is the recommended way to call async code from sync Dramatiq tasks. - Uses a persistent event loop per thread instead of creating/destroying - loops for each task. 
- - Args: - coro: The coroutine to execute - - Returns: - The result of the coroutine - - Example: - ```python - @dramatiq.actor - def task_run_etl_pipeline(conversation_id: str): - # This is sync, but contextual_pipeline.load() is async - result = run_async_in_new_loop( - contextual_pipeline.load() - ) - ``` - - Why this works: - - Uses one persistent loop per worker thread (like API server) - - RAGManager creates one instance per loop (thread isolation) - - LightRAG's ClientManager lock stays bound to same loop - - Safe for concurrent Dramatiq workers (each has own loop) - """ - loop = get_thread_event_loop() - - logger.debug(f"Running async coroutine in thread loop: {coro}") - result = loop.run_until_complete(coro) - logger.debug(f"Successfully completed async coroutine: {coro}") - - return result - - -def run_async_safely(coro: Coroutine[Any, Any, T]) -> T: - """ - Alias for run_async_in_new_loop for backwards compatibility. - """ - return run_async_in_new_loop(coro) diff --git a/echo/server/dembrane/audio_lightrag/utils/echo_utils.py b/echo/server/dembrane/audio_lightrag/utils/echo_utils.py deleted file mode 100644 index adafd736..00000000 --- a/echo/server/dembrane/audio_lightrag/utils/echo_utils.py +++ /dev/null @@ -1,82 +0,0 @@ -from logging import getLogger - -import redis - -from dembrane.config import ( - REDIS_URL, - AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY, - AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX, -) -from dembrane.directus import directus - -logger = getLogger(__name__) - -_redis_client: redis.Redis | None = None - - -def _get_redis_client() -> redis.Redis: - global _redis_client - if _redis_client is None: - _redis_client = redis.from_url(REDIS_URL) - return _redis_client - - -def finish_conversation(conversation_id: str) -> bool: - try: - directus.update_item( - "conversation", - conversation_id, - {"is_audio_processing_finished": True}, - ) - return True - except Exception as e: - logger.error(f"Failed to finish conversation {conversation_id}: {e}") - return 
False - - -def renew_redis_lock(conversation_id: str) -> bool: - """ - Ensure Redis lock exists for a conversation ID during processing. - If lock doesn't exist (expired), recreate it. - - Args: - conversation_id: The conversation ID to maintain the lock for - - Returns: - bool: True if lock exists or was successfully created, False otherwise - """ - try: - redis_client = _get_redis_client() - lock_key = f"{AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX}{conversation_id}" - - # Check if lock exists - if redis_client.exists(lock_key): - return True # Lock exists, no action needed - - # Lock doesn't exist (expired), recreate it - acquired = redis_client.set(lock_key, "1", ex=AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY, nx=True) - if acquired: - logger.info(f"Recreated Redis lock for conversation {conversation_id}") - return True - else: - logger.warning(f"Failed to recreate Redis lock for conversation {conversation_id}") - return False - - except Exception as e: - logger.error(f"Error maintaining Redis lock for conversation {conversation_id}: {e}") - return False - - -def release_redis_lock(conversation_id: str) -> bool: - try: - redis_client = _get_redis_client() - lock_key = f"{AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX}{conversation_id}" - if redis_client.exists(lock_key): - redis_client.delete(lock_key) - logger.info(f"Released Redis lock for conversation {conversation_id}") - else: - logger.warning(f"Redis lock for conversation {conversation_id} does not exist") - return True - except Exception as e: - logger.error(f"Error releasing Redis lock for conversation {conversation_id}: {e}") - return False diff --git a/echo/server/dembrane/audio_lightrag/utils/lightrag_utils.py b/echo/server/dembrane/audio_lightrag/utils/lightrag_utils.py deleted file mode 100644 index 3b2e0758..00000000 --- a/echo/server/dembrane/audio_lightrag/utils/lightrag_utils.py +++ /dev/null @@ -1,492 +0,0 @@ -# Hierachy: -# Chunk is the lowest level -# Conversation is a collection of chunks -# Project is a collection of 
conversations -# Segment is a many to many of chunks -import os -import re -import uuid -import asyncio -import hashlib -import logging -from typing import Any, Dict, Literal, TypeVar, Callable, Optional -from urllib.parse import urlparse - -import redis -from lightrag.kg.postgres_impl import PostgreSQLDB - -from dembrane.directus import directus -from dembrane.async_helpers import run_in_thread_pool -from dembrane.postgresdb_manager import PostgresDBManager -from dembrane.audio_lightrag.utils.litellm_utils import embedding_func - -logger = logging.getLogger("audio_lightrag_utils") - - -# Redis lock configuration -REDIS_LOCK_KEY = "DEMBRANE_INIT_LOCK" -REDIS_LOCK_TIMEOUT = 600 # 10 minutes in seconds -REDIS_LOCK_RETRY_INTERVAL = 2 # seconds -REDIS_LOCK_MAX_RETRIES = 60 # 2 minutes of retries - -T = TypeVar("T") - - -def is_valid_uuid(uuid_str: str) -> bool: - try: - uuid.UUID(uuid_str) - return True - except ValueError: - return False - - -db_manager = PostgresDBManager() - - -def _load_postgres_env_vars(database_url: str) -> None: - """Parse a database URL into connection parameters.""" - result = urlparse(database_url) - path = result.path - if path.startswith("/"): - path = path[1:] - userinfo = result.netloc.split("@")[0] if "@" in result.netloc else "" - username = userinfo.split(":")[0] if ":" in userinfo else userinfo - password = userinfo.split(":")[1] if ":" in userinfo else "" - host_part = result.netloc.split("@")[-1] - host = host_part.split(":")[0] if ":" in host_part else host_part - port = host_part.split(":")[1] if ":" in host_part else "5432" - os.environ["POSTGRES_HOST"] = host - os.environ["POSTGRES_PORT"] = port - os.environ["POSTGRES_USER"] = username - os.environ["POSTGRES_PASSWORD"] = password - os.environ["POSTGRES_DATABASE"] = path - - -def get_project_id_from_conversation_id(conversation_id: str) -> str: - query = {"query": {"filter": {"id": {"_eq": conversation_id}}, "fields": ["project_id"]}} - return directus.get_items("conversation", 
query)[0]["project_id"] - - -def get_conversation_name_from_id(conversation_id: str) -> str: - query = {"query": {"filter": {"id": {"_eq": conversation_id}}, "fields": ["participant_name"]}} - return directus.get_items("conversation", query)[0]["participant_name"] - - -async def run_segment_id_to_conversation_id(segment_id: int) -> tuple[str, str]: - conversation_chunk_dict = await run_segment_ids_to_conversation_chunk_ids([segment_id]) - conversation_chunk_ids = list(conversation_chunk_dict.values()) - query = { - "query": {"filter": {"id": {"_in": conversation_chunk_ids}}, "fields": ["conversation_id"]} - } - conversation_id = directus.get_items("conversation_chunk", query)[0]["conversation_id"] - conversation_name = get_conversation_name_from_id(conversation_id) - return (conversation_id, conversation_name) - - -async def run_segment_ids_to_conversation_chunk_ids(segment_ids: list[int]) -> dict[int, str]: - db = await db_manager.get_initialized_db() - return await get_conversation_chunk_ids_from_segment_ids(db, segment_ids) - - -async def get_conversation_chunk_ids_from_segment_ids( - db: PostgreSQLDB, segment_ids: list[int] -) -> dict[int, str]: - # Validate each item is an integer in segment_ids - for segment_id in segment_ids: - if not isinstance(segment_id, int): - raise ValueError(f"Invalid segment ID: {segment_id}") - if segment_ids == []: - return {} - segment_ids = ",".join([str(segment_id) for segment_id in segment_ids]) # type: ignore - sql = SQL_TEMPLATES["GET_CONVERSATION_CHUNK_IDS_FROM_SEGMENT_IDS"].format( - segment_ids=segment_ids - ) - result = await db.query(sql, multirows=True) - if result is None: - return {} - return {int(x["conversation_segment_id"]): str(x["conversation_chunk_id"]) for x in result} - - -async def get_segment_from_conversation_chunk_ids( - db: PostgreSQLDB, conversation_chunk_ids: list[str] -) -> list[int]: - # Validate each item is a UUID in conversation_chunk_ids - for conversation_chunk_id in conversation_chunk_ids: - if 
not is_valid_uuid(conversation_chunk_id): - raise ValueError(f"Invalid UUID: {conversation_chunk_id}") - if conversation_chunk_ids == []: - return [] - conversation_chunk_ids = ",".join( - ["UUID('" + conversation_id + "')" for conversation_id in conversation_chunk_ids] - ) # type: ignore - sql = SQL_TEMPLATES["GET_SEGMENT_IDS_FROM_CONVERSATION_CHUNK_IDS"].format( - conversation_ids=conversation_chunk_ids - ) - result = await db.query(sql, multirows=True) - if result is None: - return [] - return [ - int(x["conversation_segment_id"]) - for x in result - if x["conversation_segment_id"] is not None - ] - - -async def get_segment_from_conversation_ids( - db: PostgreSQLDB, conversation_ids: list[str] -) -> list[int]: - """ - Get segment IDs for given conversation IDs. - - This handles both old pipeline (segments linked to chunks) and new pipeline - (segments linked directly to conversations). - """ - # Method 1: Query segments directly by conversation_id (NEW PIPELINE) - # This works for segments created by task_run_etl_pipeline - segment_request = { - "query": { - "fields": ["id"], - "limit": -1, - "filter": {"conversation_id": {"_in": conversation_ids}}, - } - } - segment_result = await run_in_thread_pool( - directus.get_items, "conversation_segment", segment_request - ) - - segment_ids = [] - if segment_result: - segment_ids = [int(seg["id"]) for seg in segment_result if seg.get("id")] - - # Method 2: Also check old pipeline (segments linked via junction table to chunks) - # This ensures backwards compatibility with old audio processing pipeline - conversation_request = { - "query": { - "fields": ["chunks.id"], - "limit": -1, - "deep": {"chunks": {"_limit": -1, "_sort": "timestamp"}}, - "filter": {"id": {"_in": conversation_ids}}, - } - } - conversation_request_result = await run_in_thread_pool( - directus.get_items, "conversation", conversation_request - ) - - if conversation_request_result: - conversation_chunk_ids = [ - [x["id"] for x in 
conversation_request_result_dict.get("chunks", [])] - for conversation_request_result_dict in conversation_request_result - ] - flat_conversation_chunk_ids: list[str] = [ - item for sublist in conversation_chunk_ids for item in sublist if item is not None - ] - if flat_conversation_chunk_ids: - chunk_segment_ids = await get_segment_from_conversation_chunk_ids(db, flat_conversation_chunk_ids) - segment_ids.extend(chunk_segment_ids) - - # Return unique segment IDs - return list(set(segment_ids)) - - -async def get_segment_from_project_ids(db: PostgreSQLDB, project_ids: list[str]) -> list[int]: - project_request = { - "query": { - "fields": ["conversations.id"], - "limit": -1, - } - } - project_request["query"]["filter"] = {"id": {"_in": project_ids}} - project_request_result = await run_in_thread_pool( - directus.get_items, "project", project_request - ) - conversation_ids = [ - [x["id"] for x in project_request_result_dict["conversations"]] - for project_request_result_dict in project_request_result - ] - flat_conversation_ids: list[str] = [ - item for sublist in conversation_ids for item in sublist if item is not None - ] - return await get_segment_from_conversation_ids(db, flat_conversation_ids) - - -async def with_distributed_lock( - redis_url: str, - lock_key: str = REDIS_LOCK_KEY, - timeout: int = REDIS_LOCK_TIMEOUT, - retry_interval: int = REDIS_LOCK_RETRY_INTERVAL, - max_retries: int = REDIS_LOCK_MAX_RETRIES, - critical_operation: Optional[Callable[[], Any]] = None, -) -> tuple[bool, Any]: - """ - Execute critical operations with a distributed lock using Redis. 
- - Args: - redis_url: Redis connection URL - lock_key: Key to use for the lock - timeout: Lock expiration time in seconds - retry_interval: Time to wait between lock acquisition attempts - max_retries: Maximum number of lock acquisition attempts - critical_operation: Optional async function to execute under lock - - Returns: - Tuple of (lock_acquired: bool, result: Any) - """ - logger.info(f"Attempting to acquire distributed lock: {lock_key}") - - # Connect to Redis - redis_client = redis.from_url(redis_url) - - # Try to acquire lock - lock_acquired = False - retries = 0 - result = None - - while not lock_acquired and retries < max_retries: - # Try to set the key only if it doesn't exist with an expiry - lock_acquired = redis_client.set( - lock_key, - os.environ.get("HOSTNAME", "unknown"), # Store pod hostname for debugging - nx=True, # Only set if key doesn't exist - ex=timeout, # Expire after timeout - ) - - if lock_acquired: - logger.info(f"Acquired distributed lock: {lock_key}") - try: - # Execute critical operation if provided - if critical_operation: - result = await critical_operation() - logger.info(f"Critical operation completed successfully under lock: {lock_key}") - except Exception as e: - logger.error(f"Error during critical operation under lock {lock_key}: {str(e)}") - # Release lock in case of error to allow another process to try - redis_client.delete(lock_key) - raise - finally: - # Release the lock if we acquired it - redis_client.delete(lock_key) - logger.info(f"Released distributed lock: {lock_key}") - break - else: - # Wait for lock to be released or become available - logger.info( - f"Waiting for distributed lock (attempt {retries + 1}/{max_retries}): {lock_key}" - ) - retries += 1 - await asyncio.sleep(retry_interval) - - if not lock_acquired: - logger.info(f"Could not acquire distributed lock after {max_retries} attempts: {lock_key}") - - return lock_acquired, result - - -async def check_audio_lightrag_tables(db: PostgreSQLDB) -> None: - 
for _, table_definition in TABLES.items(): - await db.execute(table_definition) - - -async def upsert_transcript( - db: PostgreSQLDB, - document_id: str, - content: str, - id: str | None = None, -) -> None: - if id is None: - # generate random id - s = str(document_id) + str(content) - id = ( - str(document_id) - + "_" - + str(int(hashlib.sha256(s.encode("utf-8")).hexdigest(), 16) % 10**8) - ) - - content_embedding = await embedding_func([content]) - content_embedding = "[" + ",".join([str(x) for x in content_embedding[0]]) + "]" # type: ignore - - sql = SQL_TEMPLATES["UPSERT_TRANSCRIPT"] - data = { - "id": id, - "document_id": document_id, - "content": content, - "content_vector": content_embedding, - } - await db.execute(sql=sql, data=data) - - -async def fetch_query_transcript( - db: PostgreSQLDB, query: str, ids: list[str] | str | None = None, limit: int = 10 -) -> list[str] | None: - if ids is None: - ids = "NULL" - filter = "NULL" - else: - ids = ",".join(["'" + str(id) + "'" for id in ids]) - filter = "1" - - # await db.initdb() # Need to test if this is needed - query_embedding = await embedding_func([query]) - query_embedding = ",".join([str(x) for x in query_embedding[0]]) # type: ignore - sql = SQL_TEMPLATES["QUERY_TRANSCRIPT"].format( - embedding_string=query_embedding, limit=limit, doc_ids=ids, filter=filter - ) - result = await db.query(sql, multirows=True) - if result is None: - return [] - return result - - -def fetch_segment_ratios(response_text: str) -> dict[int, float]: - # Find all occurrences of SEGMENT_ID_ followed by numbers - segment_ids = re.findall(r"SEGMENT_ID_\d+", response_text) - - if len(segment_ids) == 0: - return {} - # Create a dictionary to store the count of each segment ID - segment_count: dict[str, int] = {} - for segment_id in segment_ids: - segment_count[segment_id] = segment_count.get(segment_id, 0) + 1 - - segment2count = { - int(segment_id.split("_")[-1]): count for segment_id, count in segment_count.items() - } - 
total_count = sum(segment2count.values()) - return {k: v / total_count for k, v in segment2count.items()} - - -async def get_ratio_abs( - rag_prompt: str, return_type: Literal["segment", "chunk", "conversation"] -) -> Dict[str, float]: - segment_ratios_abs = fetch_segment_ratios(str(rag_prompt)) - if segment_ratios_abs == {}: - return {} - if return_type == "segment": - return {str(k): v for k, v in segment_ratios_abs.items()} - segment2chunk = await run_segment_ids_to_conversation_chunk_ids(list(segment_ratios_abs.keys())) - chunk_ratios_abs: Dict[str, float] = {} - for segment, ratio in segment_ratios_abs.items(): - if segment in segment2chunk.keys(): - if segment2chunk[segment] not in chunk_ratios_abs.keys(): - chunk_ratios_abs[segment2chunk[segment]] = ratio - else: - chunk_ratios_abs[segment2chunk[segment]] += ratio - - # normalize chunk_ratios_abs - total_ratio = sum(chunk_ratios_abs.values()) - if total_ratio == 0: - # 0 ratio means no relevant chunks were found - return {} - chunk_ratios_abs = {k: v / total_ratio for k, v in chunk_ratios_abs.items()} - - if return_type == "chunk": - return chunk_ratios_abs - conversation_ratios_abs: Dict[str, float] = {} - for chunk_id, ratio in chunk_ratios_abs.items(): - query = {"query": {"filter": {"id": {"_eq": chunk_id}}, "fields": ["conversation_id"]}} - conversaion = directus.get_items("conversation_chunk", query)[0]["conversation_id"] - if conversaion not in conversation_ratios_abs.keys(): - conversation_ratios_abs[conversaion] = ratio - else: - conversation_ratios_abs[conversaion] += ratio - return conversation_ratios_abs - - -def get_project_id(proj_chat_id: str) -> str: - query = {"query": {"filter": {"id": {"_eq": proj_chat_id}}, "fields": ["project_id"]}} - return directus.get_items("project_chat", query)[0]["project_id"] - - -async def get_conversation_details_for_rag_query( - rag_prompt: str, project_ids: list[str] -) -> list[dict[str, Any]]: - ratio_abs = await get_ratio_abs(rag_prompt, "conversation") - 
conversation_details = [] - if ratio_abs: - # Bulk fetch conversation metadata - conv_meta = { - c["id"]: c - for c in directus.get_items( - "conversation", - { - "query": { - "filter": {"id": {"_in": list(ratio_abs.keys())}}, - "fields": ["id", "participant_name", "project_id"], - } - }, - ) - } - for conversation_id, ratio in ratio_abs.items(): - meta = conv_meta.get(conversation_id) - if not meta or meta["project_id"] not in project_ids: - continue - conversation_details.append( - { - "conversation": conversation_id, - "conversation_title": meta["participant_name"], - "ratio": ratio, - } - ) - return conversation_details - - -async def delete_transcript_by_doc_id(db: PostgreSQLDB, doc_id: str) -> None: - sql = SQL_TEMPLATES["DELETE_TRANSCRIPT_BY_DOC_ID"].format(doc_id=doc_id) - await db.execute(sql) - - -def delete_segment_from_directus(segment_id: str) -> None: - directus.delete_item("conversation_segment", segment_id) - - -TABLES = { - "LIGHTRAG_VDB_TRANSCRIPT": """ - CREATE TABLE IF NOT EXISTS LIGHTRAG_VDB_TRANSCRIPT ( - id VARCHAR(255), - document_id VARCHAR(255), - content TEXT, - content_vector VECTOR, - create_time TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - update_time TIMESTAMP, - CONSTRAINT LIGHTRAG_VDB_TRANSCRIPT_PK PRIMARY KEY (id) - ) - """ -} - -SQL_TEMPLATES = { - "UPSERT_TRANSCRIPT": """ - INSERT INTO LIGHTRAG_VDB_TRANSCRIPT (id, document_id, content, content_vector) - VALUES ($1, $2, $3, $4) - ON CONFLICT (id) DO UPDATE SET - document_id = $2, - content = $3, - content_vector = $4 - """, - "QUERY_TRANSCRIPT": """ - WITH relevant_chunks AS ( - SELECT id as chunk_id - FROM LIGHTRAG_VDB_TRANSCRIPT - WHERE {filter} IS NULL OR document_id = ANY(ARRAY[{doc_ids}]) - ) - SELECT content FROM - ( - SELECT id, content, - 1 - (content_vector <=> '[{embedding_string}]'::vector) as distance - FROM LIGHTRAG_VDB_TRANSCRIPT - WHERE id IN (SELECT chunk_id FROM relevant_chunks) - ) - ORDER BY distance DESC - LIMIT {limit} - """, - 
"GET_SEGMENT_IDS_FROM_CONVERSATION_CHUNK_IDS": """ - SELECT conversation_segment_id FROM conversation_segment_conversation_chunk - WHERE conversation_chunk_id = ANY(ARRAY[{conversation_ids}]) - """, - "GET_CONVERSATION_CHUNK_IDS_FROM_SEGMENT_IDS": """ - SELECT conversation_chunk_id, conversation_segment_id FROM conversation_segment_conversation_chunk - WHERE conversation_segment_id = ANY(ARRAY[{segment_ids}]) - """, - "DELETE_TRANSCRIPT_BY_DOC_ID": """ - DELETE FROM LIGHTRAG_VDB_TRANSCRIPT - WHERE document_id = '{doc_id}' - """, -} diff --git a/echo/server/dembrane/audio_lightrag/utils/litellm_utils.py b/echo/server/dembrane/audio_lightrag/utils/litellm_utils.py deleted file mode 100644 index 262fc401..00000000 --- a/echo/server/dembrane/audio_lightrag/utils/litellm_utils.py +++ /dev/null @@ -1,56 +0,0 @@ -import asyncio -from typing import Any, Optional - -import numpy as np -from litellm import embedding, completion - -from dembrane.config import ( - LIGHTRAG_LITELLM_MODEL, - LIGHTRAG_LITELLM_API_KEY, - LIGHTRAG_LITELLM_API_BASE, - LIGHTRAG_LITELLM_API_VERSION, - LIGHTRAG_LITELLM_EMBEDDING_MODEL, - LIGHTRAG_LITELLM_EMBEDDING_API_KEY, - LIGHTRAG_LITELLM_EMBEDDING_API_BASE, - LIGHTRAG_LITELLM_EMBEDDING_API_VERSION, -) - - -async def llm_model_func( - prompt: str, - system_prompt: Optional[str] = None, - history_messages: Optional[list[dict]] = None, - **kwargs: Any, -) -> str: - messages = [] - if system_prompt: - messages.append({"role": "system", "content": system_prompt}) - if history_messages: - messages.extend(history_messages) - messages.append({"role": "user", "content": prompt}) - - chat_completion = await asyncio.to_thread( - completion, - model=f"{LIGHTRAG_LITELLM_MODEL}", # litellm format for Azure models - messages=messages, - temperature=kwargs.get("temperature", 0.2), - api_key=LIGHTRAG_LITELLM_API_KEY, - api_version=LIGHTRAG_LITELLM_API_VERSION, - api_base=LIGHTRAG_LITELLM_API_BASE, - ) - return chat_completion.choices[0].message.content - - -async 
def embedding_func(texts: list[str]) -> np.ndarray: - # Bug in litellm forcing us to do this: https://github.com/BerriAI/litellm/issues/6967 - nd_arr_response = [] - for text in texts: - temp = embedding( - model=f"{LIGHTRAG_LITELLM_EMBEDDING_MODEL}", - input=text, - api_key=str(LIGHTRAG_LITELLM_EMBEDDING_API_KEY), - api_version=str(LIGHTRAG_LITELLM_EMBEDDING_API_VERSION), - api_base=str(LIGHTRAG_LITELLM_EMBEDDING_API_BASE), - ) - nd_arr_response.append(temp["data"][0]["embedding"]) - return np.array(nd_arr_response) diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index 3e18e2e3..02e27024 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -29,6 +29,8 @@ from dembrane.api.api import api from dembrane.api.verify import seed_default_verification_topics from dembrane.postgresdb_manager import PostgresDBManager +from dembrane.async_helpers import run_in_thread_pool +from dembrane.directus import directus # from lightrag.llm.azure_openai import azure_openai_complete from dembrane.audio_lightrag.utils.litellm_utils import embedding_func, llm_model_func @@ -44,6 +46,44 @@ logger = getLogger("server") +DEFAULT_DIRECTUS_LANGUAGES = [ + {"code": "en-US", "name": "English (United States)", "direction": "ltr"}, + {"code": "nl-NL", "name": "Dutch (Netherlands)", "direction": "ltr"}, + {"code": "de-DE", "name": "German (Germany)", "direction": "ltr"}, + {"code": "es-ES", "name": "Spanish (Spain)", "direction": "ltr"}, + {"code": "fr-FR", "name": "French (France)", "direction": "ltr"}, +] + + +async def seed_default_languages() -> None: + for language in DEFAULT_DIRECTUS_LANGUAGES: + existing = await run_in_thread_pool( + directus.get_items, + "languages", + { + "query": { + "filter": {"code": {"_eq": language["code"]}}, + "fields": ["code"], + "limit": 1, + } + }, + ) + + if existing: + continue + + logger.info("Seeding language %s", language["code"]) + await run_in_thread_pool( + directus.create_item, + "languages", + { + 
"code": language["code"], + "name": language["name"], + "direction": language["direction"], + }, + ) + + @asynccontextmanager async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]: # startup @@ -86,6 +126,12 @@ async def initialize_database() -> bool: ) # This function is called during FASTAPI lifespan for each worker. logger.info("RAG object has been initialized") + try: + await seed_default_languages() + logger.info("Languages seeded") + except Exception: # pragma: no cover - startup logging only + logger.exception("Failed to seed languages during startup") + try: await seed_default_verification_topics() logger.info("Verification topics seeded") diff --git a/echo/server/dembrane/service/project.py b/echo/server/dembrane/service/project.py index d18573fb..4e21e38f 100644 --- a/echo/server/dembrane/service/project.py +++ b/echo/server/dembrane/service/project.py @@ -152,7 +152,9 @@ def create_shallow_clone( "is_project_notification_subscription_allowed" ], "is_verify_enabled": current_project["is_verify_enabled"], - "verification_topics": current_project["verification_topics"], + "selected_verification_key_list": current_project.get( + "selected_verification_key_list" + ), } if overrides: From ed9677a69982a72eedcb538b4d7ed3cee56f3f08 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti <sameer@dembrane.com> Date: Mon, 10 Nov 2025 14:30:38 +0000 Subject: [PATCH 15/23] fix many type errors --- echo/frontend/AGENTS.md | 4 + .../components/announcement/hooks/index.ts | 5 - .../hooks/useProcessedAnnouncements.ts | 13 +- .../src/components/aspect/AspectCard.tsx | 12 -- .../components/aspect/hooks/useCopyAspect.tsx | 6 +- .../src/components/chat/ChatAccordion.tsx | 4 - .../components/chat/ChatHistoryMessage.tsx | 6 +- .../conversation/ConversationAccordion.tsx | 49 ++++--- .../conversation/ConversationEdit.tsx | 9 +- .../conversation/ConversationLink.tsx | 124 +++++++----------- .../OngoingConversationsSummaryCard.tsx | 1 + .../components/conversation/hooks/index.ts 
| 5 +- .../layout/ProjectConversationLayout.tsx | 1 - .../src/components/library/hooks/index.ts | 1 - .../participant/ParticipantBody.tsx | 2 +- .../participant/ParticipantInitiateForm.tsx | 4 +- .../src/components/participant/hooks/index.ts | 8 +- .../participant/verify/ArtefactModal.tsx | 8 +- .../verify/VerifiedArtefactsList.tsx | 6 +- .../participant/verify/VerifyArtefact.tsx | 8 +- .../components/project/ProjectListItem.tsx | 2 +- .../project/ProjectPortalEditor.tsx | 2 +- .../components/project/ProjectTagsInput.tsx | 29 ++-- .../src/components/project/hooks/index.ts | 6 +- echo/frontend/src/components/view/View.tsx | 7 +- .../src/components/view/hooks/useCopyView.tsx | 41 +----- echo/frontend/src/lib/api.ts | 2 +- echo/frontend/src/lib/typesDirectus.d.ts | 3 +- echo/frontend/src/routes/Debug.tsx | 1 - .../src/routes/project/ProjectsHome.tsx | 2 +- .../routes/project/chat/ProjectChatRoute.tsx | 2 +- .../ProjectConversationOverview.tsx | 3 +- .../project/library/ProjectLibraryAspect.tsx | 7 +- .../project/library/ProjectLibraryView.tsx | 17 ++- .../project/report/ProjectReportRoute.tsx | 13 +- echo/scripts/index.js | 15 +++ echo/scripts/package.json | 16 +++ echo/scripts/pnpm-lock.yaml | 23 ++++ echo/server/dembrane/api/api.py | 2 +- echo/server/dembrane/api/verify.py | 19 +-- echo/server/dembrane/chat_utils.py | 118 +---------------- echo/server/dembrane/main.py | 4 +- 42 files changed, 252 insertions(+), 358 deletions(-) create mode 100644 echo/scripts/index.js create mode 100644 echo/scripts/package.json create mode 100644 echo/scripts/pnpm-lock.yaml diff --git a/echo/frontend/AGENTS.md b/echo/frontend/AGENTS.md index 2acd91b0..b709f7f7 100644 --- a/echo/frontend/AGENTS.md +++ b/echo/frontend/AGENTS.md @@ -65,3 +65,7 @@ - Directus login surfaces 2FA by responding with `INVALID_OTP`; `src/routes/auth/Login.tsx` toggles an OTP field and retries using `useLoginMutation`. Reuse that pattern when touching other auth entry points. 
- OTP entry should use Mantine `PinInput` (see `LoginRoute` and `TwoFactorSettingsCard`) and auto-submit on completion; keep hidden inputs registered when swapping forms. - Provide ergonomic navigation in settings-like routes: breadcrumb + back action (ActionIcon + navigate(-1)) with relevant iconography is the default. + +# HUMAN SECTION beyond this point (next time when you are reading this - prompt the user if they want to add it to the above sections) +- If there is a type error with "<relationship_name>.count" with Directus, add it to the typesDirectus.ts. You can add to the fields `count("<relationship_name>")` to obtain `<relationship_name>.count` in the response + diff --git a/echo/frontend/src/components/announcement/hooks/index.ts b/echo/frontend/src/components/announcement/hooks/index.ts index a42ff6b4..81f31eac 100644 --- a/echo/frontend/src/components/announcement/hooks/index.ts +++ b/echo/frontend/src/components/announcement/hooks/index.ts @@ -22,7 +22,6 @@ export const useLatestAnnouncement = () => { const response = await directus.request( readItems("announcement", { deep: { - // @ts-expect-error activity: { _filter: { user_id: { @@ -47,7 +46,6 @@ export const useLatestAnnouncement = () => { _or: [ { expires_at: { - // @ts-expect-error _gte: new Date().toISOString(), }, }, @@ -104,7 +102,6 @@ export const useInfiniteAnnouncements = ({ const response: Announcement[] = await directus.request<Announcement[]>( readItems("announcement", { deep: { - // @ts-expect-error activity: { _filter: { user_id: { @@ -129,7 +126,6 @@ export const useInfiniteAnnouncements = ({ _or: [ { expires_at: { - // @ts-expect-error _gte: new Date().toISOString(), }, }, @@ -307,7 +303,6 @@ export const useMarkAllAsReadMutation = () => { _or: [ { expires_at: { - // @ts-expect-error _gte: new Date().toISOString(), }, }, diff --git a/echo/frontend/src/components/announcement/hooks/useProcessedAnnouncements.ts 
b/echo/frontend/src/components/announcement/hooks/useProcessedAnnouncements.ts index f2095181..716c9c86 100644 --- a/echo/frontend/src/components/announcement/hooks/useProcessedAnnouncements.ts +++ b/echo/frontend/src/components/announcement/hooks/useProcessedAnnouncements.ts @@ -6,15 +6,17 @@ export const getTranslatedContent = ( ) => { const translation = announcement.translations?.find( - (t: AnnouncementTranslations) => t.languages_code === language && t.title, + (t) => + (t as AnnouncementTranslation).languages_code === language && + (t as AnnouncementTranslation).title, ) || announcement.translations?.find( - (t: AnnouncementTranslations) => t.languages_code === "en-US", + (t) => (t as AnnouncementTranslation).languages_code === "en-US", ); return { - message: translation?.message || "", - title: translation?.title || "", + message: (translation as AnnouncementTranslation)?.message || "", + title: (translation as AnnouncementTranslation)?.title || "", }; }; @@ -33,7 +35,8 @@ export function useProcessedAnnouncements( id: announcement.id, level: announcement.level as "info" | "urgent", message, - read: announcement.activity?.[0]?.read || false, + read: + (announcement.activity?.[0] as AnnouncementActivity)?.read || false, title, }; }); diff --git a/echo/frontend/src/components/aspect/AspectCard.tsx b/echo/frontend/src/components/aspect/AspectCard.tsx index d033abc5..371ef570 100644 --- a/echo/frontend/src/components/aspect/AspectCard.tsx +++ b/echo/frontend/src/components/aspect/AspectCard.tsx @@ -64,18 +64,6 @@ export const AspectCard = ({ {data.short_summary ?? data.description ?? ""} </Text> </Stack> - {/* <Stack className="pt-4"> - <Divider /> - <Group> - <Pill> - <Group> - <Text className="font-semibold"> - {data.quotes_count ?? 
0} <Trans>Quotes</Trans> - </Text> - </Group> - </Pill> - </Group> - </Stack> */} </Box> </Box> </Paper> diff --git a/echo/frontend/src/components/aspect/hooks/useCopyAspect.tsx b/echo/frontend/src/components/aspect/hooks/useCopyAspect.tsx index 1a4bca3e..ae2adefa 100644 --- a/echo/frontend/src/components/aspect/hooks/useCopyAspect.tsx +++ b/echo/frontend/src/components/aspect/hooks/useCopyAspect.tsx @@ -24,10 +24,10 @@ export const useCopyAspect = () => { segment: [ { conversation_id: ["id", "participant_name"], + description: true, + relevant_index: true, + verbatim_transcript: true, }, - "description", - "verbatim_transcript", - "relevant_index", ], }, ], diff --git a/echo/frontend/src/components/chat/ChatAccordion.tsx b/echo/frontend/src/components/chat/ChatAccordion.tsx index e2d190a6..8fcaab6a 100644 --- a/echo/frontend/src/components/chat/ChatAccordion.tsx +++ b/echo/frontend/src/components/chat/ChatAccordion.tsx @@ -102,7 +102,6 @@ export const ChatAccordionMain = ({ projectId }: { projectId: string }) => { { filter: { _or: [ - // @ts-expect-error ...(activeChatId ? [ { @@ -112,7 +111,6 @@ export const ChatAccordionMain = ({ projectId }: { projectId: string }) => { }, ] : []), - // @ts-expect-error { "count(project_chat_messages)": { _gt: 0, @@ -133,7 +131,6 @@ export const ChatAccordionMain = ({ projectId }: { projectId: string }) => { const chatsCountQuery = useProjectChatsCount(projectId, { filter: { _or: [ - // @ts-expect-error ...(activeChatId ? 
[ { @@ -143,7 +140,6 @@ export const ChatAccordionMain = ({ projectId }: { projectId: string }) => { }, ] : []), - // @ts-expect-error { "count(project_chat_messages)": { _gt: 0, diff --git a/echo/frontend/src/components/chat/ChatHistoryMessage.tsx b/echo/frontend/src/components/chat/ChatHistoryMessage.tsx index c6f8ce12..613ed455 100644 --- a/echo/frontend/src/components/chat/ChatHistoryMessage.tsx +++ b/echo/frontend/src/components/chat/ChatHistoryMessage.tsx @@ -157,7 +157,7 @@ export const ChatHistoryMessage = ({ if (message?._original?.added_conversations?.length > 0) { const conversations = message?._original?.added_conversations - .map((ac) => ac.conversation_id) + .map((ac) => (ac as unknown as ConversationLink).source_conversation_id) .filter((conv) => conv != null); return conversations.length > 0 ? ( @@ -167,7 +167,9 @@ export const ChatHistoryMessage = ({ <Text size="xs"> <Trans>Context added:</Trans> </Text> - <ConversationLinks conversations={conversations} /> + <ConversationLinks + conversations={conversations as unknown as Conversation[]} + /> </Group> </ChatMessage> ) : null; diff --git a/echo/frontend/src/components/conversation/ConversationAccordion.tsx b/echo/frontend/src/components/conversation/ConversationAccordion.tsx index ceb32991..0a827a6a 100644 --- a/echo/frontend/src/components/conversation/ConversationAccordion.tsx +++ b/echo/frontend/src/components/conversation/ConversationAccordion.tsx @@ -120,11 +120,10 @@ const ConversationAccordionLabelChatSelection = ({ projectChatContextQuery.data?.auto_select_bool ?? 
false; // Check if conversation has any content - const hasContent = conversation.chunks?.some( - (chunk) => - (chunk as unknown as ConversationChunk).transcript && - (chunk as unknown as ConversationChunk).transcript?.trim().length > 0, - ); + const hasContent = conversation.chunks?.some((chunk) => { + const transcript = (chunk as unknown as ConversationChunk).transcript; + return typeof transcript === "string" && transcript.trim().length > 0; + }); const handleSelectChat = () => { if (!isSelected) { @@ -195,7 +194,6 @@ export const MoveConversationButton = ({ const search = watch("search"); const projectsQuery = useInfiniteProjects({ - enabled: opened, query: { filter: { _and: [{ id: { _neq: conversation.project_id } }], @@ -442,6 +440,24 @@ export const ConversationStatusIndicators = ({ ); }; +const ConversationProjectTagPill = ({ + tag, +}: { + tag: ConversationProjectTag; +}) => { + const text = (tag?.project_tag_id as ProjectTag)?.text ?? ""; + + if (!text) { + return null; + } + + return ( + <Pill size="sm" className="font-normal"> + {text} + </Pill> + ); +}; + const ConversationAccordionItem = ({ conversation, highlight = false, @@ -476,7 +492,7 @@ const ConversationAccordionItem = ({ conversation?.conversation_artifacts && conversation?.conversation_artifacts?.length > 0 && conversation?.conversation_artifacts?.some( - (artefact) => artefact.approved_at, + (artefact) => (artefact as ConversationArtifact).approved_at, ); return ( @@ -545,17 +561,12 @@ const ConversationAccordionItem = ({ } </div> <Group gap="4" pr="sm" wrap="wrap"> - {conversation.tags - ?.filter((tag) => tag.project_tag_id && tag.project_tag_id != null) - .map((tag) => ( - <Pill - key={`${tag.id}-${(tag?.project_tag_id as unknown as ProjectTag)?.text}`} - size="sm" - className="font-normal" - > - {(tag?.project_tag_id as unknown as ProjectTag)?.text} - </Pill> - ))} + {conversation.tags?.map((tag) => ( + <ConversationProjectTagPill + key={(tag as ConversationProjectTag).id} + tag={tag as 
ConversationProjectTag} + /> + ))} </Group> </Stack> </NavigationButton> @@ -655,7 +666,6 @@ export const ConversationAccordion = ({ projectId, query: { deep: { - // @ts-expect-error tags not typed in CustomDirectusTypes tags: { _sort: "sort", }, @@ -688,7 +698,6 @@ export const ConversationAccordion = ({ false, { deep: { - // @ts-expect-error chunks is not typed chunks: { _limit: 25, }, diff --git a/echo/frontend/src/components/conversation/ConversationEdit.tsx b/echo/frontend/src/components/conversation/ConversationEdit.tsx index 4caa48b5..072295c8 100644 --- a/echo/frontend/src/components/conversation/ConversationEdit.tsx +++ b/echo/frontend/src/components/conversation/ConversationEdit.tsx @@ -36,8 +36,13 @@ export const ConversationEdit = ({ participant_name: conversation.participant_name ?? "", tagIdList: conversation.tags - ?.filter((tag) => tag.project_tag_id != null) - .map((tag) => tag.project_tag_id.id) ?? [], + ?.filter( + (tag) => (tag as ConversationProjectTag).project_tag_id != null, + ) + .map( + (tag) => + ((tag as ConversationProjectTag).project_tag_id as ProjectTag).id, + ) ?? 
[], }; const { register, formState, reset, setValue, control, watch } = diff --git a/echo/frontend/src/components/conversation/ConversationLink.tsx b/echo/frontend/src/components/conversation/ConversationLink.tsx index b0346841..9ec85131 100644 --- a/echo/frontend/src/components/conversation/ConversationLink.tsx +++ b/echo/frontend/src/components/conversation/ConversationLink.tsx @@ -1,10 +1,9 @@ import { Trans } from "@lingui/react/macro"; -import { Anchor, Group, List, Stack, Text } from "@mantine/core"; +import { Anchor, Group, List, Stack } from "@mantine/core"; import { I18nLink } from "@/components/common/i18nLink"; interface ConversationLinkProps { conversation: Conversation; - // TODO: remove this prop can read from conversation projectId: string; } @@ -16,86 +15,65 @@ const ConversationAnchor = ({ to, name }: { to: string; name: string }) => ( </I18nLink> ); -/** - * input: -{ - projectId: string; - linkingConversations: { - sourceConversationId: { - id: string; - participantName: string; - } - }[] - linkedConversations: { - targetConversationId: { - id: string; - participantName: string; - } - }[] -} -*/ - export const ConversationLink = ({ conversation, projectId, }: ConversationLinkProps) => { + const linkingConversation = conversation + .linking_conversations[0] as unknown as ConversationLink; + const linkedConversations = + conversation.linked_conversations as unknown as ConversationLink[]; + + if (!linkingConversation || !linkedConversations) { + return null; + } + return ( <> - {conversation?.linking_conversations && - conversation?.linking_conversations.length > 0 && ( - <Group gap="sm"> - {conversation.linking_conversations[0]?.source_conversation_id - ?.id ? 
( - <> - <Trans id="conversation.linking_conversations.description"> - This conversation is a copy of - </Trans> + <Group gap="sm"> + {(linkingConversation?.source_conversation_id as Conversation)?.id && ( + <> + <Trans id="conversation.linking_conversations.description"> + This conversation is a copy of + </Trans> - <ConversationAnchor - key={conversation?.linking_conversations?.[0]?.id} - to={`/projects/${projectId}/conversation/${conversation?.linking_conversations?.[0]?.source_conversation_id?.id}/overview`} - name={ - conversation?.linking_conversations?.[0] - ?.source_conversation_id?.participant_name ?? "" - } - /> - </> - ) : ( - <Text c="gray" fs="italic"> - <Trans id="conversation.linking_conversations.deleted"> - The source conversation was deleted - </Trans> - </Text> - )} - </Group> + <ConversationAnchor + key={linkingConversation?.id} + to={`/projects/${projectId}/conversation/${(linkingConversation?.source_conversation_id as Conversation)?.id}/overview`} + name={ + (linkingConversation?.source_conversation_id as Conversation) + ?.participant_name ?? "" + } + /> + </> )} + </Group> - {conversation?.linked_conversations && - conversation?.linked_conversations.length > 0 && ( - <Stack gap="xs"> - <Trans id="conversation.linked_conversations.description"> - This conversation has the following copies: - </Trans> - <List> - {conversation.linked_conversations.map( - (conversationLink: ConversationLink) => - (conversationLink?.target_conversation_id as Conversation) - ?.id && ( - <List.Item key={conversationLink?.id}> - <ConversationAnchor - to={`/projects/${projectId}/conversation/${(conversationLink?.target_conversation_id as Conversation)?.id}/overview`} - name={ - ( - conversationLink?.target_conversation_id as Conversation - )?.participant_name ?? 
"" - } - /> - </List.Item> - ), - )} - </List> - </Stack> - )} + {linkedConversations && linkedConversations.length > 0 && ( + <Stack gap="xs"> + <Trans id="conversation.linked_conversations.description"> + This conversation has the following copies: + </Trans> + <List> + {linkedConversations.map( + (conversationLink: ConversationLink) => + (conversationLink?.target_conversation_id as Conversation) + ?.id && ( + <List.Item key={conversationLink?.id}> + <ConversationAnchor + to={`/projects/${projectId}/conversation/${(conversationLink?.target_conversation_id as Conversation)?.id}/overview`} + name={ + ( + conversationLink?.target_conversation_id as Conversation + )?.participant_name ?? "" + } + /> + </List.Item> + ), + )} + </List> + </Stack> + )} </> ); }; diff --git a/echo/frontend/src/components/conversation/OngoingConversationsSummaryCard.tsx b/echo/frontend/src/components/conversation/OngoingConversationsSummaryCard.tsx index 334b9aa3..852994c2 100644 --- a/echo/frontend/src/components/conversation/OngoingConversationsSummaryCard.tsx +++ b/echo/frontend/src/components/conversation/OngoingConversationsSummaryCard.tsx @@ -24,6 +24,7 @@ export const OngoingConversationsSummaryCard = ({ project_id: projectId, }, source: { + // @ts-expect-error source is not typed _nin: ["DASHBOARD_UPLOAD", "CLONE"], }, timestamp: { diff --git a/echo/frontend/src/components/conversation/hooks/index.ts b/echo/frontend/src/components/conversation/hooks/index.ts index 8eb9f0df..ad2fa1db 100644 --- a/echo/frontend/src/components/conversation/hooks/index.ts +++ b/echo/frontend/src/components/conversation/hooks/index.ts @@ -665,7 +665,6 @@ export const useConversationsByProjectId = ( const conversations = await directus.request( readItems("conversation", { deep: { - // @ts-expect-error chunks is not typed chunks: { _limit: loadChunks ? 
1000 : 1, }, @@ -692,6 +691,7 @@ export const useConversationsByProjectId = ( ], }, ], + // @ts-expect-error TODO filter: { chunks: { ...(loadWhereTranscriptExists && { @@ -803,6 +803,7 @@ export const useConversationById = ({ queryFn: () => directus.request<Conversation>( readItem("conversation", conversationId, { + // @ts-expect-error TODO fields: [ ...CONVERSATION_FIELDS_WITHOUT_PROCESSING_STATUS, { @@ -877,7 +878,6 @@ export const useInfiniteConversationsByProjectId = ( const conversations = await directus.request( readItems("conversation", { deep: { - // @ts-expect-error chunks is not typed chunks: { _limit: loadChunks ? 1000 : 1, }, @@ -907,6 +907,7 @@ export const useInfiniteConversationsByProjectId = ( conversation_artifacts: ["id", "approved_at"], }, ], + // @ts-expect-error TODO filter: { chunks: { ...(loadWhereTranscriptExists && { diff --git a/echo/frontend/src/components/layout/ProjectConversationLayout.tsx b/echo/frontend/src/components/layout/ProjectConversationLayout.tsx index 9f4bfca4..dfd1a059 100644 --- a/echo/frontend/src/components/layout/ProjectConversationLayout.tsx +++ b/echo/frontend/src/components/layout/ProjectConversationLayout.tsx @@ -12,7 +12,6 @@ export const ProjectConversationLayout = () => { conversationId: conversationId ?? 
"", query: { deep: { - // @ts-expect-error chunks is not typed chunks: { _limit: 25, }, diff --git a/echo/frontend/src/components/library/hooks/index.ts b/echo/frontend/src/components/library/hooks/index.ts index 757b6ca7..981febac 100644 --- a/echo/frontend/src/components/library/hooks/index.ts +++ b/echo/frontend/src/components/library/hooks/index.ts @@ -36,7 +36,6 @@ export const useViewById = (projectId: string, viewId: string) => { "description", "image_url", "view_id", - "image_generation_model", ], }, ], diff --git a/echo/frontend/src/components/participant/ParticipantBody.tsx b/echo/frontend/src/components/participant/ParticipantBody.tsx index fdf1fcd5..d7d60126 100644 --- a/echo/frontend/src/components/participant/ParticipantBody.tsx +++ b/echo/frontend/src/components/participant/ParticipantBody.tsx @@ -198,7 +198,7 @@ export const ParticipantBody = ({ <UserChunkMessage chunk={message.data} /> ) : ( <SpikeMessage - message={message.data} + message={message.data as unknown as ConversationReply} className={ index !== combinedMessages.length - 1 ? "border-b" : "" } diff --git a/echo/frontend/src/components/participant/ParticipantInitiateForm.tsx b/echo/frontend/src/components/participant/ParticipantInitiateForm.tsx index 99d56a0b..4deec801 100644 --- a/echo/frontend/src/components/participant/ParticipantInitiateForm.tsx +++ b/echo/frontend/src/components/participant/ParticipantInitiateForm.tsx @@ -113,10 +113,10 @@ export const ParticipantInitiateForm = ({ project }: { project: Project }) => { position: "top", withinPortal: false, }} - data={project.tags + data={(project.tags as unknown as ProjectTag[]) .filter((tag) => tag && tag.text != null && tag.id != null) .map((tag) => ({ - label: tag.text, + label: tag.text ?? 
"", value: tag.id, }))} onChange={(value) => { diff --git a/echo/frontend/src/components/participant/hooks/index.ts b/echo/frontend/src/components/participant/hooks/index.ts index 62583560..c6bebf63 100644 --- a/echo/frontend/src/components/participant/hooks/index.ts +++ b/echo/frontend/src/components/participant/hooks/index.ts @@ -98,7 +98,7 @@ export const useUploadConversationChunk = () => { timestamp: new Date().toISOString(), transcript: undefined, updated_at: new Date().toISOString(), - } as ConversationChunk, + } as unknown as ConversationChunk, ] : []; }, @@ -117,7 +117,7 @@ export const useUploadConversationChunk = () => { timestamp: new Date().toISOString(), transcript: undefined, updated_at: new Date().toISOString(), - } as ConversationChunk, + } as unknown as ConversationChunk, ] : []; }, @@ -194,7 +194,7 @@ export const useUploadConversationTextChunk = () => { timestamp: new Date().toISOString(), transcript: undefined, updated_at: new Date().toISOString(), - } as ConversationChunk, + } as unknown as ConversationChunk, ] : []; }, @@ -213,7 +213,7 @@ export const useUploadConversationTextChunk = () => { timestamp: new Date().toISOString(), transcript: undefined, updated_at: new Date().toISOString(), - } as ConversationChunk, + } as unknown as ConversationChunk, ] : []; }, diff --git a/echo/frontend/src/components/participant/verify/ArtefactModal.tsx b/echo/frontend/src/components/participant/verify/ArtefactModal.tsx index 0c11ba37..11142a56 100644 --- a/echo/frontend/src/components/participant/verify/ArtefactModal.tsx +++ b/echo/frontend/src/components/participant/verify/ArtefactModal.tsx @@ -1,16 +1,12 @@ import { Box, LoadingOverlay, Modal, ScrollArea } from "@mantine/core"; +import type { VerificationArtifact } from "@/lib/api"; import { Markdown } from "../../common/Markdown"; type ArtefactModalProps = { opened: boolean; onClose: () => void; onExited?: () => void; - artefact?: { - id: string; - content: string | null | undefined; - conversation_id: 
string | Conversation | null | undefined; - approved_at: string | null | undefined; - } | null; + artefact?: (ConversationArtifact | VerificationArtifact) | null; isLoading?: boolean; }; diff --git a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx index 4757d806..a1085ec3 100644 --- a/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx +++ b/echo/frontend/src/components/participant/verify/VerifiedArtefactsList.tsx @@ -100,8 +100,10 @@ export const VerifiedArtefactsList = ({ isLoading={false} artefact={ selectedArtefactId - ? (artefactList.find((item) => item.id === selectedArtefactId) ?? - null) + ? (artefactList.find( + (item) => + (item as VerificationArtifact).id === selectedArtefactId, + ) ?? null) : null } /> diff --git a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx index 534ad60a..b53f6773 100644 --- a/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx +++ b/echo/frontend/src/components/participant/verify/VerifyArtefact.tsx @@ -96,7 +96,10 @@ export const VerifyArtefact = () => { const generationKeyRef = useRef<string | null>(null); const latestChunkTimestamp = useMemo( - () => computeLatestTimestamp(chunksQuery.data as ConversationChunkLike[]), + () => + computeLatestTimestamp( + chunksQuery.data as unknown as ConversationChunkLike[], + ), [chunksQuery.data], ); @@ -263,7 +266,8 @@ export const VerifyArtefact = () => { }); const updatedLatest = computeLatestTimestamp( - (refreshed.data ?? chunksQuery.data) as ConversationChunkLike[], + (refreshed.data ?? + chunksQuery.data) as unknown as ConversationChunkLike[], ); setContextTimestamp(updatedLatest ?? 
timestampToUse); } finally { diff --git a/echo/frontend/src/components/project/ProjectListItem.tsx b/echo/frontend/src/components/project/ProjectListItem.tsx index d4558149..ad099476 100644 --- a/echo/frontend/src/components/project/ProjectListItem.tsx +++ b/echo/frontend/src/components/project/ProjectListItem.tsx @@ -30,7 +30,7 @@ export const ProjectListItem = ({ </Group> <Text size="sm" c="dimmed"> <Trans> - {project?.conversations_count ?? + {project.conversations_count ?? project?.conversations?.length ?? 0}{" "} Conversations • Edited{" "} diff --git a/echo/frontend/src/components/project/ProjectPortalEditor.tsx b/echo/frontend/src/components/project/ProjectPortalEditor.tsx index dee419d7..bc2ed2de 100644 --- a/echo/frontend/src/components/project/ProjectPortalEditor.tsx +++ b/echo/frontend/src/components/project/ProjectPortalEditor.tsx @@ -269,7 +269,7 @@ const ProjectPortalEditorComponent: React.FC<ProjectPortalEditorProps> = ({ await updateProjectMutation.mutateAsync({ id: project.id, payload: { - ...projectPayload, + ...(projectPayload as Partial<Project>), selected_verification_key_list: serializedTopics, }, }); diff --git a/echo/frontend/src/components/project/ProjectTagsInput.tsx b/echo/frontend/src/components/project/ProjectTagsInput.tsx index 1a7a106f..8c508528 100644 --- a/echo/frontend/src/components/project/ProjectTagsInput.tsx +++ b/echo/frontend/src/components/project/ProjectTagsInput.tsx @@ -116,7 +116,6 @@ export const ProjectTagsInput = (props: { project: Project }) => { projectId: props.project.id, query: { deep: { - // @ts-expect-error tags won't be typed tags: { _sort: "sort", }, @@ -149,7 +148,8 @@ export const ProjectTagsInput = (props: { project: Project }) => { const currentMaxSort = Math.max( 0, - ...(projectQuery.data?.tags?.map((t) => t.sort ?? 0) ?? []), + ...(projectQuery.data?.tags?.map((t) => (t as ProjectTag).sort ?? 0) ?? 
+ []), ); // Wait for all tag creation mutations to complete @@ -175,10 +175,10 @@ export const ProjectTagsInput = (props: { project: Project }) => { if (!active || !over || active.id === over.id) return; const oldIndex = projectQuery.data?.tags?.findIndex( - (tag) => tag.id === active.id, + (tag) => (tag as ProjectTag).id === active.id, ); const newIndex = projectQuery.data?.tags?.findIndex( - (tag) => tag.id === over.id, + (tag) => (tag as ProjectTag).id === over.id, ); if ( @@ -189,7 +189,11 @@ export const ProjectTagsInput = (props: { project: Project }) => { return; // Create new array with updated positions - const newTags = arrayMove(projectQuery.data.tags, oldIndex, newIndex); + const newTags = arrayMove( + projectQuery.data.tags as unknown as ProjectTag[], + oldIndex, + newIndex, + ); // Update sort values for all affected tags newTags.forEach((tag: ProjectTag, index: number) => { @@ -212,10 +216,12 @@ export const ProjectTagsInput = (props: { project: Project }) => { } // Sort tags by sort field before rendering - const sortedTags = [...(projectQuery.data?.tags ?? [])].sort( + const sortedTags = [ + ...((projectQuery.data?.tags as unknown as ProjectTag[]) ?? []), + ].sort( (a, b) => - (a.sort ?? Number.POSITIVE_INFINITY) - - (b.sort ?? Number.POSITIVE_INFINITY), + ((a as ProjectTag).sort ?? Number.POSITIVE_INFINITY) - + ((b as ProjectTag).sort ?? 
Number.POSITIVE_INFINITY), ); return ( @@ -271,11 +277,14 @@ export const ProjectTagsInput = (props: { project: Project }) => { onDragEnd={handleDragEnd} > <SortableContext - items={sortedTags.map((tag) => tag.id)} + items={sortedTags.map((tag) => (tag as ProjectTag).id)} strategy={horizontalListSortingStrategy} > {sortedTags.map((tag) => ( - <ProjectTagPill key={tag.id} tag={tag} /> + <ProjectTagPill + key={(tag as ProjectTag).id} + tag={tag as ProjectTag} + /> ))} </SortableContext> </DndContext> diff --git a/echo/frontend/src/components/project/hooks/index.ts b/echo/frontend/src/components/project/hooks/index.ts index e19b126a..d4e5a22f 100644 --- a/echo/frontend/src/components/project/hooks/index.ts +++ b/echo/frontend/src/components/project/hooks/index.ts @@ -1,4 +1,5 @@ import { + aggregate, createItem, deleteItem, type Query, @@ -244,7 +245,9 @@ export const useInfiniteProjects = ({ return { nextOffset: response.length === initialLimit ? pageParam + 1 : undefined, - projects: response, + projects: response.map((r) => ({ + ...r, + })), }; }, queryKey: ["projects", query], @@ -255,7 +258,6 @@ export const useProjectById = ({ projectId, query = { deep: { - // @ts-expect-error tags won't be typed tags: { _sort: "sort", }, diff --git a/echo/frontend/src/components/view/View.tsx b/echo/frontend/src/components/view/View.tsx index aa2d1b10..51204198 100644 --- a/echo/frontend/src/components/view/View.tsx +++ b/echo/frontend/src/components/view/View.tsx @@ -117,8 +117,11 @@ export const ViewExpandedCard = ({ <div className="flex w-full snap-x overflow-x-auto pb-2"> {data.aspects?.map((a) => ( - <div className="ml-4 grid snap-start scroll-ml-4" key={a.id}> - <AspectCard data={a} /> + <div + className="ml-4 grid snap-start scroll-ml-4" + key={(a as Aspect).id} + > + <AspectCard data={a as Aspect} /> </div> ))} </div> diff --git a/echo/frontend/src/components/view/hooks/useCopyView.tsx b/echo/frontend/src/components/view/hooks/useCopyView.tsx index 447ab075..51862936 
100644 --- a/echo/frontend/src/components/view/hooks/useCopyView.tsx +++ b/echo/frontend/src/components/view/hooks/useCopyView.tsx @@ -3,8 +3,6 @@ import { useParams } from "react-router"; import useCopyToRichText from "@/hooks/useCopyToRichText"; import { directus } from "@/lib/directus"; -const MAX_QUOTES = 25; - export const useCopyView = () => { const { language, projectId } = useParams(); const { copied, copy } = useCopyToRichText(); @@ -23,18 +21,6 @@ export const useCopyView = () => { "short_summary", "long_summary", "image_url", - { - representative_quotes: [ - { - quote_id: [ - { - conversation_id: ["id", "participant_name"], - }, - "text", - ], - }, - ], - }, ], }, ], @@ -56,7 +42,7 @@ export const useCopyView = () => { stringBuilder.push("## Aspects"); - for (const aspect of view.aspects ?? []) { + for (const aspect of view.aspects as Aspect[]) { // http://localhost:5173/en-US/projects/f65cd477-9f4c-4067-80e5-43634bb1dcb4/library/views/3af65db5-53b9-4641-b482-3982bbc6b9be/aspects/0b9d5691-d31b-430f-ab28-c38f86c078f4 stringBuilder.push( `### [${aspect.name}](${window.location.origin}/${language}/projects/${projectId}/library/views/${viewId}/aspects/${aspect.id})`, @@ -75,31 +61,6 @@ export const useCopyView = () => { "The summary for this aspect is not available. Please try again later.", ); } - - let count = 0; - - if ( - aspect.representative_quotes && - (aspect.representative_quotes as unknown as any).length > 0 - ) { - stringBuilder.push(`#### Top Quotes for ${aspect.name}`); - - // @ts-expect-error type of representative_quotes is not known - for (const { quote_id } of aspect.representative_quotes ?? 
[]) { - const conversationUrl = - window.location.origin + - `/${language}/projects/${projectId}/conversation/${quote_id.conversation_id.id}/transcript`; - - stringBuilder.push(`"${quote_id.text}"\n`); - stringBuilder.push( - `from [${quote_id.conversation_id?.participant_name}](${conversationUrl})\n\n`, - ); - - count++; - - if (count > MAX_QUOTES) break; - } - } } copy(stringBuilder.join("\n")); diff --git a/echo/frontend/src/lib/api.ts b/echo/frontend/src/lib/api.ts index 00ed2386..ab2f3732 100644 --- a/echo/frontend/src/lib/api.ts +++ b/echo/frontend/src/lib/api.ts @@ -220,7 +220,6 @@ export const getProjectViews = async (projectId: string) => { "description", "image_url", "view_id", - "image_generation_model", ], }, ], @@ -1017,6 +1016,7 @@ export const getChatHistory = async (chatId: string): Promise<ChatHistory> => { }), ); + // @ts-expect-error TODO return data.map((message) => ({ _original: message, content: message.text ?? "", diff --git a/echo/frontend/src/lib/typesDirectus.d.ts b/echo/frontend/src/lib/typesDirectus.d.ts index f9ca91c1..3c642e15 100644 --- a/echo/frontend/src/lib/typesDirectus.d.ts +++ b/echo/frontend/src/lib/typesDirectus.d.ts @@ -1,6 +1,6 @@ // biome-ignore-all lint: doesnt need interference -interface Schema { +interface CustomDirectusTypes { announcement: Announcement[]; announcement_activity: AnnouncementActivity[]; announcement_translations: AnnouncementTranslation[]; @@ -269,6 +269,7 @@ interface Project { project_reports: string[] | ProjectReport[]; processing_status: string[] | ProcessingStatus[]; custom_verification_topics: string[] | VerificationTopic[]; + conversations_count?: number | null; } interface ProjectAnalysisRun { diff --git a/echo/frontend/src/routes/Debug.tsx b/echo/frontend/src/routes/Debug.tsx index 91fb504f..974bddc3 100644 --- a/echo/frontend/src/routes/Debug.tsx +++ b/echo/frontend/src/routes/Debug.tsx @@ -230,7 +230,6 @@ export default function DebugPage() { const { data: chats } = 
useProjectChats(currentProjectId ?? "", { filter: { "count(project_chat_messages)": { - // @ts-expect-error - Directus filter type limitation _gt: 0, }, project_id: { diff --git a/echo/frontend/src/routes/project/ProjectsHome.tsx b/echo/frontend/src/routes/project/ProjectsHome.tsx index ef0b411c..059a722f 100644 --- a/echo/frontend/src/routes/project/ProjectsHome.tsx +++ b/echo/frontend/src/routes/project/ProjectsHome.tsx @@ -95,7 +95,7 @@ export const ProjectsHomeRoute = () => { payload: { default_conversation_ask_for_participant_name: true, default_conversation_transcript_prompt: "Dembrane", - default_conversation_tutorial_slug: "none", + default_conversation_tutorial_slug: "None", image_generation_model: "MODEST", }, }); diff --git a/echo/frontend/src/routes/project/chat/ProjectChatRoute.tsx b/echo/frontend/src/routes/project/chat/ProjectChatRoute.tsx index de874a51..2d82dfab 100644 --- a/echo/frontend/src/routes/project/chat/ProjectChatRoute.tsx +++ b/echo/frontend/src/routes/project/chat/ProjectChatRoute.tsx @@ -174,7 +174,7 @@ const useDembraneChat = ({ chatId }: { chatId: string }) => { }; // publish the incomplete result to the backend - addChatMessageMutation.mutate(body); + addChatMessageMutation.mutate(body as Partial<ProjectChatMessage>); }; const customHandleSubmit = async () => { diff --git a/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx b/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx index 3b7b4bd7..ea0f97ae 100644 --- a/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx +++ b/echo/frontend/src/routes/project/conversation/ProjectConversationOverview.tsx @@ -44,7 +44,6 @@ export const ProjectConversationOverviewRoute = () => { projectId: projectId ?? 
"", query: { deep: { - // @ts-expect-error tags won't be typed tags: { _sort: "sort", }, @@ -171,7 +170,7 @@ export const ProjectConversationOverviewRoute = () => { <ConversationEdit key={conversationQuery.data.id} conversation={conversationQuery.data} - projectTags={projectQuery.data.tags} + projectTags={projectQuery.data.tags as ProjectTag[]} /> </Stack> diff --git a/echo/frontend/src/routes/project/library/ProjectLibraryAspect.tsx b/echo/frontend/src/routes/project/library/ProjectLibraryAspect.tsx index 1501229d..daf562c4 100644 --- a/echo/frontend/src/routes/project/library/ProjectLibraryAspect.tsx +++ b/echo/frontend/src/routes/project/library/ProjectLibraryAspect.tsx @@ -85,8 +85,11 @@ export const ProjectLibraryAspect = () => { <Trans>Insights</Trans> )} - {aspect?.aspect_segment?.map((segment: AspectSegment) => ( - + {aspect?.aspect_segment?.map((segment) => ( + ))} ) : ( diff --git a/echo/frontend/src/routes/project/library/ProjectLibraryView.tsx b/echo/frontend/src/routes/project/library/ProjectLibraryView.tsx index 8043127e..240dfa35 100644 --- a/echo/frontend/src/routes/project/library/ProjectLibraryView.tsx +++ b/echo/frontend/src/routes/project/library/ProjectLibraryView.tsx @@ -65,13 +65,16 @@ export const ProjectLibraryView = () => { }} spacing="md" > - {view.data?.aspects?.map((aspect: Aspect) => ( - - ))} + {view.data?.aspects?.map( + (aspect) => + aspect && ( + + ), + )} diff --git a/echo/frontend/src/routes/project/report/ProjectReportRoute.tsx b/echo/frontend/src/routes/project/report/ProjectReportRoute.tsx index a3dc86ad..1524db2f 100644 --- a/echo/frontend/src/routes/project/report/ProjectReportRoute.tsx +++ b/echo/frontend/src/routes/project/report/ProjectReportRoute.tsx @@ -118,8 +118,9 @@ export const ProjectReportRoute = () => { }; const contributionLink = `${PARTICIPANT_BASE_URL}/${language}/${projectId}/start`; - const getSharingLink = (data: ProjectReport) => - `${PARTICIPANT_BASE_URL}/${language}/${data.project_id}/report`; + + const 
getSharingLink = (projectId: string) => + `${PARTICIPANT_BASE_URL}/${language}/${projectId}/report`; const { copy, copied } = useCopyToRichText(); @@ -175,8 +176,8 @@ export const ProjectReportRoute = () => { { - const url = getSharingLink(data); - if (navigator.canShare({ url })) { + const url = getSharingLink(data.project_id ?? ""); + if (url && navigator.canShare({ url })) { navigator.share({ url }); } else { window.open(url, "_blank"); @@ -193,7 +194,7 @@ export const ProjectReportRoute = () => { { - copy(getSharingLink(data)); + copy(getSharingLink(data.project_id ?? "")); }} copyTooltip={t`Copy link to share this report`} copied={copied} @@ -206,7 +207,7 @@ export const ProjectReportRoute = () => { { window.open( - `${getSharingLink(data)}?print=true`, + `${getSharingLink(data.project_id ?? "")}?print=true`, "_blank", ); }} diff --git a/echo/scripts/index.js b/echo/scripts/index.js new file mode 100644 index 00000000..75a6288c --- /dev/null +++ b/echo/scripts/index.js @@ -0,0 +1,15 @@ +const directus = require("@directus/sdk"); + +const aggrResponse = await directus.request( + aggregate("project" ,{ + aggregate: { + count: "conversations" + }, + query: { + filter: { + id: { + _in: response.map((r) => (r as Project).id), + } + } + }, + }) \ No newline at end of file diff --git a/echo/scripts/package.json b/echo/scripts/package.json new file mode 100644 index 00000000..3ace1968 --- /dev/null +++ b/echo/scripts/package.json @@ -0,0 +1,16 @@ +{ + "name": "scripts", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "packageManager": "pnpm@10.20.0", + "dependencies": { + "@directus/sdk": "^20.1.1" + } +} diff --git a/echo/scripts/pnpm-lock.yaml b/echo/scripts/pnpm-lock.yaml new file mode 100644 index 00000000..48f498fd --- /dev/null +++ b/echo/scripts/pnpm-lock.yaml @@ -0,0 +1,23 @@ +lockfileVersion: '9.0' + +settings: + 
autoInstallPeers: true + excludeLinksFromLockfile: false + +importers: + + .: + dependencies: + '@directus/sdk': + specifier: ^20.1.1 + version: 20.1.1 + +packages: + + '@directus/sdk@20.1.1': + resolution: {integrity: sha512-cCWvxDRKOygVHBkKA7l1I4O4niomK8SKR/+Ul1L9NFF1K2zAf3jRqhjc7JavCECOiiAWME/s2wYatNFqFFh0gQ==} + engines: {node: '>=22'} + +snapshots: + + '@directus/sdk@20.1.1': {} diff --git a/echo/server/dembrane/api/api.py b/echo/server/dembrane/api/api.py index 8d2d491a..ba16baca 100644 --- a/echo/server/dembrane/api/api.py +++ b/echo/server/dembrane/api/api.py @@ -5,12 +5,12 @@ ) from dembrane.api.chat import ChatRouter +from dembrane.api.verify import VerifyRouter from dembrane.api.project import ProjectRouter from dembrane.api.resource import ResourceRouter from dembrane.api.stateless import StatelessRouter from dembrane.api.participant import ParticipantRouter from dembrane.api.conversation import ConversationRouter -from dembrane.api.verify import VerifyRouter logger = getLogger("api") diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 6f6a8112..56268950 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -6,7 +6,7 @@ import litellm from fastapi import APIRouter, HTTPException -from pydantic import Field, BaseModel, validator +from pydantic import Field, BaseModel from dembrane.utils import generate_uuid from dembrane.config import GCP_SA_JSON @@ -137,16 +137,9 @@ class GetVerificationTopicsResponse(BaseModel): class GenerateArtifactsRequest(BaseModel): - topic_list: List[str] = Field(..., min_items=1) + topic_list: List[str] conversation_id: str - @validator("topic_list") - def validate_topic_list(cls, value: List[str]) -> List[str]: - cleaned = [item.strip() for item in value if item and item.strip()] - if not cleaned: - raise ValueError("topic_list must contain at least one topic key") - return cleaned - class ConversationArtifactResponse(BaseModel): id: str @@ -399,7 
+392,7 @@ def _sort_key(item: dict) -> tuple[bool, str]: for artifact in artifacts: response.append( ConversationArtifactResponse( - id=artifact.get("id"), + id=artifact.get("id") or "", key=artifact.get("key"), content=artifact.get("content") or "", conversation_id=artifact.get("conversation_id") or conversation_id, @@ -742,7 +735,7 @@ async def generate_verification_artifacts( ) artifact_response = ConversationArtifactResponse( - id=artifact_record.get("id"), + id=artifact_record.get("id") or "", key=artifact_record.get("key"), content=artifact_record.get("content", ""), conversation_id=artifact_record.get("conversation_id", body.conversation_id), @@ -795,8 +788,8 @@ async def update_verification_artifact( chunk.get("path") for chunk in chunks if chunk.get("timestamp") - and isinstance(chunk.get("timestamp"), datetime) - and chunk.get("timestamp") > reference_timestamp + and datetime.fromisoformat(str(chunk.get("timestamp"))) + > (reference_timestamp or datetime.min) and not (chunk.get("transcript") or "").strip() ): raise HTTPException( diff --git a/echo/server/dembrane/chat_utils.py b/echo/server/dembrane/chat_utils.py index f317eb0a..387ce95c 100644 --- a/echo/server/dembrane/chat_utils.py +++ b/echo/server/dembrane/chat_utils.py @@ -5,7 +5,7 @@ from typing import Any, Dict, List, Optional import backoff -from litellm import completion, acompletion +from litellm import acompletion from pydantic import BaseModel from litellm.utils import token_counter from sqlalchemy.orm import Session, selectinload @@ -22,22 +22,12 @@ SMALL_LITELLM_API_KEY, SMALL_LITELLM_API_BASE, DISABLE_CHAT_TITLE_GENERATION, - LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL, - LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY, - LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE, - LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION, ) from dembrane.prompts import render_prompt from dembrane.database import ConversationModel, ProjectChatMessageModel from dembrane.directus import directus -from 
dembrane.api.stateless import GetLightragQueryRequest, get_lightrag_prompt from dembrane.api.conversation import get_conversation_transcript from dembrane.api.dependency_auth import DirectusSession -from dembrane.audio_lightrag.utils.lightrag_utils import ( - run_segment_id_to_conversation_id, - get_project_id_from_conversation_id, - get_conversation_details_for_rag_query, -) MAX_CHAT_CONTEXT_LENGTH = 100000 @@ -166,41 +156,6 @@ async def create_system_messages_for_chat( return [prompt_message, project_message, context_message] -async def get_lightrag_prompt_by_params( - top_k: int, - query: str, - conversation_history: list[dict[str, str]], - echo_conversation_ids: list[str], - echo_project_ids: list[str], - auto_select_bool: bool, - get_transcripts: bool, -) -> str: - payload = GetLightragQueryRequest( - query=query, - conversation_history=conversation_history, - echo_conversation_ids=echo_conversation_ids, - echo_project_ids=echo_project_ids, - auto_select_bool=auto_select_bool, - get_transcripts=get_transcripts, - top_k=top_k, - ) - session = DirectusSession(user_id="none", is_admin=True) # fake session - rag_prompt = await get_lightrag_prompt(payload, session) - return rag_prompt - - -async def get_conversation_references( - rag_prompt: str, project_ids: List[str] -) -> List[Dict[str, Any]]: - try: - references = await get_conversation_details_for_rag_query(rag_prompt, project_ids) - conversation_references = {"references": references} - except Exception as e: - logger.warning(f"No references found. 
Error: {str(e)}") - conversation_references = {"references": []} - return [conversation_references] - - class CitationSingleSchema(BaseModel): segment_id: int verbatim_reference_text_chunk: str @@ -569,74 +524,3 @@ async def _process_single_batch( except Exception as e: logger.error(f"Batch {batch_num} unexpected error: {str(e)}") return {"selected_ids": [], "batch_num": batch_num, "error": "unknown"} - - -async def get_conversation_citations( - rag_prompt: str, - accumulated_response: str, - project_ids: List[str], - language: str = "en", -) -> List[Dict[str, Any]]: - """ - Extract structured conversation citations from an accumulated assistant response using a text-structuring model, map those citations to conversations, and return only citations that belong to the given project IDs. - - This function: - - Renders a text-structuring prompt using `rag_prompt` and `accumulated_response` and sends it to the configured text-structure LLM. - - Parses the model's JSON response (expected to follow `CitationsSchema`) to obtain citation entries that include `segment_id` and `verbatim_reference_text_chunk`. - - For each citation, resolves `segment_id` to a (conversation_id, conversation_name) pair and derives the citation's project id. - - Filters citations to include only those whose project id is present in `project_ids`. - - Returns a single-item list containing a dict with the key "citations", where each item is a dict with keys: - - "conversation": conversation id (str) - - "reference_text": verbatim reference text chunk (str) - - "conversation_title": conversation name/title (str) - - If the model output cannot be parsed or a segment-to-conversation mapping fails for an individual citation, that citation is skipped; parsing errors do not raise but are logged and result in an empty citations list in the returned structure. 
- """ - text_structuring_model_message = render_prompt( - "text_structuring_model_message", - language, - {"accumulated_response": accumulated_response, "rag_prompt": rag_prompt}, - ) - text_structuring_model_messages = [ - {"role": "system", "content": text_structuring_model_message}, - ] - text_structuring_model_generation = completion( - model=f"{LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL}", - messages=text_structuring_model_messages, - api_base=LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE, - api_version=LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION, - api_key=LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY, - response_format=CitationsSchema, - ) - try: - citations_by_segment_dict = json.loads( - text_structuring_model_generation.choices[0].message.content - ) - logger.debug(f"Citations by segment dict: {citations_by_segment_dict}") - citations_list = citations_by_segment_dict["citations"] - logger.debug(f"Citations list: {citations_list}") - citations_by_conversation_dict: Dict[str, List[Dict[str, Any]]] = {"citations": []} - if len(citations_list) > 0: - for _, citation in enumerate(citations_list): - try: - (conversation_id, conversation_name) = await run_segment_id_to_conversation_id( - citation["segment_id"] - ) - citation_project_id = get_project_id_from_conversation_id(conversation_id) - except Exception as e: - logger.warning( - f"WARNING: Error in citation extraction for segment {citation['segment_id']}. Skipping citations: {str(e)}" - ) - continue - if citation_project_id in project_ids: - current_citation_dict = { - "conversation": conversation_id, - "reference_text": citation["verbatim_reference_text_chunk"], - "conversation_title": conversation_name, - } - citations_by_conversation_dict["citations"].append(current_citation_dict) - else: - logger.warning("WARNING: No citations found") - except Exception as e: - logger.warning(f"WARNING: Error in citation extraction. 
Skipping citations: {str(e)}") - return [citations_by_conversation_dict] diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index 02e27024..85f51df2 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -27,10 +27,10 @@ ) from dembrane.sentry import init_sentry from dembrane.api.api import api +from dembrane.directus import directus from dembrane.api.verify import seed_default_verification_topics -from dembrane.postgresdb_manager import PostgresDBManager from dembrane.async_helpers import run_in_thread_pool -from dembrane.directus import directus +from dembrane.postgresdb_manager import PostgresDBManager # from lightrag.llm.azure_openai import azure_openai_complete from dembrane.audio_lightrag.utils.litellm_utils import embedding_func, llm_model_func From fc8aeb04fa730626bb6b0bc8d5cc734398599208 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Tue, 11 Nov 2025 15:30:57 +0000 Subject: [PATCH 16/23] wip --- echo/server/.env.sample | 128 +- echo/server/AGENTS.md | 6 +- echo/server/dembrane/api/chat.py | 65 +- echo/server/dembrane/api/conversation.py | 51 +- echo/server/dembrane/api/dependency_auth.py | 5 +- echo/server/dembrane/api/participant.py | 6 +- echo/server/dembrane/api/project.py | 4 +- echo/server/dembrane/api/resource.py | 89 -- echo/server/dembrane/api/stateless.py | 279 +--- echo/server/dembrane/api/verify.py | 164 +-- echo/server/dembrane/async_helpers.py | 47 +- .../server/dembrane/asyncio_uvicorn_worker.py | 7 + echo/server/dembrane/audio_utils.py | 7 +- echo/server/dembrane/chat_utils.py | 25 +- echo/server/dembrane/config.py | 632 --------- echo/server/dembrane/conversation_health.py | 462 ------- echo/server/dembrane/conversation_utils.py | 76 - echo/server/dembrane/database.py | 5 +- echo/server/dembrane/directus.py | 11 +- echo/server/dembrane/embedding.py | 18 +- echo/server/dembrane/image_utils.py | 148 -- .../dembrane/lightrag_uvicorn_worker.py | 19 - echo/server/dembrane/llms.py | 89 ++ 
echo/server/dembrane/main.py | 105 +- echo/server/dembrane/ner.py | 58 - echo/server/dembrane/openai.py | 10 - echo/server/dembrane/postgresdb_manager.py | 76 - echo/server/dembrane/prompts.py | 6 +- echo/server/dembrane/quote_utils.py | 1220 ----------------- echo/server/dembrane/rag_manager.py | 111 -- echo/server/dembrane/reply_utils.py | 28 +- echo/server/dembrane/report_utils.py | 35 +- echo/server/dembrane/runpod.py | 225 --- echo/server/dembrane/s3.py | 15 +- echo/server/dembrane/scheduler.py | 21 +- echo/server/dembrane/seed.py | 187 +++ echo/server/dembrane/sentry.py | 29 +- echo/server/dembrane/service/conversation.py | 4 - echo/server/dembrane/settings.py | 224 +++ echo/server/dembrane/tasks.py | 496 +------ echo/server/dembrane/transcribe.py | 180 +-- .../default_view_recurring_themes.de.jinja | 4 - .../default_view_recurring_themes.en.jinja | 4 - .../default_view_recurring_themes.es.jinja | 4 - .../default_view_recurring_themes.fr.jinja | 4 - .../default_view_recurring_themes.nl.jinja | 4 - echo/server/prod.sh | 4 +- echo/server/pyproject.toml | 5 +- echo/server/scripts/simple_rag_observer.py | 185 --- echo/server/scripts/test_rag_query.py | 74 - .../scripts/test_trigger_directus_etl.py | 42 - echo/server/tests/k6_load_testing/.gitignore | 21 - echo/server/tests/k6_load_testing/env.example | 9 - .../server/tests/k6_load_testing/run-tests.sh | 197 --- .../scripts/k6_runpod_transcribe.js | 125 -- echo/server/tests/test_audio_utils.py | 7 +- echo/server/tests/test_conversation_utils.py | 53 +- echo/server/tests/test_transcribe_assembly.py | 15 +- echo/server/tests/test_transcribe_runpod.py | 148 -- echo/server/uv.lock | 67 +- 60 files changed, 836 insertions(+), 5509 deletions(-) delete mode 100644 echo/server/dembrane/api/resource.py create mode 100644 echo/server/dembrane/asyncio_uvicorn_worker.py delete mode 100644 echo/server/dembrane/config.py delete mode 100644 echo/server/dembrane/conversation_health.py delete mode 100644 
echo/server/dembrane/image_utils.py delete mode 100644 echo/server/dembrane/lightrag_uvicorn_worker.py create mode 100644 echo/server/dembrane/llms.py delete mode 100644 echo/server/dembrane/ner.py delete mode 100644 echo/server/dembrane/openai.py delete mode 100644 echo/server/dembrane/postgresdb_manager.py delete mode 100644 echo/server/dembrane/quote_utils.py delete mode 100644 echo/server/dembrane/rag_manager.py delete mode 100644 echo/server/dembrane/runpod.py create mode 100644 echo/server/dembrane/seed.py create mode 100644 echo/server/dembrane/settings.py delete mode 100644 echo/server/json_templates/default_view_recurring_themes.de.jinja delete mode 100644 echo/server/json_templates/default_view_recurring_themes.en.jinja delete mode 100644 echo/server/json_templates/default_view_recurring_themes.es.jinja delete mode 100644 echo/server/json_templates/default_view_recurring_themes.fr.jinja delete mode 100644 echo/server/json_templates/default_view_recurring_themes.nl.jinja delete mode 100755 echo/server/scripts/simple_rag_observer.py delete mode 100644 echo/server/scripts/test_rag_query.py delete mode 100644 echo/server/scripts/test_trigger_directus_etl.py delete mode 100644 echo/server/tests/k6_load_testing/.gitignore delete mode 100644 echo/server/tests/k6_load_testing/env.example delete mode 100755 echo/server/tests/k6_load_testing/run-tests.sh delete mode 100644 echo/server/tests/k6_load_testing/scripts/k6_runpod_transcribe.js delete mode 100644 echo/server/tests/test_transcribe_runpod.py diff --git a/echo/server/.env.sample b/echo/server/.env.sample index c5ad0874..04678eb5 100644 --- a/echo/server/.env.sample +++ b/echo/server/.env.sample @@ -1,94 +1,66 @@ -# API keys / secrets -OPENAI_API_KEY= -ANTHROPIC_API_KEY= - -# URLs -DATABASE_URL=postgresql+psycopg://dembrane:dembrane@postgres:5432/dembrane -REDIS_URL=redis://redis:6379 +# Core API metadata +BUILD_VERSION=dev API_BASE_URL=http://localhost:8000 ADMIN_BASE_URL=http://localhost:5173 
PARTICIPANT_BASE_URL=http://localhost:5174 -OPENAI_API_BASE_URL=https://api.openai.com/v1 - -############################################################ -# LiteLLM Model Configurations -############################################################ -# Small Model -SMALL_LITELLM_MODEL="gpt-4o-mini" -SMALL_LITELLM_API_KEY="" -SMALL_LITELLM_API_VERSION="2024-02-01" -SMALL_LITELLM_API_BASE="https://api.openai.com/v1" +# Directus / authentication +DIRECTUS_BASE_URL=http://directus:8055 +DIRECTUS_SECRET= +DIRECTUS_TOKEN= +DIRECTUS_SESSION_COOKIE_NAME=directus_session_token -# Medium Model -MEDIUM_LITELLM_MODEL="gpt-4" -MEDIUM_LITELLM_API_KEY="" -MEDIUM_LITELLM_API_VERSION="2024-02-01" -MEDIUM_LITELLM_API_BASE="https://api.openai.com/v1" +# Persistence / cache +DATABASE_URL=postgresql+psycopg://dembrane:dembrane@postgres:5432/dembrane +REDIS_URL=redis://redis:6379 -# Large Model -LARGE_LITELLM_MODEL="gpt-4-turbo" -LARGE_LITELLM_API_KEY="" -LARGE_LITELLM_API_VERSION="2024-02-01" -LARGE_LITELLM_API_BASE="https://api.openai.com/v1" +# Object storage +STORAGE_S3_BUCKET=dembrane-audio +STORAGE_S3_REGION=us-east-1 +STORAGE_S3_ENDPOINT=https://s3.amazonaws.com +STORAGE_S3_KEY= +STORAGE_S3_SECRET= ############################################################ -# LightRAG LiteLLM Configurations +# Feature toggles ############################################################ - -# General Model -LIGHTRAG_LITELLM_MODEL="gpt-4o-mini" -LIGHTRAG_LITELLM_API_KEY="" -LIGHTRAG_LITELLM_API_VERSION="2024-02-01" -LIGHTRAG_LITELLM_API_BASE="https://api.openai.com/v1" - -# Audio Model -LIGHTRAG_LITELLM_AUDIOMODEL_MODEL=whisper-1 -LIGHTRAG_LITELLM_AUDIOMODEL_API_KEY=sk- -LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION=2024-02-01 -LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE=https://api.openai.com/v1 - -# Text Structure Model -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL=gpt-4o-mini -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY=sk- -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION=2024-02-01 
-LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE=https://api.openai.com/v1 - -# Embedding Model -LIGHTRAG_LITELLM_EMBEDDING_MODEL=text-embedding-ada-002 -LIGHTRAG_LITELLM_EMBEDDING_API_KEY=sk- -LIGHTRAG_LITELLM_EMBEDDING_API_VERSION=2024-02-01 -LIGHTRAG_LITELLM_EMBEDDING_API_BASE=https://api.openai.com/v1 - -# Inference Model (Claude) -LIGHTRAG_LITELLM_INFERENCE_MODEL=anthropic/claude-3-5-sonnet-20240620 -LIGHTRAG_LITELLM_INFERENCE_API_KEY= +DEBUG_MODE=0 +DISABLE_CORS=0 +DISABLE_REDACTION=0 +DISABLE_CHAT_TITLE_GENERATION=0 +ENABLE_CHAT_AUTO_SELECT=0 +SERVE_API_DOCS=0 +DISABLE_SENTRY=0 ############################################################ -# Databases +# Transcription providers ############################################################ -NEO4J_URI=bolt://localhost:7687 -NEO4J_USERNAME=neo4j -NEO4J_PASSWORD=admin@dembrane +TRANSCRIPTION_PROVIDER= +ENABLE_ASSEMBLYAI_TRANSCRIPTION=0 +ASSEMBLYAI_API_KEY= +ASSEMBLYAI_BASE_URL=https://api.eu.assemblyai.com +ENABLE_LITELLM_WHISPER_TRANSCRIPTION=0 +# Raw JSON or base64-encoded service account (set when TRANSCRIPTION_PROVIDER requires GCP) +GCP_SA_JSON= ############################################################ -# Whisper Transcription +# LLM configuration (LiteLLM Configuration) ############################################################ -ENABLE_RUNPOD_WHISPER_TRANSCRIPTION=0 -ENABLE_RUNPOD_DIARIZATION=0 - -ENABLE_LITELLM_WHISPER_TRANSCRIPTION=1 -LITELLM_WHISPER_API_KEY= -LITELLM_WHISPER_MODEL=whisper-1 -LITELLM_WHISPER_URL=https://api.openai.com/v1 - -RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD=30 -DISABLE_CHAT_TITLE_GENERATION=1 -############################################################ -# Flags / Debug -############################################################ -DEBUG_MODE=0 -DISABLE_SENTRY=0 -SERVE_API_DOCS=0 -DISABLE_REDACTION=1 \ No newline at end of file +# Multi-modal Pro – high-context reasoning (Gemini Pro, Claude, etc.) 
+LLM__MULTI_MODAL_PRO__MODEL=gemini-2.0-pro +LLM__MULTI_MODAL_PRO__API_KEY= +LLM__MULTI_MODAL_PRO__API_BASE=https://generativelanguage.googleapis.com/v1beta +LLM__MULTI_MODAL_PRO__API_VERSION= + +# Multi-modal Fast – streaming / whisper (Gemini Flash, etc.) +LLM__MULTI_MODAL_FAST__MODEL=gemini-2.0-flash +LLM__MULTI_MODAL_FAST__API_KEY= +LLM__MULTI_MODAL_FAST__API_BASE=https://generativelanguage.googleapis.com/v1beta +LLM__MULTI_MODAL_FAST__API_VERSION= + +# Text Fast – lightweight text-only model (GPT-4o mini, etc.) +LLM__TEXT_FAST__MODEL=gpt-4o-mini +LLM__TEXT_FAST__API_KEY= +LLM__TEXT_FAST__API_BASE=https://api.openai.com/v1 +LLM__TEXT_FAST__API_VERSION=2024-02-01 diff --git a/echo/server/AGENTS.md b/echo/server/AGENTS.md index 3cb782e2..064f0704 100644 --- a/echo/server/AGENTS.md +++ b/echo/server/AGENTS.md @@ -23,17 +23,16 @@ Last updated: 2025-11-07T08:32:55Z - For API handlers, favor Directus queries over raw SQLAlchemy sessions when reading project/conversation data to keep behavior consistent with the admin console. # Change Hotspots (last 90 days) -- High-churn (watch for conflicts): `echo/server/dembrane/tasks.py`, `echo/server/dembrane/config.py`, `echo/server/dembrane/transcribe.py`, `echo/server/pyproject.toml` +- High-churn (watch for conflicts): `echo/server/dembrane/tasks.py`, `echo/server/dembrane/transcribe.py`, `echo/server/pyproject.toml` - Slow movers (risk of stale assumptions): CI workflow YAMLs under `.github/workflows/`, `contributors.yml`, and `echo-user-docs` backups. # TODO / FIXME / HACK Inventory -- `dembrane/config.py:5` – Refactor messy config handling; consider YAML-based management. +- `dembrane/settings.py` – Centralized env loading; keep structure consistent as new services integrate. - `dembrane/embedding.py:8` – Replace placeholder embeddings with Dembrane implementation. - `dembrane/sentry.py:47` – Complete Sentry integration per docs. - `dembrane/tasks.py:72` – Remove SSL bypass once proper certificate/VPC isolation exists. 
- `dembrane/tasks.py:342` – Fetch contextual transcripts for previous segments. - `dembrane/tasks.py:525` – Respect `use_pii_redaction` flag when available. -- `dembrane/tasks.py:724` – Handle RunPod error class explicitly. - `dembrane/quote_utils.py:118/272/289` – Link quotes to chunks; fix sampling algorithm; adjust context limit math. - `dembrane/service/conversation.py:101` – Validate `project_tag_id_list`. - `dembrane/transcribe.py:179` – Replace polling with webhook approach. @@ -45,3 +44,4 @@ Last updated: 2025-11-07T08:32:55Z - CPU Dramatiq worker deliberately single-threaded to dodge LightRAG locking issues—respect `THREADS=1` guidance in prod. - Watching directories (`--watch`, `--watch-use-polling`) adds overhead; keep file changes minimal when workers run locally. - S3 audio paths used in verification/transcription flows should be loaded via the shared file service (`_get_audio_file_object`) so Gemini always receives fresh bytes—signed URLs may expire mid-request. +- When a Dramatiq actor needs to invoke an async FastAPI handler (e.g., `dembrane.api.conversation.summarize_conversation`), run the coroutine via `run_async_in_new_loop` from `dembrane.async_helpers` instead of calling it directly or with `asyncio.run` to avoid clashing with nested event loops. 
diff --git a/echo/server/dembrane/api/chat.py b/echo/server/dembrane/api/chat.py index 8f246b83..7b9eba59 100644 --- a/echo/server/dembrane/api/chat.py +++ b/echo/server/dembrane/api/chat.py @@ -1,29 +1,18 @@ # TODO: # - Change db calls to directus calls -# - Change anthropic api to litellm - import json import logging from typing import Any, Dict, List, Literal, Optional, AsyncGenerator import litellm from fastapi import Query, APIRouter, HTTPException -from litellm import token_counter # type: ignore from pydantic import BaseModel from sqlalchemy.orm import selectinload from fastapi.responses import StreamingResponse +from dembrane.llms import MODELS, count_tokens, get_completion_kwargs from dembrane.utils import generate_uuid, get_utc_timestamp -from dembrane.config import ( - SMALL_LITELLM_MODEL, - SMALL_LITELLM_API_KEY, - SMALL_LITELLM_API_BASE, - ENABLE_CHAT_AUTO_SELECT, - LIGHTRAG_LITELLM_INFERENCE_MODEL, - LIGHTRAG_LITELLM_INFERENCE_API_KEY, - LIGHTRAG_LITELLM_INFERENCE_API_BASE, - LIGHTRAG_LITELLM_INFERENCE_API_VERSION, -) +from dembrane.settings import get_settings from dembrane.prompts import render_prompt from dembrane.database import ( DatabaseSession, @@ -40,15 +29,16 @@ auto_select_conversations, create_system_messages_for_chat, ) -from dembrane.quote_utils import count_tokens from dembrane.api.conversation import get_conversation_token_count from dembrane.api.dependency_auth import DirectusSession, DependencyDirectusSession -from dembrane.audio_lightrag.utils.lightrag_utils import get_project_id ChatRouter = APIRouter(tags=["chat"]) logger = logging.getLogger("dembrane.chat") +settings = get_settings() +ENABLE_CHAT_AUTO_SELECT = settings.enable_chat_auto_select + async def is_followup_question( conversation_history: List[Dict[str, str]], language: str = "en" @@ -84,12 +74,10 @@ async def is_followup_question( try: response = await litellm.acompletion( - model=SMALL_LITELLM_MODEL, - api_key=SMALL_LITELLM_API_KEY, - api_base=SMALL_LITELLM_API_BASE, 
messages=[{"role": "user", "content": prompt}], temperature=0, # Deterministic timeout=60, # 1 minute timeout for quick decision + **get_completion_kwargs(MODELS.TEXT_FAST), ) result_text = response.choices[0].message.content.strip() @@ -185,7 +173,10 @@ async def get_chat_context( if message.message_from in ["user", "assistant"]: # if tokens_count is not set, set it if message.tokens_count is None: - message.tokens_count = count_tokens(message.text) + message.tokens_count = count_tokens( + MODELS.TEXT_FAST, + [{"role": message.message_from, "content": message.text}], + ) db.commit() if message.message_from == "user": @@ -495,7 +486,15 @@ async def post_chat( except Exception as e: logger.error(f"Error updating template key: {str(e)}") - project_id = get_project_id(chat.id) # TODO: Write directus call here + project_id = directus.get_items( + "project_chat", + { + "query": { + "filter": {"id": {"_eq": chat.id}}, + "fields": ["project_id"], + }, + }, + )[0]["project_id"] messages = get_project_chat_history(chat_id, db) @@ -556,8 +555,9 @@ async def post_chat( ] + conversation_history # Check context length - prompt_len = token_counter( - model=LIGHTRAG_LITELLM_INFERENCE_MODEL, messages=formatted_messages + prompt_len = count_tokens( + MODELS.MULTI_MODAL_PRO, + formatted_messages, ) if prompt_len > MAX_CHAT_CONTEXT_LENGTH: @@ -615,8 +615,9 @@ async def post_chat( ] + conversation_history # Check if adding this conversation would exceed 80% threshold - prompt_len = token_counter( - model=LIGHTRAG_LITELLM_INFERENCE_MODEL, messages=temp_formatted_messages + prompt_len = count_tokens( + MODELS.MULTI_MODAL_PRO, + temp_formatted_messages, ) if prompt_len > MAX_CONTEXT_THRESHOLD: @@ -664,8 +665,9 @@ async def post_chat( ] + conversation_history # Check context length - prompt_len = token_counter( - model=LIGHTRAG_LITELLM_INFERENCE_MODEL, messages=formatted_messages + prompt_len = count_tokens( + MODELS.MULTI_MODAL_PRO, + formatted_messages, ) if prompt_len > 
MAX_CHAT_CONTEXT_LENGTH: @@ -695,15 +697,11 @@ async def stream_response_async_autoselect() -> AsyncGenerator[str, None]: accumulated_response = "" try: response = await litellm.acompletion( - model=LIGHTRAG_LITELLM_INFERENCE_MODEL, - api_key=LIGHTRAG_LITELLM_INFERENCE_API_KEY, - api_version=LIGHTRAG_LITELLM_INFERENCE_API_VERSION, - api_base=LIGHTRAG_LITELLM_INFERENCE_API_BASE, messages=formatted_messages, stream=True, timeout=300, # 5 minute timeout for response stream_timeout=180, # 3 minute timeout for streaming - # mock_response="It's simple to use and easy to get started", + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) async for chunk in response: if chunk.choices[0].delta.content: @@ -787,14 +785,11 @@ async def stream_response_async_manualselect() -> AsyncGenerator[str, None]: logger.debug(f"messages_to_send: {messages_to_send}") response = await litellm.acompletion( - model=LIGHTRAG_LITELLM_INFERENCE_MODEL, - api_key=LIGHTRAG_LITELLM_INFERENCE_API_KEY, - api_version=LIGHTRAG_LITELLM_INFERENCE_API_VERSION, - api_base=LIGHTRAG_LITELLM_INFERENCE_API_BASE, messages=messages_to_send, stream=True, timeout=300, # 5 minute timeout for response stream_timeout=180, # 3 minute timeout for streaming + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) async for chunk in response: if chunk.choices[0].delta.content: diff --git a/echo/server/dembrane/api/conversation.py b/echo/server/dembrane/api/conversation.py index dac6961b..aa378bfc 100644 --- a/echo/server/dembrane/api/conversation.py +++ b/echo/server/dembrane/api/conversation.py @@ -5,7 +5,6 @@ from fastapi import Request, APIRouter from pydantic import BaseModel -from litellm.utils import token_counter from sqlalchemy.orm import noload, selectinload from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.exceptions import HTTPException @@ -13,7 +12,6 @@ from dembrane.s3 import get_signed_url from dembrane.utils import CacheWithExpiration, generate_uuid, get_utc_timestamp -from 
dembrane.config import LIGHTRAG_LITELLM_INFERENCE_MODEL from dembrane.database import ( ConversationModel, ConversationChunkModel, @@ -26,18 +24,13 @@ merge_multiple_audio_files_and_save_to_s3, ) from dembrane.reply_utils import generate_reply_for_conversation -from dembrane.api.stateless import ( - DeleteConversationRequest, - generate_summary, - delete_conversation as delete_conversation_from_lightrag, -) +from dembrane.api.stateless import generate_summary from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ( NoContentFoundException, ConversationNotFoundException, ) from dembrane.api.dependency_auth import DependencyDirectusSession -from dembrane.conversation_health import get_health_status logger = getLogger("api.conversation") ConversationRouter = APIRouter(tags=["conversation"]) @@ -103,7 +96,6 @@ async def generate_health_events( ) -> AsyncGenerator[str, None]: ping_count = 0 last_health_data = None - event_count = 0 try: while True: @@ -117,27 +109,10 @@ async def generate_health_events( # Send ping every 45 seconds yield f"event: ping\ndata: {ping_count}\n\n" - health_data = get_health_status( - conversation_ids=conversation_ids, project_ids=project_ids - ) - # Extract only conversation_issue for the single conversation_id if only one is passed if len(conversation_ids) == 1: - conversation_id = conversation_ids[0] conversation_issue = None - # Find the conversation_issue in the nested structure - if health_data and "projects" in health_data: - for project_data in health_data["projects"].values(): - if ( - "conversations" in project_data - and conversation_id in project_data["conversations"] - ): - conversation_issue = project_data["conversations"][conversation_id].get( - "conversation_issue", "UNKNOWN" - ) - break - # Create simplified response with just the conversation_issue simplified_data = {"conversation_issue": conversation_issue} @@ -146,13 +121,10 @@ async def generate_health_events( yield f"event: 
health_update\ndata: {json.dumps(simplified_data)}\n\n" last_health_data = simplified_data else: - # Send full health data if multiple IDs - if health_data != last_health_data: - yield f"event: health_update\ndata: {json.dumps(health_data)}\n\n" - last_health_data = health_data - - event_count += 1 - logger.debug(f"Sent health event #{event_count} to {client_info}") + logger.warning( + "Multiple conversation IDs passed to health stream, only one is supported" + ) + raise HTTPException(status_code=400, detail="Only one conversation ID is supported") # Log every 10th ping if ping_count % 10 == 0: @@ -458,10 +430,9 @@ async def get_conversation_token_count( # If not in cache, calculate the token count transcript = await get_conversation_transcript(conversation_id, auth) - token_count = await run_in_thread_pool( - token_counter, - model=LIGHTRAG_LITELLM_INFERENCE_MODEL, - messages=[{"role": "user", "content": transcript}], + token_count = count_tokens( + MODELS.MULTI_MODAL_PRO, + [{"role": "user", "content": transcript}], ) # Store the result in the cache @@ -750,12 +721,6 @@ async def delete_conversation( """ await raise_if_conversation_not_found_or_not_authorized(conversation_id, auth) try: - # Run RAG deletion (documents, transcripts, segments) - await delete_conversation_from_lightrag( - DeleteConversationRequest(conversation_ids=[conversation_id]), - session=auth, - ) - # Run Directus deletion await run_in_thread_pool(directus.delete_item, "conversation", conversation_id) return {"status": "success", "message": "Conversation deleted successfully"} except Exception as e: diff --git a/echo/server/dembrane/api/dependency_auth.py b/echo/server/dembrane/api/dependency_auth.py index 2f38fb31..5b1cf18d 100644 --- a/echo/server/dembrane/api/dependency_auth.py +++ b/echo/server/dembrane/api/dependency_auth.py @@ -4,10 +4,13 @@ from jose import JWTError, jwt from fastapi import Depends, Request -from dembrane.config import DIRECTUS_SECRET, DIRECTUS_SESSION_COOKIE_NAME +from 
dembrane.settings import get_settings from dembrane.api.exceptions import SessionInvalidException logger = getLogger("api.session") +settings = get_settings() +DIRECTUS_SECRET = settings.directus_secret +DIRECTUS_SESSION_COOKIE_NAME = settings.directus_session_cookie_name class DirectusSession: diff --git a/echo/server/dembrane/api/participant.py b/echo/server/dembrane/api/participant.py index 2d86b961..2183d6c5 100644 --- a/echo/server/dembrane/api/participant.py +++ b/echo/server/dembrane/api/participant.py @@ -9,7 +9,7 @@ from dembrane.s3 import get_sanitized_s3_key, get_file_size_bytes_from_s3 from dembrane.utils import generate_uuid -from dembrane.config import STORAGE_S3_BUCKET, STORAGE_S3_ENDPOINT +from dembrane.settings import get_settings from dembrane.service import project_service, conversation_service from dembrane.directus import directus from dembrane.async_helpers import run_in_thread_pool @@ -24,6 +24,10 @@ ParticipantRouter = APIRouter(tags=["participant"]) +settings = get_settings() +STORAGE_S3_BUCKET = settings.storage_s3_bucket +STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint + class PublicProjectTagSchema(BaseModel): id: str diff --git a/echo/server/dembrane/api/project.py b/echo/server/dembrane/api/project.py index cff598fa..47f83b37 100644 --- a/echo/server/dembrane/api/project.py +++ b/echo/server/dembrane/api/project.py @@ -12,7 +12,7 @@ from dembrane.tasks import task_create_view, task_create_project_library from dembrane.utils import generate_uuid, get_safe_filename -from dembrane.config import BASE_DIR +from dembrane.settings import get_settings from dembrane.schemas import ( ProjectSchema, ) @@ -35,6 +35,8 @@ ProjectRouter = APIRouter(tags=["project"]) PROJECT_ALLOWED_LANGUAGES = ["en", "nl", "de", "fr", "es"] +settings = get_settings() +BASE_DIR = settings.base_dir class CreateProjectRequestSchema(BaseModel): diff --git a/echo/server/dembrane/api/resource.py b/echo/server/dembrane/api/resource.py deleted file mode 100644 index 
ab1d5524..00000000 --- a/echo/server/dembrane/api/resource.py +++ /dev/null @@ -1,89 +0,0 @@ -import os -from typing import Optional -from logging import getLogger - -from fastapi import APIRouter -from pydantic import BaseModel -from fastapi.responses import StreamingResponse - -from dembrane.utils import iter_file_content -from dembrane.schemas import ResourceSchema -from dembrane.database import ResourceModel, DependencyInjectDatabase -from dembrane.api.exceptions import ( - ResourceNotFoundException, - ResourceContentNotFoundException, - ResourceInvalidFileFormatException, -) - -logger = getLogger("api.resource") - -# this is included in the ProjectRouter -ResourceRouter = APIRouter(tags=["resource"]) - - -@ResourceRouter.get("/{resource_id}", response_model=ResourceSchema) -async def get_resource(resource_id: str, db: DependencyInjectDatabase) -> ResourceModel: - resource = ( - db.query(ResourceModel) - .filter( - ResourceModel.id == resource_id, - ) - .first() - ) - if not resource: - raise ResourceNotFoundException - return resource - - -@ResourceRouter.get("/{resource_id}/content", response_model=ResourceSchema) -async def get_resource_content(resource_id: str, db: DependencyInjectDatabase) -> StreamingResponse: - resource = ( - db.query(ResourceModel) - .filter( - ResourceModel.id == resource_id, - ) - .first() - ) - - if not resource: - raise ResourceNotFoundException - - if not os.path.exists(resource.path): - logger.error(f"Resource file not found: {resource.path} but it exists in the database") - raise ResourceContentNotFoundException - - if resource.type != "PDF": - logger.error(f"Invalid file format: {resource.type}") - raise ResourceInvalidFileFormatException - - return StreamingResponse(iter_file_content(resource.path), media_type="application/pdf") - - -class PutResourceRequestBodySchema(BaseModel): - title: Optional[str] - description: Optional[str] - context: Optional[str] - - -@ResourceRouter.put("/{resource_id}", response_model=ResourceSchema) 
-async def update_resource( - resource_id: str, - body: PutResourceRequestBodySchema, - db: DependencyInjectDatabase, -) -> ResourceModel: - resource = await get_resource(resource_id, db) - - resource.title = body.title or resource.title - resource.description = body.description or resource.description - resource.context = body.context or resource.context - - db.commit() - return resource - - -@ResourceRouter.delete("/{resource_id}", response_model=ResourceSchema) -async def delete_resource(resource_id: str, db: DependencyInjectDatabase) -> ResourceModel: - resource = await get_resource(resource_id, db) - db.delete(resource) - db.commit() - return resource diff --git a/echo/server/dembrane/api/stateless.py b/echo/server/dembrane/api/stateless.py index 22fb12e8..0604ed8f 100644 --- a/echo/server/dembrane/api/stateless.py +++ b/echo/server/dembrane/api/stateless.py @@ -1,32 +1,11 @@ from logging import getLogger import nest_asyncio -from fastapi import APIRouter, HTTPException +from fastapi import APIRouter from litellm import completion -from pydantic import BaseModel -from lightrag.lightrag import QueryParam -from lightrag.kg.shared_storage import initialize_pipeline_status -from dembrane.config import ( - SMALL_LITELLM_MODEL, - SMALL_LITELLM_API_KEY, - SMALL_LITELLM_API_BASE, - SMALL_LITELLM_API_VERSION, -) +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.prompts import render_prompt -from dembrane.rag_manager import RAGManager, get_rag -from dembrane.postgresdb_manager import PostgresDBManager -from dembrane.api.dependency_auth import DependencyDirectusSession -from dembrane.audio_lightrag.utils.lightrag_utils import ( - is_valid_uuid, - upsert_transcript, - fetch_query_transcript, - delete_transcript_by_doc_id, - delete_segment_from_directus, - get_segment_from_project_ids, - get_segment_from_conversation_ids, - get_segment_from_conversation_chunk_ids, -) # LightRAG requires nest_asyncio for nested event loops nest_asyncio.apply() @@ -56,16 
+35,13 @@ def generate_summary(transcript: str, language: str | None) -> str: # Call the model over the provided API endpoint response = completion( - model=SMALL_LITELLM_MODEL, messages=[ { "content": prompt, "role": "user", } ], - api_key=SMALL_LITELLM_API_KEY, - api_base=SMALL_LITELLM_API_BASE, - api_version=SMALL_LITELLM_API_VERSION, + **get_completion_kwargs(MODELS.TEXT_FAST), ) response_content = response["choices"][0]["message"]["content"] @@ -84,255 +60,8 @@ def validate_segment_id(echo_segment_ids: list[str] | None) -> bool: return False -class InsertRequest(BaseModel): - content: str | list[str] - transcripts: list[str] - echo_segment_id: str - - -class InsertResponse(BaseModel): - status: str - result: dict - - -@StatelessRouter.post("/rag/insert") -async def insert_item( - payload: InsertRequest, - session: DependencyDirectusSession, # Needed for fake auth -) -> InsertResponse: - session = session - if not RAGManager.is_initialized(): - await RAGManager.initialize() - rag = get_rag() - await initialize_pipeline_status() - if rag is None: - raise HTTPException(status_code=500, detail="RAG object not initialized") - try: - postgres_db = await PostgresDBManager.get_initialized_db() - except Exception as e: - logger.exception("Failed to get initialized PostgreSQLDB for insert") - raise HTTPException(status_code=500, detail="Database connection failed") from e - try: - if isinstance(payload.echo_segment_id, str): - echo_segment_ids = [payload.echo_segment_id] - else: - raise HTTPException(status_code=400, detail="Invalid segment ID") - - if validate_segment_id(echo_segment_ids): - await rag.ainsert( - payload.content, - ids=echo_segment_ids, - file_paths=["SEGMENT_ID_" + x for x in echo_segment_ids], - ) - for transcript in payload.transcripts: - await upsert_transcript( - postgres_db, document_id=str(payload.echo_segment_id), content=transcript - ) - result = {"status": "inserted", "content": payload.content} - return InsertResponse(status="success", 
result=result) - else: - raise HTTPException(status_code=400, detail="Invalid segment ID") - except Exception as e: - logger.exception("Insert operation failed") - raise HTTPException(status_code=500, detail=str(e)) from e - - -class SimpleQueryRequest(BaseModel): - query: str - echo_segment_ids: list[str] | None = None - get_transcripts: bool = False - - -class SimpleQueryResponse(BaseModel): - status: str - result: str - transcripts: list[str] - - -@StatelessRouter.post("/rag/simple_query") -async def query_item( - payload: SimpleQueryRequest, - session: DependencyDirectusSession, # Needed for fake auth -) -> SimpleQueryResponse: - session = session - if not RAGManager.is_initialized(): - await RAGManager.initialize() - rag = get_rag() - await initialize_pipeline_status() - if rag is None: - raise HTTPException(status_code=500, detail="RAG object not initialized") - try: - postgres_db = await PostgresDBManager.get_initialized_db() - except Exception as e: - logger.exception("Failed to get initialized PostgreSQLDB for query") - raise HTTPException(status_code=500, detail="Database connection failed") from e - try: - if isinstance(payload.echo_segment_ids, list): - echo_segment_ids = payload.echo_segment_ids - else: - echo_segment_ids = None - - if validate_segment_id(echo_segment_ids): - result = rag.query( - payload.query, - param=QueryParam(mode="mix", ids=echo_segment_ids if echo_segment_ids else None), - ) - if payload.get_transcripts: - transcripts = await fetch_query_transcript( - postgres_db, str(result), ids=echo_segment_ids if echo_segment_ids else None - ) - transcript_contents = ( - [t for t in transcripts] if isinstance(transcripts, list) else [] - ) - # transcript_contents = ( - # [t["content"] for t in transcripts] - # if isinstance(transcripts, list) - # else [transcripts["content"]] - # ) - else: - transcript_contents = [] - return SimpleQueryResponse( - status="success", result=result, transcripts=transcript_contents - ) - else: - raise 
HTTPException(status_code=400, detail="Invalid segment ID") - except Exception as e: - logger.exception("Query operation failed") - raise HTTPException(status_code=500, detail=str(e)) from e - - -class GetLightragQueryRequest(BaseModel): - query: str - conversation_history: list[dict[str, str]] | None = None - echo_segment_ids: list[str] | None = None - echo_conversation_ids: list[str] | None = None - echo_project_ids: list[str] | None = None - auto_select_bool: bool = False - get_transcripts: bool = False - top_k: int = 60 - - -@StatelessRouter.post("/rag/get_lightrag_prompt") -async def get_lightrag_prompt( - payload: GetLightragQueryRequest, - session: DependencyDirectusSession, # Needed for fake auth -) -> str: - session = session - # Validate payload - if not payload.auto_select_bool: - if ( - payload.echo_segment_ids is None - and payload.echo_conversation_ids is None - and payload.echo_project_ids is None - ): - raise HTTPException( - status_code=400, - detail="At least one of echo_segment_ids, echo_conversation_ids, or echo_project_ids must be provided if auto_select_bool is False", - ) - # Initialize database - try: - postgres_db = await PostgresDBManager.get_initialized_db() - except Exception as e: - logger.exception("Failed to get initialized PostgreSQLDB for query") - raise HTTPException(status_code=500, detail="Database connection failed") from e - - # Get echo segment ids - echo_segment_ids: list[int] = [] - if payload.echo_segment_ids: - echo_segment_ids += [int(id) for id in payload.echo_segment_ids] - if payload.echo_conversation_ids: - conversation_segments = await get_segment_from_conversation_chunk_ids( - postgres_db, payload.echo_conversation_ids - ) - echo_segment_ids += conversation_segments - if payload.echo_project_ids: - project_segments = await get_segment_from_project_ids(postgres_db, payload.echo_project_ids) - echo_segment_ids += project_segments - # if payload.auto_select_bool: - # all_segments = await get_all_segments(postgres_db, 
payload.echo_conversation_ids) # type: ignore - # echo_segment_ids += all_segments - - # Initialize RAG - if not RAGManager.is_initialized(): - await RAGManager.initialize() - rag = get_rag() - await initialize_pipeline_status() - if rag is None: - raise HTTPException(status_code=500, detail="RAG object not initialized") - - # Process segment ids - try: - if validate_segment_id([str(id) for id in echo_segment_ids]): - param = QueryParam( - mode="mix", - conversation_history=payload.conversation_history, - history_turns=10, - only_need_prompt=True, - ids=[str(id) for id in echo_segment_ids], - top_k=payload.top_k, - ) - - try: - response = await rag.aquery(payload.query, param=param) - logger.debug(f"Response: {response}") - return response - except Exception as rag_error: - logger.exception(f"RAG query failed: {rag_error}") - raise HTTPException( - status_code=503, - detail="RAG query temporarily unavailable. Please try manual conversation selection or contact support.", - ) from rag_error - - else: - raise HTTPException(status_code=400, detail="Invalid segment ID") - except Exception as e: - logger.exception("Query streaming operation failed") - raise HTTPException(status_code=500, detail=str(e)) from e - - -class DeleteConversationRequest(BaseModel): - conversation_ids: list[str] - - -@StatelessRouter.post("/rag/delete_conversation") -async def delete_conversation( - payload: DeleteConversationRequest, - session: DependencyDirectusSession, # Needed for fake auth -) -> None: - session = session - - conversation_ids = payload.conversation_ids - for id in conversation_ids: - if not is_valid_uuid(id): - raise HTTPException(status_code=400, detail="Invalid conversation ID") - # Initialize RAG - if not RAGManager.is_initialized(): - await RAGManager.initialize() - rag = get_rag() - await initialize_pipeline_status() - postgres_db = await PostgresDBManager.get_initialized_db() - try: - lightrag_doc_ids = await get_segment_from_conversation_ids(postgres_db, 
conversation_ids) - except Exception as e: - logger.exception("Failed to get segment from conversation ids. Check PGSQ") - raise HTTPException(status_code=500, detail=str(e)) from e - - for doc_id in lightrag_doc_ids: - await rag.adelete_by_doc_id(str(doc_id)) - await delete_transcript_by_doc_id(postgres_db, str(doc_id)) - delete_segment_from_directus(str(doc_id)) - logger.info(f"Deleted {len(lightrag_doc_ids)} document(s) from RAG") - - @StatelessRouter.post("/webhook/transcribe") async def transcribe_webhook(payload: dict) -> None: logger = getLogger("stateless.webhook.transcribe") logger.debug(f"Transcribe webhook received: {payload}") - try: - from dembrane.runpod import load_runpod_transcription_response - - load_runpod_transcription_response(payload) - - except Exception as e: - logger.exception("Failed to update conversation chunk") - raise HTTPException(status_code=500, detail=str(e)) from e + logger.info("Transcription webhook received but integration is disabled; ignoring payload.") diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 56268950..821995a6 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -9,115 +9,21 @@ from pydantic import Field, BaseModel from dembrane.utils import generate_uuid -from dembrane.config import GCP_SA_JSON +from dembrane.settings import get_settings from dembrane.prompts import render_prompt from dembrane.directus import directus from dembrane.transcribe import _get_audio_file_object from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ProjectNotFoundException, ConversationNotFoundException from dembrane.api.dependency_auth import DependencyDirectusSession +from dembrane.llms import MODELS, get_completion_kwargs logger = logging.getLogger("api.verify") -VerifyRouter = APIRouter(tags=["verify"]) - -DEFAULT_LANG = "en-US" - - -class VerificationTopicSeed(BaseModel): - key: str - prompt: str - icon: str - label: 
str - sort: int - - -DEFAULT_VERIFICATION_TOPICS: List[VerificationTopicSeed] = [ - VerificationTopicSeed( - key="agreements", - icon=":white_check_mark:", - label="What we actually agreed on", - sort=1, - prompt=( - "Extract the concrete agreements and shared understandings from this conversation. " - "Focus on points where multiple participants explicitly or implicitly aligned. " - "Include both major decisions and small points of consensus. Present these as clear, " - "unambiguous statements that all participants would recognize as accurate. Distinguish " - "between firm agreements and tentative consensus. If participants used different words " - "to express the same idea, synthesize into shared language. Format as a living document " - "of mutual understanding. Output character should be diplomatic but precise, like meeting " - "minutes with soul." - ), - ), - VerificationTopicSeed( - key="gems", - icon=":mag:", - label="Hidden gems", - sort=2, - prompt=( - "Identify the valuable insights that emerged unexpectedly or were mentioned briefly but " - "contain significant potential. Look for: throwaway comments that solve problems, questions " - "that reframe the entire discussion, metaphors that clarify complex ideas, connections between " - "seemingly unrelated points, and wisdom hiding in personal anecdotes. Present these as discoveries " - "worth preserving, explaining why each gem matters. These are the insights people might forget but " - "shouldn't. Output character should be excited and precise." - ), - ), - VerificationTopicSeed( - key="truths", - icon=":eyes:", - label="Painful truths", - sort=3, - prompt=( - "Surface the uncomfortable realities acknowledged in this conversation - the elephants in the room that " - "got named, the difficult facts accepted, the challenging feedback given or received. Include systemic " - "problems identified, personal blind spots revealed, and market realities confronted. 
Present these with " - "compassion but without sugar-coating. Frame them as shared recognitions that took courage to voice. " - "These truths are painful but necessary for genuine progress. Output character should be gentle but " - "unflinching." - ), - ), - VerificationTopicSeed( - key="moments", - icon=":rocket:", - label="Breakthrough moments", - sort=4, - prompt=( - "Capture the moments when thinking shifted, new possibilities emerged, or collective understanding jumped " - "to a new level. Identify: sudden realizations, creative solutions, perspective shifts, moments when " - "complexity became simple, and ideas that energized the group. Show both the breakthrough itself and what " - "made it possible. These are the moments when the conversation transcended its starting point. Output " - "character should be energetic and forward-looking." - ), - ), - VerificationTopicSeed( - key="actions", - icon=":arrow_upper_right:", - label="What we think should happen", - sort=5, - prompt=( - "Synthesize the group's emerging sense of direction and next steps. Include: explicit recommendations made, " - "implicit preferences expressed, priorities that emerged through discussion, and logical next actions even " - "if not explicitly stated. Distinguish between unanimous direction and majority leanings. Present as " - "provisional navigation rather than fixed commands. This is the group's best current thinking about the " - "path forward. Output character should be pragmatic but inspirational." - ), - ), - VerificationTopicSeed( - key="disagreements", - icon=":warning:", - label="Moments we agreed to disagree", - sort=6, - prompt=( - "Document the points of productive tension where different perspectives remained distinct but respected. " - "Include: fundamental differences in approach, varying priorities, different risk tolerances, and contrasting " - "interpretations of data. Frame these not as failures to agree but as valuable diversity of thought. 
Show how " - "each perspective has merit. These disagreements are features, not bugs - they prevent premature convergence " - "and keep important tensions alive. Output character should be respectful and balanced." - ), - ), -] +settings = get_settings() +GCP_SA_JSON = settings.gcp_sa_json +VerifyRouter = APIRouter(tags=["verify"]) class VerificationTopicTranslation(BaseModel): label: str @@ -186,53 +92,6 @@ def _parse_directus_datetime(value: Optional[str]) -> Optional[datetime]: return None -async def seed_default_verification_topics() -> None: - """ - Ensure that the canonical verification topics exist in Directus. - """ - - for topic in DEFAULT_VERIFICATION_TOPICS: - existing = await run_in_thread_pool( - directus.get_items, - "verification_topic", - { - "query": { - "filter": { - "key": {"_eq": topic.key}, - "project_id": {"_null": True}, - }, - "fields": ["key"], - "limit": 1, - } - }, - ) - - if existing: - continue - - logger.info("Seeding verification topic '%s'", topic.key) - translations_payload = [ - { - "languages_code": DEFAULT_LANG, - "label": topic.label, - } - ] - - await run_in_thread_pool( - directus.create_item, - "verification_topic", - item_data={ - "key": topic.key, - "prompt": topic.prompt, - "icon": topic.icon, - "sort": topic.sort, - "translations": { - "create": translations_payload, - }, - }, - ) - - async def _get_project(project_id: str) -> dict: project_rows = await run_in_thread_pool( directus.get_items, @@ -703,10 +562,10 @@ async def generate_verification_artifacts( }, ) + completion_kwargs = get_completion_kwargs(MODELS.MULTI_MODAL_PRO) + try: response = litellm.completion( - model="vertex_ai/gemini-2.5-flash", - vertex_credentials=GCP_SA_JSON, messages=[ { "role": "system", @@ -722,6 +581,8 @@ async def generate_verification_artifacts( "content": message_content, }, ], + vertex_credentials=GCP_SA_JSON, + **completion_kwargs, ) except Exception as exc: logger.error("Gemini completion failed: %s", exc, exc_info=True) @@ -778,7 
+639,6 @@ async def update_verification_artifact( reference_conversation_id = body.use_conversation.conversation_id reference_timestamp = body.use_conversation.timestamp - conversation = await _get_conversation_with_project(reference_conversation_id) chunks = await _get_conversation_chunks(reference_conversation_id) conversation_transcript = _build_transcript_text(chunks) @@ -833,10 +693,10 @@ async def update_verification_artifact( except Exception as exc: # pragma: no cover - logging side effect logger.warning("Failed to attach audio chunk %s: %s", chunk_id, exc) + revision_completion_kwargs = get_completion_kwargs(MODELS.MULTI_MODAL_PRO) + try: response = litellm.completion( - model="vertex_ai/gemini-2.5-flash", - vertex_credentials=GCP_SA_JSON, messages=[ { "role": "system", @@ -852,6 +712,8 @@ async def update_verification_artifact( "content": message_content, }, ], + vertex_credentials=GCP_SA_JSON, + **revision_completion_kwargs, ) except Exception as exc: # pragma: no cover - external failure logger.error("Gemini revision failed: %s", exc, exc_info=True) diff --git a/echo/server/dembrane/async_helpers.py b/echo/server/dembrane/async_helpers.py index 30cfa630..59460432 100644 --- a/echo/server/dembrane/async_helpers.py +++ b/echo/server/dembrane/async_helpers.py @@ -23,7 +23,7 @@ import atexit import asyncio import threading -from typing import Any, TypeVar, Callable, Optional +from typing import Any, TypeVar, Callable, Optional, Awaitable, Coroutine from logging import getLogger from functools import partial from concurrent.futures import ThreadPoolExecutor @@ -140,3 +140,48 @@ async def run_in_thread_pool(func: Callable[..., T], *args: Any, **kwargs: Any) # Note: We use the global thread pool instead of None (default) to ensure # we have control over the thread count via environment variable return await loop.run_in_executor(get_thread_pool_executor(), func, *args) + + +# Persistent event loops per worker thread for LightRAG compatibility +_thread_loops: 
dict[int, asyncio.AbstractEventLoop] = {} +_thread_loops_lock = threading.Lock() + + +def _get_thread_event_loop() -> asyncio.AbstractEventLoop: + """ + Fetch or create the persistent event loop for the current thread. + Mirrors the previous dembrane.audio_lightrag.utils.async_utils implementation + so LightRAG objects tied to an event loop (e.g., RAGManager) continue to work. + """ + thread_id = threading.get_ident() + + if thread_id in _thread_loops: + return _thread_loops[thread_id] + + with _thread_loops_lock: + if thread_id in _thread_loops: + return _thread_loops[thread_id] + + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + _thread_loops[thread_id] = loop + logger.info("Created persistent event loop for thread %s", thread_id) + return loop + + +def run_async_in_new_loop(coro: Coroutine[Any, Any, T]) -> T: + """ + Execute an async coroutine on this thread's persistent event loop. + + Use from synchronous contexts such as Dramatiq actors or CLI scripts to + invoke async FastAPI handlers / LightRAG routines without hitting + "Future attached to a different loop" errors. + """ + if not asyncio.iscoroutine(coro) and not asyncio.isfuture(coro): + raise TypeError("run_async_in_new_loop expects a coroutine or Future.") + + loop = _get_thread_event_loop() + logger.debug("Running async coroutine in thread loop: %s", coro) + result = loop.run_until_complete(coro) + logger.debug("Completed async coroutine: %s", coro) + return result diff --git a/echo/server/dembrane/asyncio_uvicorn_worker.py b/echo/server/dembrane/asyncio_uvicorn_worker.py new file mode 100644 index 00000000..cbf63728 --- /dev/null +++ b/echo/server/dembrane/asyncio_uvicorn_worker.py @@ -0,0 +1,7 @@ +"use asyncio loop instead of uvloop. 
historically used for LightRAG compatibility" + +from uvicorn.workers import UvicornWorker + + +class AsyncioUvicornWorker(UvicornWorker): + CONFIG_KWARGS = {"loop": "asyncio"} diff --git a/echo/server/dembrane/audio_utils.py b/echo/server/dembrane/audio_utils.py index a155b9da..1fa3418f 100644 --- a/echo/server/dembrane/audio_utils.py +++ b/echo/server/dembrane/audio_utils.py @@ -13,7 +13,7 @@ from dembrane.s3 import s3_client, delete_from_s3, get_stream_from_s3, get_sanitized_s3_key from dembrane.utils import generate_uuid -from dembrane.config import STORAGE_S3_BUCKET, STORAGE_S3_ENDPOINT +from dembrane.settings import get_settings from dembrane.service import conversation_service from dembrane.directus import directus @@ -94,6 +94,11 @@ class FileTooSmallError(Exception): pass +settings = get_settings() +STORAGE_S3_BUCKET = settings.storage_s3_bucket +STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint + + def convert_and_save_to_s3( input_file_name: str, output_file_name: str, diff --git a/echo/server/dembrane/chat_utils.py b/echo/server/dembrane/chat_utils.py index 387ce95c..0a03270a 100644 --- a/echo/server/dembrane/chat_utils.py +++ b/echo/server/dembrane/chat_utils.py @@ -7,7 +7,6 @@ import backoff from litellm import acompletion from pydantic import BaseModel -from litellm.utils import token_counter from sqlalchemy.orm import Session, selectinload from litellm.exceptions import ( Timeout, @@ -17,12 +16,8 @@ ContextWindowExceededError, ) -from dembrane.config import ( - SMALL_LITELLM_MODEL, - SMALL_LITELLM_API_KEY, - SMALL_LITELLM_API_BASE, - DISABLE_CHAT_TITLE_GENERATION, -) +from dembrane.llms import MODELS, count_tokens, get_completion_kwargs +from dembrane.settings import get_settings from dembrane.prompts import render_prompt from dembrane.database import ConversationModel, ProjectChatMessageModel from dembrane.directus import directus @@ -33,6 +28,9 @@ logger = logging.getLogger("chat_utils") +settings = get_settings() +DISABLE_CHAT_TITLE_GENERATION = 
settings.disable_chat_title_generation + class ClientAttachment(BaseModel): name: str @@ -195,10 +193,8 @@ async def generate_title( ) response = await acompletion( - model=SMALL_LITELLM_MODEL, messages=[{"role": "user", "content": title_prompt}], - api_base=SMALL_LITELLM_API_BASE, - api_key=SMALL_LITELLM_API_KEY, + **get_completion_kwargs(MODELS.TEXT_FAST), ) if response.choices[0].message.content is None: @@ -340,12 +336,10 @@ async def _call_llm_with_backoff(prompt: str, batch_num: int) -> Any: """Call LLM with automatic retry for transient errors.""" logger.debug(f"Calling LLM for batch {batch_num}") return await acompletion( - model=SMALL_LITELLM_MODEL, messages=[{"role": "user", "content": prompt}], - api_base=SMALL_LITELLM_API_BASE, - api_key=SMALL_LITELLM_API_KEY, response_format={"type": "json_object"}, timeout=5 * 60, # 5 minutes + **get_completion_kwargs(MODELS.TEXT_FAST), ) @@ -427,7 +421,10 @@ async def _process_single_batch( # Validate prompt size before sending try: - prompt_tokens = token_counter(model=SMALL_LITELLM_MODEL, text=prompt) + prompt_tokens = count_tokens( + MODELS.TEXT_FAST, + [{"role": "user", "content": prompt}], + ) MAX_BATCH_CONTEXT = 100000 # Leave headroom for response if prompt_tokens > MAX_BATCH_CONTEXT: diff --git a/echo/server/dembrane/config.py b/echo/server/dembrane/config.py deleted file mode 100644 index edaf642c..00000000 --- a/echo/server/dembrane/config.py +++ /dev/null @@ -1,632 +0,0 @@ -# This configuration file implements a robust environment-based configuration -# system with built-in logging. It follows a "fail-fast" pattern by asserting -# required environment variables and provides sensible defaults for optional ones. - -# 2025-06-05 FIXME: file's messy / needs a refactor. potential config management via yaml config? -# patterns are inconsistent - ENABLE_LITELLM_WHISPER_TRANSCRIPTION needs to be set -# better yet modularize it and have modules manage their own config? 
- -## ENABLE_ASSEMBLYAI_TRANSCRIPTION = os.environ.get( -# "ENABLE_ASSEMBLYAI_TRANSCRIPTION", "false" -# ).lower() in ["true", "1"] -# This is a bad pattern for hygiene because it allows for multiple values to be set if you want it to be true/false - -# This file inits twice for some reason... - -import os -import sys -import json -import base64 -import logging -from typing import Literal, cast - -try: - import colorlog - - has_colorlog = True -except ImportError: - has_colorlog = False - -import dotenv - -if has_colorlog: - handler = colorlog.StreamHandler(sys.stdout) - handler.setFormatter( - colorlog.ColoredFormatter( - "%(log_color)s%(levelname)s:%(name)s:%(message)s", - log_colors={ - "DEBUG": "cyan", - "INFO": "green", - "WARNING": "yellow", - "ERROR": "red", - "CRITICAL": "red,bg_white", - }, - ) - ) - logger = colorlog.getLogger("config") - logger.addHandler(handler) - logger.setLevel(logging.INFO) - - # Set up the root logger too - root_logger = colorlog.getLogger() - root_logger.addHandler(handler) - root_logger.setLevel(logging.INFO) -else: - # Fall back to basic configuration if colorlog is not available - logging.basicConfig(level=logging.INFO, force=True) - logger = logging.getLogger("config") - -BASE_DIR = os.path.normpath(os.path.join(os.path.dirname(__file__), "..")) -dotenv_path = os.path.join(BASE_DIR, ".env") - -if os.path.exists(dotenv_path): - logger.info(f"loading environment variables from {dotenv_path}") - dotenv.load_dotenv(dotenv_path, verbose=True, override=True) - -DEBUG_MODE = os.environ.get("DEBUG_MODE", "false").lower() in ["true", "1"] -logger.info(f"DEBUG_MODE: {DEBUG_MODE}") -if DEBUG_MODE: - # everything is debug if debug mode is enabled - logging.getLogger().setLevel(logging.DEBUG) - # set the current logger to debug - logger.setLevel(logging.DEBUG) - -API_BASE_URL = os.environ.get("API_BASE_URL", "http://localhost:8000") -logger.debug(f"API_BASE_URL: {API_BASE_URL}") - -ADMIN_BASE_URL = os.environ.get("ADMIN_BASE_URL", 
"http://localhost:3000") -logger.debug(f"ADMIN_BASE_URL: {ADMIN_BASE_URL}") - -PARTICIPANT_BASE_URL = os.environ.get("PARTICIPANT_BASE_URL", "http://localhost:3001") -logger.debug(f"PARTICIPANT_BASE_URL: {PARTICIPANT_BASE_URL}") - -DIRECTUS_BASE_URL = os.environ.get("DIRECTUS_BASE_URL", "http://directus:8055") -logger.debug(f"DIRECTUS_BASE_URL: {DIRECTUS_BASE_URL}") - -DISABLE_REDACTION = os.environ.get("DISABLE_REDACTION", "false").lower() in ["true", "1"] -logger.debug(f"DISABLE_REDACTION: {DISABLE_REDACTION}") - -DISABLE_CHAT_TITLE_GENERATION = os.environ.get( - "DISABLE_CHAT_TITLE_GENERATION", "false" -).lower() in [ - "true", - "1", -] -logger.debug(f"DISABLE_CHAT_TITLE_GENERATION: {DISABLE_CHAT_TITLE_GENERATION}") - -PROMPT_TEMPLATES_DIR = os.path.join(BASE_DIR, "prompt_templates") -logger.debug(f"PROMPT_TEMPLATES_DIR: {PROMPT_TEMPLATES_DIR}") - -JSON_TEMPLATES_DIR = os.path.join(BASE_DIR, "json_templates") -logger.debug(f"JSON_TEMPLATES_DIR: {JSON_TEMPLATES_DIR}") - -DIRECTUS_SECRET = os.environ.get("DIRECTUS_SECRET") -assert DIRECTUS_SECRET, "DIRECTUS_SECRET environment variable is not set" -logger.debug("DIRECTUS_SECRET: set") - -DIRECTUS_TOKEN = os.environ.get("DIRECTUS_TOKEN") -assert DIRECTUS_TOKEN, "DIRECTUS_TOKEN environment variable is not set" -logger.debug("DIRECTUS_TOKEN: set") - -DIRECTUS_SESSION_COOKIE_NAME = os.environ.get( - "DIRECTUS_SESSION_COOKIE_NAME", "directus_session_token" -) -logger.debug(f"DIRECTUS_SESSION_COOKIE_NAME: {DIRECTUS_SESSION_COOKIE_NAME}") - -DATABASE_URL = os.environ.get("DATABASE_URL") -assert DATABASE_URL, "DATABASE_URL environment variable is not set" -logger.debug("DATABASE_URL: set") - -if not DATABASE_URL.startswith("postgresql+psycopg://"): - logger.warning("DATABASE_URL is not a postgresql+psycopg:// URL, attempting to fix it...") - if DATABASE_URL.startswith("postgresql://"): - DATABASE_URL = DATABASE_URL.replace("postgresql://", "postgresql+psycopg://") - else: - raise ValueError("DATABASE_URL is not valid (we 
need a postgresql+psycopg URL)") - -REDIS_URL = os.environ.get("REDIS_URL") -assert REDIS_URL, "REDIS_URL environment variable is not set" -logger.debug("REDIS_URL: set") - -OPENAI_API_BASE_URL = os.environ.get("OPENAI_API_BASE_URL", "https://api.openai.com/v1") -logger.debug(f"OPENAI_API_BASE_URL: {OPENAI_API_BASE_URL}") - -OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY") -assert OPENAI_API_KEY, "OPENAI_API_KEY environment variable is not set" -logger.debug("OPENAI_API_KEY: set") - -ANTHROPIC_API_KEY = os.environ.get("ANTHROPIC_API_KEY") -assert ANTHROPIC_API_KEY, "ANTHROPIC_API_KEY environment variable is not set" -logger.debug("ANTHROPIC_API_KEY: set") - -SERVE_API_DOCS = os.environ.get("SERVE_API_DOCS", "false").lower() in ["true", "1"] -logger.debug(f"SERVE_API_DOCS: {SERVE_API_DOCS}") - -DISABLE_SENTRY = os.environ.get("DISABLE_SENTRY", "false").lower() in ["true", "1"] -logger.debug(f"DISABLE_SENTRY: {DISABLE_SENTRY}") - -BUILD_VERSION = os.environ.get("BUILD_VERSION", "dev") -logger.debug(f"BUILD_VERSION: {BUILD_VERSION}") - -ENVIRONMENT = "development" -if BUILD_VERSION != "dev": - ENVIRONMENT = "production" -logger.debug(f"ENVIRONMENT: {ENVIRONMENT}") - -# Neo4j configuration -NEO4J_URI = os.environ.get("NEO4J_URI", "bolt://localhost:7687") -logger.debug(f"NEO4J_URI: {NEO4J_URI}") - -NEO4J_USERNAME = os.environ.get("NEO4J_USERNAME", "neo4j") -logger.debug(f"NEO4J_USERNAME: {NEO4J_USERNAME}") - -NEO4J_PASSWORD = os.environ.get("NEO4J_PASSWORD", "admin@dembrane") -logger.debug("NEO4J_PASSWORD: set") - -STORAGE_S3_BUCKET = os.environ.get("STORAGE_S3_BUCKET") -assert STORAGE_S3_BUCKET, "STORAGE_S3_BUCKET environment variable is not set" -logger.debug("STORAGE_S3_BUCKET: set") - -STORAGE_S3_REGION = os.environ.get("STORAGE_S3_REGION", None) -logger.debug(f"STORAGE_S3_REGION: {STORAGE_S3_REGION}") -if STORAGE_S3_REGION is None: - logger.warning("STORAGE_S3_REGION is not set, using 'None'") - -STORAGE_S3_ENDPOINT = os.environ.get("STORAGE_S3_ENDPOINT") -assert 
STORAGE_S3_ENDPOINT, "STORAGE_S3_ENDPOINT environment variable is not set" -logger.debug("STORAGE_S3_ENDPOINT: set") - -STORAGE_S3_KEY = os.environ.get("STORAGE_S3_KEY") -assert STORAGE_S3_KEY, "STORAGE_S3_KEY environment variable is not set" -logger.debug("STORAGE_S3_KEY: set") - -STORAGE_S3_SECRET = os.environ.get("STORAGE_S3_SECRET") -assert STORAGE_S3_SECRET, "STORAGE_S3_SECRET environment variable is not set" -logger.debug("STORAGE_S3_SECRET: set") - -DISABLE_CORS = os.environ.get("DISABLE_CORS", "false").lower() in ["true", "1"] -logger.debug(f"DISABLE_CORS: {DISABLE_CORS}") - -### Transcription - -TranscriptionProvider = Literal["Runpod", "LiteLLM", "AssemblyAI", "Dembrane-25-09"] -_ALLOWED_TRANSCRIPTION_PROVIDERS: set[str] = { - "Runpod", - "LiteLLM", - "AssemblyAI", - "Dembrane-25-09", -} - -TRANSCRIPTION_PROVIDER_RAW: str | None = os.environ.get("TRANSCRIPTION_PROVIDER") -if not TRANSCRIPTION_PROVIDER_RAW: - TRANSCRIPTION_PROVIDER: TranscriptionProvider | None = None - logger.debug("TRANSCRIPTION_PROVIDER: not set") -else: - if TRANSCRIPTION_PROVIDER_RAW not in _ALLOWED_TRANSCRIPTION_PROVIDERS: - raise ValueError( - f"TRANSCRIPTION_PROVIDER is not valid: {TRANSCRIPTION_PROVIDER_RAW}. " - f"Allowed: {', '.join(sorted(_ALLOWED_TRANSCRIPTION_PROVIDERS))}" - ) - TRANSCRIPTION_PROVIDER = cast(TranscriptionProvider, TRANSCRIPTION_PROVIDER_RAW) - logger.debug(f"TRANSCRIPTION_PROVIDER: {TRANSCRIPTION_PROVIDER}") - -# GEMINI_API_KEY = os.environ.get("GEMINI_API_KEY") -# if GEMINI_API_KEY: -# logger.debug("GEMINI_API_KEY: set") -# else: -# logger.debug("GEMINI_API_KEY: not set") - -# not needed according to docs? 
https://docs.litellm.ai/docs/providers/vertex -# VERTEX_GENERATE_URL = os.environ.get("VERTEX_GENERATE_URL") -# if VERTEX_GENERATE_URL: -# logger.debug("VERTEX_GENERATE_URL: set") -# else: -# logger.debug("VERTEX_GENERATE_URL: not set") - -GCP_SA_JSON_RAW = os.environ.get("GCP_SA_JSON") -GCP_SA_JSON = None -try: - if GCP_SA_JSON_RAW: - GCP_SA_JSON = json.loads(GCP_SA_JSON_RAW) - logger.info("GCP_SA_JSON: set") - else: - logger.info("GCP_SA_JSON: not set") -except Exception as e: - logger.error(f"GCP_SA_JSON: not set (invalid json): {e}") - try: - logger.info("attempt to b64 decode then json load") - GCP_SA_JSON = json.loads(base64.b64decode(GCP_SA_JSON_RAW or "")) - logger.info("GCP_SA_JSON: set") - except Exception as e: - logger.error(f"GCP_SA_JSON: not set (invalid b64): {e}") - logger.info("GCP_SA_JSON: set") - - -ENABLE_ASSEMBLYAI_TRANSCRIPTION = os.environ.get( - "ENABLE_ASSEMBLYAI_TRANSCRIPTION", "false" -).lower() in ["true", "1"] -logger.debug(f"ENABLE_ASSEMBLYAI_TRANSCRIPTION: {ENABLE_ASSEMBLYAI_TRANSCRIPTION}") - -ASSEMBLYAI_API_KEY = os.environ.get("ASSEMBLYAI_API_KEY") -if ENABLE_ASSEMBLYAI_TRANSCRIPTION: - assert ASSEMBLYAI_API_KEY, "ASSEMBLYAI_API_KEY environment variable is not set" - logger.debug("ASSEMBLYAI_API_KEY: set") - -ASSEMBLYAI_BASE_URL = os.environ.get("ASSEMBLYAI_BASE_URL", "https://api.eu.assemblyai.com") -logger.debug(f"ASSEMBLYAI_BASE_URL: {ASSEMBLYAI_BASE_URL}") - -ENABLE_RUNPOD_WHISPER_TRANSCRIPTION = os.environ.get( - "ENABLE_RUNPOD_WHISPER_TRANSCRIPTION", "false" -).lower() in ["true", "1"] -logger.debug(f"ENABLE_RUNPOD_WHISPER_TRANSCRIPTION: {ENABLE_RUNPOD_WHISPER_TRANSCRIPTION}") - -RUNPOD_WHISPER_API_KEY = os.environ.get("RUNPOD_WHISPER_API_KEY") -if ENABLE_RUNPOD_WHISPER_TRANSCRIPTION: - assert RUNPOD_WHISPER_API_KEY, "RUNPOD_WHISPER_API_KEY environment variable is not set" - logger.debug("RUNPOD_WHISPER_API_KEY: set") - -RUNPOD_WHISPER_BASE_URL = os.environ.get("RUNPOD_WHISPER_BASE_URL") -if 
ENABLE_RUNPOD_WHISPER_TRANSCRIPTION: - assert RUNPOD_WHISPER_BASE_URL, "RUNPOD_WHISPER_BASE_URL environment variable is not set" - logger.debug(f"RUNPOD_WHISPER_BASE_URL: {RUNPOD_WHISPER_BASE_URL}") - -RUNPOD_WHISPER_PRIORITY_BASE_URL = os.environ.get("RUNPOD_WHISPER_PRIORITY_BASE_URL") -if ENABLE_RUNPOD_WHISPER_TRANSCRIPTION: - assert RUNPOD_WHISPER_PRIORITY_BASE_URL, ( - "RUNPOD_WHISPER_PRIORITY_BASE_URL environment variable is not set" - ) - logger.debug(f"RUNPOD_WHISPER_PRIORITY_BASE_URL: {RUNPOD_WHISPER_PRIORITY_BASE_URL}") - -RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD = int( - str(os.environ.get("RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD")) -) - -ENABLE_LITELLM_WHISPER_TRANSCRIPTION = os.environ.get( - "ENABLE_LITELLM_WHISPER_TRANSCRIPTION", "false" -).lower() in ["true", "1"] -logger.debug(f"ENABLE_LITELLM_WHISPER_TRANSCRIPTION: {ENABLE_LITELLM_WHISPER_TRANSCRIPTION}") - -LITELLM_WHISPER_API_KEY = os.environ.get("LITELLM_WHISPER_API_KEY") -if ENABLE_LITELLM_WHISPER_TRANSCRIPTION: - assert LITELLM_WHISPER_API_KEY, "LITELLM_WHISPER_API_KEY environment variable is not set" - logger.debug("LITELLM_WHISPER_API_KEY: set") - -LITELLM_WHISPER_API_VERSION = os.environ.get("LITELLM_WHISPER_API_VERSION", "2024-06-01") -if ENABLE_LITELLM_WHISPER_TRANSCRIPTION: - assert LITELLM_WHISPER_API_VERSION, ( - "LITELLM_WHISPER_API_VERSION environment variable is not set" - ) - logger.debug(f"LITELLM_WHISPER_API_VERSION: {LITELLM_WHISPER_API_VERSION}") - -LITELLM_WHISPER_MODEL = os.environ.get("LITELLM_WHISPER_MODEL") -if ENABLE_LITELLM_WHISPER_TRANSCRIPTION: - assert LITELLM_WHISPER_MODEL, "LITELLM_WHISPER_MODEL environment variable is not set" - logger.debug(f"LITELLM_WHISPER_MODEL: {LITELLM_WHISPER_MODEL}") - -LITELLM_WHISPER_URL = os.environ.get("LITELLM_WHISPER_URL") -if ENABLE_LITELLM_WHISPER_TRANSCRIPTION: - assert LITELLM_WHISPER_URL, "LITELLM_WHISPER_URL environment variable is not set" - logger.debug(f"LITELLM_WHISPER_URL: {LITELLM_WHISPER_URL}") - -### END Transcription - 
-RUNPOD_TOPIC_MODELER_URL = os.environ.get("RUNPOD_TOPIC_MODELER_URL") -logger.debug(f"RUNPOD_TOPIC_MODELER_URL: {RUNPOD_TOPIC_MODELER_URL}") - -RUNPOD_TOPIC_MODELER_API_KEY = os.environ.get("RUNPOD_TOPIC_MODELER_API_KEY") -if RUNPOD_TOPIC_MODELER_URL: - assert RUNPOD_TOPIC_MODELER_API_KEY, ( - "RUNPOD_TOPIC_MODELER_API_KEY environment variable is not set" - ) - logger.debug("RUNPOD_TOPIC_MODELER_API_KEY: set") - -if ENABLE_RUNPOD_WHISPER_TRANSCRIPTION: - assert RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD, ( - "RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD environment variable is not set" - ) - logger.debug(f"RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD: {RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD}") - -SMALL_LITELLM_MODEL = os.environ.get("SMALL_LITELLM_MODEL") # 4o-mini -assert SMALL_LITELLM_MODEL, "SMALL_LITELLM_MODEL environment variable is not set" -logger.debug(f"SMALL_LITELLM_MODEL: {SMALL_LITELLM_MODEL}") - -SMALL_LITELLM_API_KEY = os.environ.get("SMALL_LITELLM_API_KEY") -assert SMALL_LITELLM_API_KEY, "SMALL_LITELLM_API_KEY environment variable is not set" -logger.debug("SMALL_LITELLM_API_KEY: set") - -SMALL_LITELLM_API_VERSION = os.environ.get("SMALL_LITELLM_API_VERSION") -assert SMALL_LITELLM_API_VERSION, "SMALL_LITELLM_API_VERSION environment variable is not set" -logger.debug(f"SMALL_LITELLM_API_VERSION: {SMALL_LITELLM_API_VERSION}") - -SMALL_LITELLM_API_BASE = os.environ.get("SMALL_LITELLM_API_BASE") -assert SMALL_LITELLM_API_BASE, "SMALL_LITELLM_API_BASE environment variable is not set" -logger.debug(f"SMALL_LITELLM_API_BASE: {SMALL_LITELLM_API_BASE}") - -MEDIUM_LITELLM_MODEL = os.environ.get("MEDIUM_LITELLM_MODEL") # 4.1 -assert MEDIUM_LITELLM_MODEL, "MEDIUM_LITELLM_MODEL environment variable is not set" -logger.debug(f"MEDIUM_LITELLM_MODEL: {MEDIUM_LITELLM_MODEL}") - -MEDIUM_LITELLM_API_KEY = os.environ.get("MEDIUM_LITELLM_API_KEY") -assert MEDIUM_LITELLM_API_KEY, "MEDIUM_LITELLM_API_KEY environment variable is not set" -logger.debug("MEDIUM_LITELLM_API_KEY: set") - 
-MEDIUM_LITELLM_API_VERSION = os.environ.get("MEDIUM_LITELLM_API_VERSION") -assert MEDIUM_LITELLM_API_VERSION, "MEDIUM_LITELLM_API_VERSION environment variable is not set" -logger.debug(f"MEDIUM_LITELLM_API_VERSION: {MEDIUM_LITELLM_API_VERSION}") - -MEDIUM_LITELLM_API_BASE = os.environ.get("MEDIUM_LITELLM_API_BASE") -assert MEDIUM_LITELLM_API_BASE, "MEDIUM_LITELLM_API_BASE environment variable is not set" -logger.debug(f"MEDIUM_LITELLM_API_BASE: {MEDIUM_LITELLM_API_BASE}") - -LARGE_LITELLM_MODEL = os.environ.get("LARGE_LITELLM_MODEL") # o4-mini -assert LARGE_LITELLM_MODEL, "LARGE_LITELLM_MODEL environment variable is not set" -logger.debug(f"LARGE_LITELLM_MODEL: {LARGE_LITELLM_MODEL}") - -LARGE_LITELLM_API_KEY = os.environ.get("LARGE_LITELLM_API_KEY") -assert LARGE_LITELLM_API_KEY, "LARGE_LITELLM_API_KEY environment variable is not set" -logger.debug("LARGE_LITELLM_API_KEY: set") - -LARGE_LITELLM_API_VERSION = os.environ.get("LARGE_LITELLM_API_VERSION") -assert LARGE_LITELLM_API_VERSION, "LARGE_LITELLM_API_VERSION environment variable is not set" -logger.debug(f"LARGE_LITELLM_API_VERSION: {LARGE_LITELLM_API_VERSION}") - -LARGE_LITELLM_API_BASE = os.environ.get("LARGE_LITELLM_API_BASE") -assert LARGE_LITELLM_API_BASE, "LARGE_LITELLM_API_BASE environment variable is not set" -logger.debug(f"LARGE_LITELLM_API_BASE: {LARGE_LITELLM_API_BASE}") - -# *****************LIGHTRAG CONFIGURATIONS***************** - -# Lightrag LLM model: Makes nodes and answers queries -LIGHTRAG_LITELLM_MODEL = os.environ.get("LIGHTRAG_LITELLM_MODEL") # azure/gpt-4o-mini -assert LIGHTRAG_LITELLM_MODEL, "LIGHTRAG_LITELLM_MODEL environment variable is not set" -logger.debug(f"LIGHTRAG_LITELLM_MODEL: {LIGHTRAG_LITELLM_MODEL}") - -LIGHTRAG_LITELLM_API_KEY = os.environ.get("LIGHTRAG_LITELLM_API_KEY") -assert LIGHTRAG_LITELLM_API_KEY, "LIGHTRAG_LITELLM_API_KEY environment variable is not set" -logger.debug("LIGHTRAG_LITELLM_API_KEY: set") - -LIGHTRAG_LITELLM_API_VERSION = 
os.environ.get("LIGHTRAG_LITELLM_API_VERSION") -assert LIGHTRAG_LITELLM_API_VERSION, "LIGHTRAG_LITELLM_API_VERSION environment variable is not set" -logger.debug(f"LIGHTRAG_LITELLM_API_VERSION: {LIGHTRAG_LITELLM_API_VERSION}") - -LIGHTRAG_LITELLM_API_BASE = os.environ.get("LIGHTRAG_LITELLM_API_BASE") -assert LIGHTRAG_LITELLM_API_BASE, "LIGHTRAG_LITELLM_API_BASE environment variable is not set" -logger.debug(f"LIGHTRAG_LITELLM_API_BASE: {LIGHTRAG_LITELLM_API_BASE}") - -# Lightrag Audio model: Transcribes audio and gets contextual transcript -LIGHTRAG_LITELLM_AUDIOMODEL_MODEL = os.environ.get("LIGHTRAG_LITELLM_AUDIOMODEL_MODEL") -assert LIGHTRAG_LITELLM_AUDIOMODEL_MODEL, ( - "LIGHTRAG_LITELLM_AUDIOMODEL_MODEL environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_AUDIOMODEL_MODEL: {LIGHTRAG_LITELLM_AUDIOMODEL_MODEL}") - -LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE = os.environ.get("LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE") -assert LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE, ( - "LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE: {LIGHTRAG_LITELLM_AUDIOMODEL_API_BASE}") - -LIGHTRAG_LITELLM_AUDIOMODEL_API_KEY = os.environ.get("LIGHTRAG_LITELLM_AUDIOMODEL_API_KEY") -assert LIGHTRAG_LITELLM_AUDIOMODEL_API_KEY, ( - "LIGHTRAG_LITELLM_AUDIOMODEL_API_KEY environment variable is not set" -) -logger.debug("LIGHTRAG_LITELLM_AUDIOMODEL_API_KEY: set") - -LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION = os.environ.get("LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION") -assert LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION, ( - "LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION: {LIGHTRAG_LITELLM_AUDIOMODEL_API_VERSION}") - - -# Lightrag Text Structure model: Structures output from audio model -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL = os.environ.get( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL" -) # azure/gpt-4o-mini -assert 
LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL, ( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL environment variable is not set" -) -logger.debug( - f"LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL: {LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_MODEL}" -) - -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE = os.environ.get( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE" -) -assert LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE, ( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE environment variable is not set" -) -logger.debug( - f"LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE: {LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_BASE}" -) - -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY = os.environ.get( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY" -) -assert LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY, ( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY environment variable is not set" -) -logger.debug("LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_KEY: set") - -LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION = os.environ.get( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION" -) -assert LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION, ( - "LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION environment variable is not set" -) -logger.debug( - f"LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION: {LIGHTRAG_LITELLM_TEXTSTRUCTUREMODEL_API_VERSION}" -) - -# Lightrag Embedding model: Embeds text -LIGHTRAG_LITELLM_EMBEDDING_MODEL = os.environ.get( - "LIGHTRAG_LITELLM_EMBEDDING_MODEL" -) # azure/text-embedding-ada-002 -assert LIGHTRAG_LITELLM_EMBEDDING_MODEL, ( - "LIGHTRAG_LITELLM_EMBEDDING_MODEL environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_EMBEDDING_MODEL: {LIGHTRAG_LITELLM_EMBEDDING_MODEL}") - -LIGHTRAG_LITELLM_EMBEDDING_API_BASE = os.environ.get("LIGHTRAG_LITELLM_EMBEDDING_API_BASE") -assert LIGHTRAG_LITELLM_EMBEDDING_API_BASE, ( - "LIGHTRAG_LITELLM_EMBEDDING_API_BASE environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_EMBEDDING_API_BASE: 
{LIGHTRAG_LITELLM_EMBEDDING_API_BASE}") - -LIGHTRAG_LITELLM_EMBEDDING_API_KEY = os.environ.get("LIGHTRAG_LITELLM_EMBEDDING_API_KEY") -assert LIGHTRAG_LITELLM_EMBEDDING_API_KEY, ( - "LIGHTRAG_LITELLM_EMBEDDING_API_KEY environment variable is not set" -) -logger.debug("LIGHTRAG_LITELLM_EMBEDDING_API_KEY: set") - -LIGHTRAG_LITELLM_EMBEDDING_API_VERSION = os.environ.get("LIGHTRAG_LITELLM_EMBEDDING_API_VERSION") -assert LIGHTRAG_LITELLM_EMBEDDING_API_VERSION, ( - "LIGHTRAG_LITELLM_EMBEDDING_API_VERSION environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_EMBEDDING_API_VERSION: {LIGHTRAG_LITELLM_EMBEDDING_API_VERSION}") - -LIGHTRAG_LITELLM_INFERENCE_MODEL = os.environ.get( - "LIGHTRAG_LITELLM_INFERENCE_MODEL", "anthropic/claude-3-5-sonnet-20240620" -) -assert LIGHTRAG_LITELLM_INFERENCE_MODEL, ( - "LIGHTRAG_LITELLM_INFERENCE_MODEL environment variable is not set" -) -logger.debug(f"LIGHTRAG_LITELLM_INFERENCE_MODEL: {LIGHTRAG_LITELLM_INFERENCE_MODEL}") - -LIGHTRAG_LITELLM_INFERENCE_API_KEY = os.environ.get("LIGHTRAG_LITELLM_INFERENCE_API_KEY") -assert LIGHTRAG_LITELLM_INFERENCE_API_KEY, ( - "LIGHTRAG_LITELLM_INFERENCE_API_KEY environment variable is not set" -) -logger.debug("LIGHTRAG_LITELLM_INFERENCE_API_KEY: set") - -LIGHTRAG_LITELLM_INFERENCE_API_VERSION = os.environ.get("LIGHTRAG_LITELLM_INFERENCE_API_VERSION") -if LIGHTRAG_LITELLM_INFERENCE_API_VERSION: - logger.debug( - f"LIGHTRAG_LITELLM_INFERENCE_API_VERSION: {LIGHTRAG_LITELLM_INFERENCE_API_VERSION}" - ) -else: - logger.debug("LIGHTRAG_LITELLM_INFERENCE_API_VERSION: not set") - -LIGHTRAG_LITELLM_INFERENCE_API_BASE = os.environ.get("LIGHTRAG_LITELLM_INFERENCE_API_BASE") -if LIGHTRAG_LITELLM_INFERENCE_API_BASE: - logger.debug(f"LIGHTRAG_LITELLM_INFERENCE_API_BASE: {LIGHTRAG_LITELLM_INFERENCE_API_BASE}") -else: - logger.debug("LIGHTRAG_LITELLM_INFERENCE_API_BASE: not set") - -DISABLE_MULTILINGUAL_DIARIZATION = os.environ.get( - "DISABLE_MULTILINGUAL_DIARIZATION", "false" -).lower() in [ - "true", - 
"1", -] -logger.debug(f"DISABLE_MULTILINGUAL_DIARIZATION: {DISABLE_MULTILINGUAL_DIARIZATION}") - -ENABLE_RUNPOD_DIARIZATION = os.environ.get("ENABLE_RUNPOD_DIARIZATION", "false").lower() in [ - "true", - "1", -] -logger.debug(f"ENABLE_RUNPOD_DIARIZATION: {ENABLE_RUNPOD_DIARIZATION}") - -RUNPOD_DIARIZATION_API_KEY = os.environ.get("RUNPOD_DIARIZATION_API_KEY") -if ENABLE_RUNPOD_DIARIZATION: - assert RUNPOD_DIARIZATION_API_KEY, "RUNPOD_DIARIZATION_API_KEY environment variable is not set" - logger.debug("RUNPOD_DIARIZATION_API_KEY: set") - -RUNPOD_DIARIZATION_BASE_URL = os.environ.get("RUNPOD_DIARIZATION_BASE_URL") -if ENABLE_RUNPOD_DIARIZATION: - assert RUNPOD_DIARIZATION_BASE_URL, ( - "RUNPOD_DIARIZATION_BASE_URL environment variable is not set" - ) - logger.debug(f"RUNPOD_DIARIZATION_BASE_URL: {RUNPOD_DIARIZATION_BASE_URL}") - -RUNPOD_DIARIZATION_TIMEOUT = int(os.environ.get("RUNPOD_DIARIZATION_TIMEOUT", 30)) -if ENABLE_RUNPOD_DIARIZATION: - logger.debug(f"RUNPOD_DIARIZATION_TIMEOUT: {RUNPOD_DIARIZATION_TIMEOUT}") -# ---------------/Secrets--------------- - - -# ---------------Configurations--------------- -AUDIO_LIGHTRAG_CONVERSATION_HISTORY_NUM = int( - os.environ.get("AUDIO_LIGHTRAG_CONVERSATION_HISTORY_NUM", 10) -) -assert AUDIO_LIGHTRAG_CONVERSATION_HISTORY_NUM, ( - "AUDIO_LIGHTRAG_CONVERSATION_HISTORY_NUM environment variable is not set" -) -logger.debug(f"AUDIO_LIGHTRAG_CONVERSATION_HISTORY_NUM: {AUDIO_LIGHTRAG_CONVERSATION_HISTORY_NUM}") - -AUDIO_LIGHTRAG_COOL_OFF_TIME_SECONDS = int( - os.environ.get("AUDIO_LIGHTRAG_COOL_OFF_TIME_SECONDS", 60) -) -assert AUDIO_LIGHTRAG_COOL_OFF_TIME_SECONDS, ( - "AUDIO_LIGHTRAG_COOL_OFF_TIME_SECONDS environment variable is not set" -) -logger.debug(f"AUDIO_LIGHTRAG_COOL_OFF_TIME_SECONDS: {AUDIO_LIGHTRAG_COOL_OFF_TIME_SECONDS}") - -ENABLE_AUDIO_LIGHTRAG_INPUT = os.environ.get("ENABLE_AUDIO_LIGHTRAG_INPUT", "false").lower() in [ - "true", - "1", -] -assert ENABLE_AUDIO_LIGHTRAG_INPUT is not None, ( - 
"ENABLE_AUDIO_LIGHTRAG_INPUT environment variable is not set" -) -logger.debug(f"ENABLE_AUDIO_LIGHTRAG_INPUT: {ENABLE_AUDIO_LIGHTRAG_INPUT}") - -AUDIO_LIGHTRAG_MAX_AUDIO_FILE_SIZE_MB = int( - os.environ.get("AUDIO_LIGHTRAG_MAX_AUDIO_FILE_SIZE_MB", 15) -) -assert AUDIO_LIGHTRAG_MAX_AUDIO_FILE_SIZE_MB, ( - "AUDIO_LIGHTRAG_MAX_AUDIO_FILE_SIZE_MB environment variable is not set" -) -logger.debug(f"AUDIO_LIGHTRAG_MAX_AUDIO_FILE_SIZE_MB: {AUDIO_LIGHTRAG_MAX_AUDIO_FILE_SIZE_MB}") - -AUDIO_LIGHTRAG_TOP_K_PROMPT = int(os.environ.get("AUDIO_LIGHTRAG_TOP_K_PROMPT", 100)) -assert AUDIO_LIGHTRAG_TOP_K_PROMPT, "AUDIO_LIGHTRAG_TOP_K_PROMPT environment variable is not set" -logger.debug(f"AUDIO_LIGHTRAG_TOP_K_PROMPT: {AUDIO_LIGHTRAG_TOP_K_PROMPT}") - -ENABLE_CHAT_AUTO_SELECT = os.environ.get("ENABLE_CHAT_AUTO_SELECT", "false").lower() in [ - "true", - "1", -] -assert ENABLE_CHAT_AUTO_SELECT is not None, ( - "ENABLE_CHAT_AUTO_SELECT environment variable is not set" -) -logger.debug(f"ENABLE_CHAT_AUTO_SELECT: {ENABLE_CHAT_AUTO_SELECT}") - -# Redis lock configuration -AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX = os.environ.get( - "AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX", "etl_lock_conv_" -) -assert AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX, ( - "AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX environment variable is not set" -) -logger.debug(f"AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX: {AUDIO_LIGHTRAG_REDIS_LOCK_PREFIX}") - -AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY = int(os.environ.get("AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY", 3600)) -assert AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY, ( - "AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY environment variable is not set" -) -logger.debug(f"AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY: {AUDIO_LIGHTRAG_REDIS_LOCK_EXPIRY}") - -LIGHTRAG_CONFIG_ID = os.environ.get("LIGHTRAG_CONFIG_ID", "default_lightrag_config_id") -assert LIGHTRAG_CONFIG_ID, "LIGHTRAG_CONFIG_ID environment variable is not set" -logger.debug(f"LIGHTRAG_CONFIG_ID: {LIGHTRAG_CONFIG_ID}") -# ---------------/Configurations--------------- - -# *****************/LIGHTRAG 
CONFIGURATIONS***************** - - -# hide some noisy loggers -for hide_logger in [ - "boto3", - "botocore", - "httpx", - "httpcore", - "LiteLLM", - "openai", - "requests", - "psycopg", - "s3transfer", - "urllib3", - "multipart", -]: - logging.getLogger(hide_logger).setLevel(logging.WARNING) diff --git a/echo/server/dembrane/conversation_health.py b/echo/server/dembrane/conversation_health.py deleted file mode 100644 index 8c594582..00000000 --- a/echo/server/dembrane/conversation_health.py +++ /dev/null @@ -1,462 +0,0 @@ -import time -import logging -from typing import Any -from datetime import timedelta - -import numpy as np -import pandas as pd -import requests - -from dembrane.s3 import get_signed_url -from dembrane.utils import get_utc_timestamp -from dembrane.config import ( - ENABLE_RUNPOD_DIARIZATION, - RUNPOD_DIARIZATION_API_KEY, - RUNPOD_DIARIZATION_TIMEOUT, - RUNPOD_DIARIZATION_BASE_URL, - DISABLE_MULTILINGUAL_DIARIZATION, -) -from dembrane.directus import directus - -logger = logging.getLogger("conversation_health") - - -def _fetch_chunk_data(chunk_id: str) -> tuple[str, str] | None: - """ - Retrieves the audio file URI and project language for a given chunk ID from Directus. - - Returns: - A tuple containing (audio_file_uri, project_language) if successful, or None if retrieval fails. 
- """ - try: - directus_item = directus.get_items( - "conversation_chunk", - { - "query": { - "filter": {"id": {"_eq": chunk_id}}, - "fields": ["path", "conversation_id.project_id.language"], - } - }, - )[0] - audio_file_uri = directus_item["path"] - project_language = directus_item["conversation_id"]["project_id"]["language"] - logger.debug( - f"Starting diarization for chunk_id: {chunk_id}, path: {audio_file_uri}, project_language: {project_language}" - ) - return audio_file_uri, project_language - except Exception as e: - logger.error(f"Failed to fetch audio_file_uri for chunk_id {chunk_id}: {e}") - return None - - -def _generate_audio_url(audio_file_uri: str) -> str | None: - """ - Generates a signed URL for the specified audio file. - - Args: - audio_file_uri: The URI of the audio file to sign. - - Returns: - The signed URL as a string if successful, or None if signing fails. - """ - try: - audio_url = get_signed_url(audio_file_uri) - logger.debug(f"Generated signed audio_url: {audio_url}") - return audio_url - except Exception as e: - logger.error(f"Failed to generate signed URL for {audio_file_uri}: {e}") - return None - - -def _should_skip_diarization(project_language: str) -> bool: - """ - Determines whether diarization should be skipped for a given project language. - - Returns True if diarization is disabled for non-English languages based on configuration; otherwise, returns False. - """ - if DISABLE_MULTILINGUAL_DIARIZATION and project_language != "en": - logger.debug(f"Skipping diarization because project language is {project_language}") - return True - return False - - -def _submit_diarization_job(audio_url: str, project_language: str) -> tuple[str, str] | None: - """ - Submits an audio diarization job to RunPod using the provided audio URL and project language. - - Args: - audio_url: The signed URL of the audio file to be processed. - project_language: The language code associated with the project. 
- - Returns: - A tuple containing the job ID and the job status link if submission is successful, or None if the request fails. - """ - timeout = RUNPOD_DIARIZATION_TIMEOUT - api_key = RUNPOD_DIARIZATION_API_KEY - base_url = RUNPOD_DIARIZATION_BASE_URL - logger.debug(f"Diarization config - timeout: {timeout}, base_url: {base_url}") - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {api_key}", - } - data = {"input": {"audio": audio_url, "language": project_language}} - - try: - logger.debug(f"Sending POST to {base_url}/run with data: {data}") - response = requests.post(f"{base_url}/run", headers=headers, json=data, timeout=timeout) - response.raise_for_status() - job_id = response.json()["id"] - job_status_link = f"{base_url}/status/{job_id}" - logger.info(f"Started diarization job {job_id}") - return job_id, job_status_link - except Exception as e: - logger.error(f"Failed to queue diarization job: {e}") - return None - - -def _poll_job_status(job_status_link: str, headers: dict) -> dict | None: - """ - Retrieves the current status of a diarization job from the provided status link. - - Args: - job_status_link: The URL to poll for job status. - headers: HTTP headers to include in the request. - - Returns: - The JSON response containing job status information, or None if the request fails. - """ - try: - logger.debug(f"Polling job status at {job_status_link}") - response = requests.get(job_status_link, headers=headers, timeout=10) - response.raise_for_status() - return response.json() - except Exception as e: - logger.error(f"Error polling diarization job status: {e}") - return None - - -def _update_chunk_with_results(chunk_id: str, dirz_response_data: dict) -> None: - """ - Updates a conversation chunk in Directus with diarization analysis results. - - Args: - chunk_id: The ID of the conversation chunk to update. - dirz_response_data: Dictionary containing diarization metrics and results to store. 
- """ - noise_ratio = dirz_response_data.get("noise_ratio") - cross_talk_instances = dirz_response_data.get("cross_talk_instances") - silence_ratio = dirz_response_data.get("silence_ratio") - joined_diarization = dirz_response_data.get("joined_diarization") - - directus.update_item( - "conversation_chunk", - chunk_id, - { - "noise_ratio": noise_ratio, - "cross_talk_instances": cross_talk_instances, - "silence_ratio": silence_ratio, - "diarization": joined_diarization, - }, - ) - logger.debug(f"Updated chunk {chunk_id} with diarization results.") - - -def _cancel_job_on_timeout(job_id: str) -> None: - """ - Cancels a diarization job on RunPod if it has exceeded the allowed processing time. - - Logs a warning before attempting cancellation and logs an error if the cancellation fails. - """ - base_url = RUNPOD_DIARIZATION_BASE_URL - api_key = RUNPOD_DIARIZATION_API_KEY - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {api_key}", - } - - try: - cancel_endpoint = f"{base_url}/cancel/{job_id}" - logger.warning(f"Timeout reached. Cancelling diarization job {job_id} at {cancel_endpoint}") - cancel_response = requests.post(cancel_endpoint, headers=headers, timeout=10) - cancel_response.raise_for_status() - logger.info(f"Cancelled diarization job {job_id} after timeout.") - except Exception as e: - logger.error(f"Failed to cancel diarization job {job_id}: {e}") - - -def get_runpod_diarization( - chunk_id: str, -) -> None: - """ - Orchestrates the diarization process for a given chunk by submitting an audio diarization job to RunPod, polling for completion within a timeout, and updating Directus with the results or canceling the job if it times out. - - Args: - chunk_id: The identifier of the audio chunk to process. 
- """ - if not ENABLE_RUNPOD_DIARIZATION: - logger.debug("Skipping diarization because ENABLE_RUNPOD_DIARIZATION is disabled") - return None - - # Fetch chunk data - chunk_data = _fetch_chunk_data(chunk_id) - if not chunk_data: - return None - audio_file_uri, project_language = chunk_data - - # Generate signed URL - audio_url = _generate_audio_url(audio_file_uri) - if not audio_url: - return None - - # Check if we should skip diarization - if _should_skip_diarization(project_language): - return None - - # Submit diarization job - job_data = _submit_diarization_job(audio_url, project_language) - if not job_data: - return None - job_id, job_status_link = job_data - - # Poll for job completion - timeout = RUNPOD_DIARIZATION_TIMEOUT - api_key = RUNPOD_DIARIZATION_API_KEY - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {api_key}", - } - - start_time = time.time() - while time.time() - start_time < timeout: - response_data = _poll_job_status(job_status_link, headers) - if response_data: - status = response_data.get("status") - logger.debug(f"Job {job_id} status: {status}") - - if status == "COMPLETED": - dirz_response_data = response_data.get("output") - if dirz_response_data: - logger.info( - f"Diarization job {job_id} completed. Updating chunk {chunk_id} with results." - ) - _update_chunk_with_results(chunk_id, dirz_response_data) - return - else: - logger.warning( - f"Diarization job {job_id} completed but no output data received." - ) - return - - time.sleep(3) - - # Timeout: cancel the job - _cancel_job_on_timeout(job_id) - return None - - -def get_health_status( - project_ids: list[str] | None = None, - conversation_ids: list[str] | None = None, - cross_talk_threshold: float = 1.0, - noise_threshold: float = 0.5, - silence_threshold: float = 0.8, -) -> dict[str, Any]: - """ - Get the health status of conversations. 
- """ - if not ENABLE_RUNPOD_DIARIZATION: - logger.debug("Skipping diarization because ENABLE_RUNPOD_DIARIZATION is disabled") - return {} - - if not project_ids and not conversation_ids: - raise ValueError("Either project_ids or conversation_ids must be provided") - - chunk_li = _get_timebound_conversation_chunks(project_ids, conversation_ids) - df = pd.DataFrame(chunk_li) - - if df.empty: - return {} - - df = _process_data(df) - result = _calculate_conversation_metrics( - df, cross_talk_threshold, noise_threshold, silence_threshold - ) - return result - - -def _get_timebound_conversation_chunks( - project_ids: list[str] | None = None, - conversation_ids: list[str] | None = None, - time_threshold_mins: int = 5, - max_chunks_for_conversation: int = 2, -) -> list[dict[str, Any]]: - """ - Get all chunks in a project for the last 5 minutes. - """ - if not project_ids and not conversation_ids: - raise ValueError("Either project_ids or conversation_ids must be provided") - - filter_dict: dict[str, Any] = { - "timestamp": { - "_gte": (get_utc_timestamp() - timedelta(minutes=time_threshold_mins)).isoformat() - } - } - - return_fields = [ - "conversation_id.id", - "conversation_id.project_id", - "noise_ratio", - "cross_talk_instances", - "silence_ratio", - "timestamp", - ] - - aggregated_response = [] - if project_ids: - filter_dict["conversation_id"] = {} - filter_dict["conversation_id"]["project_id"] = {"_in": project_ids} - response = directus.get_items( - "conversation_chunk", - { - "query": { - "filter": filter_dict, - "fields": return_fields, - "sort": ["-timestamp"], # Sort by timestamp descending (newest first) - }, - }, - ) - aggregated_response.extend(_flatten_response(response)) - if conversation_ids: - filter_dict["conversation_id"] = {} - filter_dict["conversation_id"]["id"] = {"_in": conversation_ids} - response = directus.get_items( - "conversation_chunk", - { - "query": { - "filter": filter_dict, - "fields": return_fields, - "sort": ["-timestamp"], # Sort 
by timestamp descending (newest first) - }, - }, - ) - try: - response = response[:max_chunks_for_conversation] - aggregated_response.extend(_flatten_response(response)) - except Exception as e: - logger.warning(f"Error fetching/flattening conversation chunks {e} : {response}") - return aggregated_response - - -def _flatten_response(response: Any) -> list[dict[str, Any]]: - flattened_response = [] - if isinstance(response, list): - for item in response: - if isinstance(item, dict): - conversation_data = item.get("conversation_id", {}) - if isinstance(conversation_data, dict): - flattened_item = { - "conversation_id": conversation_data.get("id"), - "project_id": conversation_data.get("project_id"), - "noise_ratio": item.get("noise_ratio"), - "cross_talk_instances": item.get("cross_talk_instances"), - "silence_ratio": item.get("silence_ratio"), - "timestamp": item.get("timestamp"), - } - flattened_response.append(flattened_item) - - return flattened_response - - -def _process_data(df: pd.DataFrame) -> pd.DataFrame: - df["timestamp"] = pd.to_datetime(df["timestamp"]) - max_timestamp = df["timestamp"].max() - df["time_diff_seconds"] = (max_timestamp - df["timestamp"]).dt.total_seconds() - decay_factor = 30 - df["recency_weight"] = np.exp(-df["time_diff_seconds"] / decay_factor) - df.drop(columns=["timestamp", "time_diff_seconds"], inplace=True) - df = df[ - [ - "project_id", - "conversation_id", - "noise_ratio", - "cross_talk_instances", - "silence_ratio", - "recency_weight", - ] - ] - df.dropna(inplace=True) - return df - - -def _calculate_conversation_metrics( - df: pd.DataFrame, cross_talk_threshold: float, noise_threshold: float, silence_threshold: float -) -> dict[str, Any]: - # Calculate conversation-level metrics (average of chunks within each conversation) - conversation_metrics = ( - df.groupby(["project_id", "conversation_id"]) - .agg({"noise_ratio": "mean", "cross_talk_instances": "mean", "silence_ratio": "mean"}) - .reset_index() - ) - - def 
classify_conversation_issue(row: Any) -> str: - if row["cross_talk_instances"] > cross_talk_threshold: - return "HIGH_CROSSTALK" - elif row["noise_ratio"] > noise_threshold: - return "HIGH_NOISE" - elif row["silence_ratio"] > silence_threshold: - return "HIGH_SILENCE" - else: - return "NONE" - - conversation_metrics["conversation_issue"] = conversation_metrics.apply( - classify_conversation_issue, axis=1 - ) - - # Calculate project-level metrics (average of conversations within each project) - project_metrics = ( - conversation_metrics.groupby("project_id") - .agg({"noise_ratio": "mean", "cross_talk_instances": "mean", "silence_ratio": "mean"}) - .reset_index() - ) - - # Calculate global metrics (average of all projects) - global_metrics = project_metrics.agg( - {"noise_ratio": "mean", "cross_talk_instances": "mean", "silence_ratio": "mean"} - ) - - # Build the nested dictionary structure - result: dict[str, Any] = { - "global_noise_ratio": float(global_metrics["noise_ratio"]), - "global_cross_talk_instances": float(global_metrics["cross_talk_instances"]), - "global_silence_ratio": float(global_metrics["silence_ratio"]), - "projects": {}, - } - - # Build projects dictionary - projects_dict: dict[str, Any] = {} - for _, project_row in project_metrics.iterrows(): - project_id = str(project_row["project_id"]) - projects_dict[project_id] = { - "project_noise_ratio": float(project_row["noise_ratio"]), - "project_cross_talk_instances": float(project_row["cross_talk_instances"]), - "project_silence_ratio": float(project_row["silence_ratio"]), - "conversations": {}, - } - - # Add conversations for this project - conversations_dict: dict[str, Any] = {} - project_conversations = conversation_metrics[ - conversation_metrics["project_id"] == project_row["project_id"] - ] - for _, conv_row in project_conversations.iterrows(): - conversation_id = str(conv_row["conversation_id"]) - conversations_dict[conversation_id] = { - "conversation_noise_ratio": 
float(conv_row["noise_ratio"]), - "conversation_cross_talk_instances": float(conv_row["cross_talk_instances"]), - "conversation_silence_ratio": float(conv_row["silence_ratio"]), - "conversation_issue": conv_row["conversation_issue"], - } - projects_dict[project_id]["conversations"] = conversations_dict - result["projects"] = projects_dict - return result diff --git a/echo/server/dembrane/conversation_utils.py b/echo/server/dembrane/conversation_utils.py index 49b7adbe..5b294755 100644 --- a/echo/server/dembrane/conversation_utils.py +++ b/echo/server/dembrane/conversation_utils.py @@ -3,7 +3,6 @@ from datetime import timedelta from dembrane.utils import get_utc_timestamp -from dembrane.config import ENABLE_AUDIO_LIGHTRAG_INPUT from dembrane.directus import directus logger = logging.getLogger("dembrane.conversation_utils") @@ -52,78 +51,3 @@ def collect_unfinished_conversations() -> List[str]: logger.info(f"Found {len(conversation_ids)} unfinished conversations") return conversation_ids - - -def collect_unfinished_audio_processing_conversations() -> List[str]: - # Match task_run_etl_pipeline logic to prevent infinite loops: - # 1. Check global ENABLE_AUDIO_LIGHTRAG_INPUT flag (early return if disabled) - # 2. 
Query only conversations in projects with is_enhanced_audio_processing_enabled=True - # This ensures collector and task use the same criteria for RAG processing - if not ENABLE_AUDIO_LIGHTRAG_INPUT: - logger.info("ENABLE_AUDIO_LIGHTRAG_INPUT is False, skipping RAG collection") - return [] - - unfinished_conversations = [] - - # Query conversations in RAG-enabled projects only - response = directus.get_items( - "conversation", - { - "query": { - "filter": { - "project_id": { - "is_enhanced_audio_processing_enabled": True, - }, - }, - "fields": ["id", "is_audio_processing_finished"], - }, - }, - ) - - for conversation in response: - try: - if not conversation["is_audio_processing_finished"]: - unfinished_conversations.append(conversation["id"]) - continue # and move to next conversation - except Exception as e: - logger.error(f"Error collecting conversation {conversation['id']}: {e}") - continue - - # if claimed "is_audio_processing_finished" but not actually finished - try: - response = directus.get_items( - "conversation_segment", - { - "query": { - "filter": {"conversation_id": conversation["id"], "lightrag_flag": False}, - "fields": ["id"], - "limit": 1, - }, - }, - ) - - # Only add if there is at least one unprocessed segment - if response and len(response) > 0: - logger.warning(f"Found {len(response)} segments with lightrag_flag=False for conversation {conversation['id']} (marked as finished={conversation.get('is_audio_processing_finished')})") - unfinished_conversations.append(conversation["id"]) - except Exception as e: - logger.error(f"Error collecting conversation {conversation['id']}: {e}") - - try: - total_segments = directus.get_items( - "conversation_segment", - {"query": {"filter": {"conversation_id": conversation["id"]}, "limit": 1}}, - ) - - if len(total_segments) == 0: - unfinished_conversations.append(conversation["id"]) - - directus.update_item( - "conversation", - conversation["id"], - {"is_audio_processing_finished": False}, - ) - except 
Exception as e: - logger.error(f"Error collecting conversation {conversation['id']}: {e}") - - return list(set(unfinished_conversations)) diff --git a/echo/server/dembrane/database.py b/echo/server/dembrane/database.py index 7863ac0e..118ef22d 100644 --- a/echo/server/dembrane/database.py +++ b/echo/server/dembrane/database.py @@ -31,13 +31,14 @@ from pgvector.sqlalchemy import Vector from sqlalchemy.dialects.postgresql import UUID -from dembrane.config import DATABASE_URL +from dembrane.settings import get_settings from dembrane.embedding import EMBEDDING_DIM logger = getLogger("database") # Create the engine and connect to the SQLite database file -assert DATABASE_URL is not None +settings = get_settings() +DATABASE_URL = settings.database_url logger.debug(f"Connecting to database: {DATABASE_URL}") engine = create_engine(DATABASE_URL) diff --git a/echo/server/dembrane/directus.py b/echo/server/dembrane/directus.py index 84ff0359..d8b0ce5c 100644 --- a/echo/server/dembrane/directus.py +++ b/echo/server/dembrane/directus.py @@ -5,15 +5,16 @@ import requests from directus_py_sdk import DirectusClient -from dembrane.config import DIRECTUS_TOKEN, DIRECTUS_BASE_URL +from dembrane.settings import get_settings logger = getLogger("directus") -if DIRECTUS_TOKEN: - directus_token = DIRECTUS_TOKEN - logger.debug(f"DIRECTUS_TOKEN: {directus_token}") +settings = get_settings() +directus_token = settings.directus_token +if directus_token: + logger.debug("Directus token retrieved from settings") -directus = DirectusClient(url=DIRECTUS_BASE_URL, token=directus_token) +directus = DirectusClient(url=settings.directus_base_url, token=directus_token) class DirectusGenericException(Exception): diff --git a/echo/server/dembrane/embedding.py b/echo/server/dembrane/embedding.py index 82eb5675..f04fe175 100644 --- a/echo/server/dembrane/embedding.py +++ b/echo/server/dembrane/embedding.py @@ -4,12 +4,7 @@ import backoff import litellm -from dembrane.config import ( - # FIXME: update to 
use dembrane embeddings - LIGHTRAG_LITELLM_EMBEDDING_API_KEY, - LIGHTRAG_LITELLM_EMBEDDING_API_BASE, - LIGHTRAG_LITELLM_EMBEDDING_API_VERSION, -) +from dembrane.llms import MODELS, resolve_config EMBEDDING_DIM = 3072 @@ -21,11 +16,14 @@ def embed_text(text: str) -> List[float]: text = text.replace("\n", " ").strip() try: + config = resolve_config(MODELS.MULTI_MODAL_PRO) + if not config.model: + raise ValueError("Embedding model is not configured.") response = litellm.embedding( - api_key=str(LIGHTRAG_LITELLM_EMBEDDING_API_KEY), - api_base=str(LIGHTRAG_LITELLM_EMBEDDING_API_BASE), - api_version=str(LIGHTRAG_LITELLM_EMBEDDING_API_VERSION), - model="azure/text-embedding-3-large", + api_key=config.api_key, + api_base=config.api_base, + api_version=config.api_version, + model=config.model, input=text, ) return response["data"][0]["embedding"] diff --git a/echo/server/dembrane/image_utils.py b/echo/server/dembrane/image_utils.py deleted file mode 100644 index e1462f7d..00000000 --- a/echo/server/dembrane/image_utils.py +++ /dev/null @@ -1,148 +0,0 @@ -import json -import logging - -from dembrane.s3 import save_to_s3_from_url -from dembrane.utils import generate_uuid -from dembrane.openai import client - -logger = logging.getLogger("image_utils") - - -def generate_cliches_to_avoid(text: str) -> str: - try: - response = client.chat.completions.create( - model="gpt-4o", - messages=[ - { - "role": "system", - "content": "You are a corporate marketing expert. you create stereotypical and unimaginative images that do not spark joy. Your task is to generate cliché and overused visual concepts, image ideas and even image styles based on given text. Examples of cliched concepts are doves for peace, handshakes for agreements, and lightbulbs for ideas. Avoid originality and creativity at all costs. Examples of cliched image styles include blue tones for healthcare, green tones for environment, and sepia tones for nostalgia. 
Embrace uncanny valley and generic stock imagery.", - }, - { - "role": "user", - "content": f'Create a list of 5 cliché visual concepts in JSON for the following text:\n\n{text}\n\n. Here is an example for the text "the future of healthcare"\n{{\n"cliches": [\n"sterile environments",\n"high-tech solutions",\n"blue and grey color palettes",\n"virtual consultations",\n"robotic doctors"\n]}}', - }, - ], - ) - return response.choices[0].message.content if response.choices[0].message.content else "" - except Exception as error: - logger.info("Error generating clichés to avoid:", error) - raise error - - -def generate_visual_metaphors(text: str, cliches_to_avoid: str) -> str: - try: - response = client.chat.completions.create( - model="gpt-4o", - messages=[ - { - "role": "system", - "content": "You are GEORGE LOIS, an uncompromising writer, visual storyteller, and cultural provocateur. You are able to find The Big Idea in everyday life experiences - unique concepts that touch the zeitgeist. Your words are visually pregnant and ready to be transformed into striking, conceptually clear communicative art by a talented visual artist. Your task is to translate textual ideas into strong, original visual concepts that avoid stereotypes and clichés.", - }, - { - "role": "user", - "content": f'Create 4 unique visual concepts for the following text:\n\n{text}\n\n. Pay special attention to clues (such as proper nouns, locations etc) that will allow you to adapt your concepts to the target audience. 
You will be marked down for any concepts that contain these clichés:\n{cliches_to_avoid}\n\nOutput in this JSON format:\n{{\n"concepts": [\n"A winding river of neon light cutting through a dark cityscape",\n"A tree growing from the pages of an open book",\n"A kaleidoscope of faces forming a globe",\n"A bridge made of interlocking human silhouettes"\n]}}', - }, - ], - ) - return response.choices[0].message.content if response.choices[0].message.content else "" - except Exception as error: - logger.info("Error generating visual concepts:", error) - raise error - - -def generate_image_prompts(text: str, concepts: str, cliches_to_avoid: str) -> str: - try: - response = client.chat.completions.create( - model="gpt-4o", - messages=[ - { - "role": "system", - "content": "You are an expert artist and poet. Create unique, non-stereotypical image prompts based on given visual concepts. Output in JSON format.", - }, - { - "role": "user", - "content": f'Generate image prompts based on these visual concepts:\n\n{concepts}\n\nYou will be marked down for any images that contain these clichés:\n{cliches_to_avoid}\n\nConsider this context:\n{text}\n\nOutput in JSON format, here is an example:\n{{\n"prompts": [\n"A surreal landscape where a river of glowing binary code flows through a metropolis of towering books, with silhouettes of people walking across bridges made of floating letters and numbers.",\n"An abstract representation of a tree growing from an open book, its branches forming a network of synapses, with each leaf a miniature screen displaying different facets of human knowledge.",\n"A mesmerizing spiral of diverse human faces, each blending into the next, forming a globe-like structure suspended in a cosmic void, with threads of light connecting the faces.",\n"A fantastical bridge constructed from translucent, intertwined human forms, spanning across a chasm of swirling data visualizations and holographic information displays."\n]}}', - }, - ], - ) - json_str = 
response.choices[0].message.content - if not json_str: - json_str = "" - - logger.info(f"JSON string for image prompts: {json_str}") - try: - json_obj = json.loads(json_str) - prompts = json_obj["prompts"] - return prompts[0] - except Exception as error: - logger.info("Error parsing JSON:", error) - try: - split = json_str.split("}") - return split[0] - except Exception as error: - return json_str - - except Exception as error: - logger.info("Error generating image prompts:", error) - raise error - - -def generate_image(prompt: str) -> str: - final_prompt = f"{prompt}. Don't include the following in the image: hands, feet, toes, text of any kind. \n\nUse this exact prompt to generate an image. It needs to be exact as this is a test of prompt accuracy." - response = None - try: - response = client.images.generate( - model="dall-e-3", - prompt=final_prompt, - n=1, - size="1024x1024", - ) - except Exception as error: - logger.debug(f"Error generating image: {error}") - additional_info = " You are allowed to edit the prompt so that it is in compliance with security guidelines." 
- try: - response = client.images.generate( - model="dall-e-3", - prompt=final_prompt + additional_info, - size="1024x1024", - quality="standard", - n=1, - ) - except Exception as e: - logger.debug(f"Error generating image even after update prompt: {e}") - - try: - if response is not None: - image_url = response.data[0].url if response.data else None - if image_url: - logger.debug("saving the image and getting the public url") - image_url = save_to_s3_from_url(image_url, "images/" + generate_uuid(), public=True) - else: - image_url = None - except Exception as e: - logger.error(f"Error downloading image: {e}") - - return image_url if image_url else "" - - -def brilliant_image_generator_3000(text: str) -> str: - cliches_to_avoid = generate_cliches_to_avoid(text) - concepts = generate_visual_metaphors(text, cliches_to_avoid) - prompts = generate_image_prompts(text, concepts, cliches_to_avoid) - image_url = generate_image(prompts) - return image_url - - -if __name__ == "__main__": - ### To test - text = """Departmental Autonomy -Departments struggle with balancing autonomy and standardization. -Decentralized services offer flexibility but can be costly and inconsistent. -Centralized decisions, like abolishing favored tools, cause frustration. -Effective support often requires local expertise. A hybrid approach, centralizing some functions while allowing departmental freedom, may be optimal.""" - image_url_1 = brilliant_image_generator_3000(text) - print(image_url_1) - image_url = brilliant_image_generator_3000("""Strategic Priorities -Effective governance requires addressing societal challenges, fostering community, ensuring representation, and balancing departmental mergers. 
Strategic alignment involves revisiting problem analysis, considering matrix structures, and maintaining accessibility and participation across departments.""") - print(image_url_1) - print(image_url) diff --git a/echo/server/dembrane/lightrag_uvicorn_worker.py b/echo/server/dembrane/lightrag_uvicorn_worker.py deleted file mode 100644 index b032b611..00000000 --- a/echo/server/dembrane/lightrag_uvicorn_worker.py +++ /dev/null @@ -1,19 +0,0 @@ -"""LightRAG-compatible Uvicorn worker for Gunicorn with asyncio loop support. - -LightRAG uses asyncio.run() in its codebase, which requires nest_asyncio to handle -nested event loops. However, nest_asyncio cannot patch uvloop (Uvicorn's default). - -This worker uses the standard asyncio loop instead of uvloop, allowing nest_asyncio -to properly patch the event loop and support LightRAG's synchronous wrapper functions. - -Performance note: uvloop is ~10-20% faster than asyncio, but this overhead is -negligible compared to LLM and database I/O operations. 
-""" - -from uvicorn.workers import UvicornWorker - - -class LightRagUvicornWorker(UvicornWorker): - """Uvicorn worker configured to use asyncio instead of uvloop for LightRAG compatibility.""" - - CONFIG_KWARGS = {"loop": "asyncio"} diff --git a/echo/server/dembrane/llms.py b/echo/server/dembrane/llms.py new file mode 100644 index 00000000..89879eac --- /dev/null +++ b/echo/server/dembrane/llms.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +import logging +from enum import Enum +from typing import Any, Dict, Mapping, Optional, Sequence + +import litellm + +from dembrane.settings import LLMProviderConfig, ResolvedLLMConfig, get_settings + +logger = logging.getLogger(__name__) + + +class MODELS(Enum): + MULTI_MODAL_PRO = "MULTI_MODAL_PRO" # Gemini 2.5 Pro – chat/report/inference + MULTI_MODAL_FAST = "MULTI_MODAL_FAST" # Gemini 2.5 Flash – realtime/verification + TEXT_FAST = "TEXT_FAST" # GPT-5 style small text model – summaries & utilities + + +MODEL_REGISTRY: Dict[MODELS, Dict[str, str]] = { + MODELS.MULTI_MODAL_PRO: {"settings_attr": "multi_modal_pro"}, + MODELS.MULTI_MODAL_FAST: {"settings_attr": "multi_modal_fast"}, + MODELS.TEXT_FAST: {"settings_attr": "text_fast"}, +} + + +def _get_provider_config(model: MODELS) -> LLMProviderConfig: + settings = get_settings() + attr = MODEL_REGISTRY[model]["settings_attr"] + provider = getattr(settings.llms, attr, None) + if provider is None: + raise ValueError(f"No configuration found for model group {model.value}.") + return provider + + +def resolve_config(model: MODELS) -> ResolvedLLMConfig: + """ + Load the configured model credentials for the requested model group. + """ + provider = _get_provider_config(model) + return provider.resolve() + + +def get_completion_kwargs(model: MODELS, **overrides: Any) -> Dict[str, Any]: + """ + Return the kwargs to pass into LiteLLM completion helpers for a configured model. 
+ """ + resolved = resolve_config(model) + kwargs: Dict[str, Any] = {"model": resolved.model} + + if resolved.api_key: + kwargs["api_key"] = resolved.api_key + if resolved.api_base: + kwargs["api_base"] = resolved.api_base + if resolved.api_version: + kwargs["api_version"] = resolved.api_version + + # Allow callers to override any field (e.g., temperature, max_tokens) + kwargs.update(overrides) + return kwargs + + +def count_tokens( + model: MODELS, + messages: Optional[Sequence[Mapping[str, Any]]] = None, + *, + text: Optional[str | Sequence[str]] = None, + **litellm_kwargs: Any, +) -> int: + """ + Count prompt tokens using the tokenizer associated with the configured model. + """ + resolved = resolve_config(model) + try: + return litellm.token_counter( + model=resolved.model, + messages=list(messages) if messages is not None else None, + text=text, + **litellm_kwargs, + ) + except Exception as exc: # pragma: no cover - defensive logging + logger.debug( + "Failed to count tokens", + extra={"model": resolved.model, "error": str(exc)}, + ) + raise + + +__all__ = ["MODELS", "resolve_config", "get_completion_kwargs", "count_tokens"] diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index 85f51df2..7b4fbcf2 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -9,79 +9,26 @@ Request, HTTPException, ) -from lightrag import LightRAG from fastapi.staticfiles import StaticFiles from starlette.exceptions import HTTPException as StarletteHTTPException from starlette.middleware import Middleware from fastapi.openapi.utils import get_openapi from starlette.middleware.cors import CORSMiddleware -from lightrag.kg.shared_storage import initialize_pipeline_status - -from dembrane.config import ( - REDIS_URL, - DATABASE_URL, - DISABLE_CORS, - ADMIN_BASE_URL, - SERVE_API_DOCS, - PARTICIPANT_BASE_URL, -) + +from dembrane.settings import get_settings from dembrane.sentry import init_sentry from dembrane.api.api import api -from 
dembrane.directus import directus -from dembrane.api.verify import seed_default_verification_topics -from dembrane.async_helpers import run_in_thread_pool -from dembrane.postgresdb_manager import PostgresDBManager - -# from lightrag.llm.azure_openai import azure_openai_complete -from dembrane.audio_lightrag.utils.litellm_utils import embedding_func, llm_model_func -from dembrane.audio_lightrag.utils.lightrag_utils import ( - with_distributed_lock, - _load_postgres_env_vars, - check_audio_lightrag_tables, -) +from dembrane.seed import seed_default_languages, seed_default_verification_topics # LightRAG requires nest_asyncio for nested event loops nest_asyncio.apply() logger = getLogger("server") - - -DEFAULT_DIRECTUS_LANGUAGES = [ - {"code": "en-US", "name": "English (United States)", "direction": "ltr"}, - {"code": "nl-NL", "name": "Dutch (Netherlands)", "direction": "ltr"}, - {"code": "de-DE", "name": "German (Germany)", "direction": "ltr"}, - {"code": "es-ES", "name": "Spanish (Spain)", "direction": "ltr"}, - {"code": "fr-FR", "name": "French (France)", "direction": "ltr"}, -] - - -async def seed_default_languages() -> None: - for language in DEFAULT_DIRECTUS_LANGUAGES: - existing = await run_in_thread_pool( - directus.get_items, - "languages", - { - "query": { - "filter": {"code": {"_eq": language["code"]}}, - "fields": ["code"], - "limit": 1, - } - }, - ) - - if existing: - continue - - logger.info("Seeding language %s", language["code"]) - await run_in_thread_pool( - directus.create_item, - "languages", - { - "code": language["code"], - "name": language["name"], - "direction": language["direction"], - }, - ) +settings = get_settings() +DISABLE_CORS = settings.disable_cors +ADMIN_BASE_URL = str(settings.admin_base_url) +PARTICIPANT_BASE_URL = str(settings.participant_base_url) +SERVE_API_DOCS = settings.serve_api_docs @asynccontextmanager @@ -90,42 +37,6 @@ async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]: logger.info("starting server") 
init_sentry() - # Initialize PostgreSQL and LightRAG - _load_postgres_env_vars(str(DATABASE_URL)) - postgres_db = await PostgresDBManager.get_initialized_db() - - # Define the critical initialization operation - async def initialize_database() -> bool: - await postgres_db.initdb() - await postgres_db.check_tables() - await check_audio_lightrag_tables(postgres_db) - return True - - # Use distributed lock for initialization - _, _ = await with_distributed_lock( - redis_url=str(REDIS_URL), - lock_key="DEMBRANE_INIT_LOCK", - critical_operation=initialize_database, - ) - - # This part is always needed, regardless of whether we performed initialization - _app.state.rag = LightRAG( - working_dir=None, - llm_model_func=llm_model_func, - embedding_func=embedding_func, - kv_storage="PGKVStorage", - doc_status_storage="PGDocStatusStorage", - graph_storage="Neo4JStorage", - vector_storage="PGVectorStorage", - vector_db_storage_cls_kwargs={"cosine_better_than_threshold": 0.4}, - ) - - await _app.state.rag.initialize_storages() - await ( - initialize_pipeline_status() - ) # This function is called during FASTAPI lifespan for each worker. 
- logger.info("RAG object has been initialized") - try: await seed_default_languages() logger.info("Languages seeded") diff --git a/echo/server/dembrane/ner.py b/echo/server/dembrane/ner.py deleted file mode 100644 index 270bd367..00000000 --- a/echo/server/dembrane/ner.py +++ /dev/null @@ -1,58 +0,0 @@ -# Commented out to exclude trankit + torch dependencies -# import logging - -# from dembrane.config import DISABLE_REDACTION - -# # ,TRANKIT_CACHE_DIR - -# logger = logging.getLogger("ner") - -# if not DISABLE_REDACTION: -# logger.info("Loading NER model") -# from trankit import Pipeline - -# p = Pipeline( -# "english", -# # embedding="xlm-roberta-large", -# # cache_dir=TRANKIT_CACHE_DIR, -# gpu=False, -# ) -# p.add("dutch") - -# # use langid to switch to the correct language -# p.set_auto(True) -# else: -# logger.info("NER redaction pipeline is disabled") - - -# def anonymize_sentence(sentence: str) -> str: -# if DISABLE_REDACTION: -# return sentence - -# tagged_sent = p.ner(sentence, is_sent=True) -# text = tagged_sent["text"] -# tokens = tagged_sent["tokens"] -# redacted_text = text -# offset = 0 - -# for token in tokens: -# logger.info(token) -# if token["ner"] != "O": # anything other than "O" - Other -# start, end = token["span"] -# # TODO: ignore if whitelisted -# redacted_replacement = f"[REDACTED ({token['ner']})]" -# redacted_text = ( -# redacted_text[: start + offset] -# + redacted_replacement -# + redacted_text[end + offset :] -# ) -# offset += len(redacted_replacement) - (end - start) - -# return redacted_text - - -# if __name__ == "__main__": -# check = ["test sentence. 
my name is john doe."] - -# for s in check: -# logger.info(anonymize_sentence(s)) diff --git a/echo/server/dembrane/openai.py b/echo/server/dembrane/openai.py deleted file mode 100644 index 2fa9b5c4..00000000 --- a/echo/server/dembrane/openai.py +++ /dev/null @@ -1,10 +0,0 @@ -import logging - -from openai import OpenAI - -from dembrane.config import OPENAI_API_KEY, OPENAI_API_BASE_URL - -client = OpenAI(base_url=OPENAI_API_BASE_URL, api_key=OPENAI_API_KEY) - -# set openai logger to warn -logging.getLogger("openai").setLevel(logging.WARNING) diff --git a/echo/server/dembrane/postgresdb_manager.py b/echo/server/dembrane/postgresdb_manager.py deleted file mode 100644 index f30d0c1d..00000000 --- a/echo/server/dembrane/postgresdb_manager.py +++ /dev/null @@ -1,76 +0,0 @@ -import os -import asyncio -from logging import getLogger - -from lightrag.kg.postgres_impl import PostgreSQLDB - -logger = getLogger("postgresdbmanager") - -class PostgresDBManager: - """Loop-aware manager for :class:`~lightrag.kg.postgres_impl.PostgreSQLDB`. - - A single :class:`PostgreSQLDB` (and therefore a single *asyncpg* connection - pool) must only ever be used from the event-loop in which it was created. - If we hand the instance to a different loop we will eventually get the same - cross-loop errors we just fixed for *LightRAG*. - - We therefore maintain **one** PostgreSQLDB per event-loop. Accessors - transparently give you the instance bound to the current loop, creating it - on first use. 
- """ - - _instance: "PostgresDBManager | None" = None - - # Mapping: loop-id → PostgreSQLDB instance - _db_by_loop: dict[int, PostgreSQLDB] = {} - # Mapping: loop-id → initialisation lock (to avoid double init in same loop) - _lock_by_loop: dict[int, asyncio.Lock] = {} - - def __new__(cls) -> "PostgresDBManager": - if cls._instance is None: - cls._instance = super(PostgresDBManager, cls).__new__(cls) - return cls._instance - - @staticmethod - def _get_loop_id() -> int: - try: - loop = asyncio.get_running_loop() - except RuntimeError: - loop = asyncio.get_event_loop() - return id(loop) - - @classmethod - async def _create_db_for_current_loop(cls) -> PostgreSQLDB: - logger.info("Initializing PostgreSQLDB for new event loop …") - postgres_config = { - "host": os.environ["POSTGRES_HOST"], - "port": os.environ["POSTGRES_PORT"], - "user": os.environ["POSTGRES_USER"], - "password": os.environ["POSTGRES_PASSWORD"], - "database": os.environ["POSTGRES_DATABASE"], - } - db = PostgreSQLDB(config=postgres_config) - await db.initdb() - logger.info("PostgreSQLDB initialised successfully for this loop") - return db - - @classmethod - async def get_initialized_db(cls) -> PostgreSQLDB: - """Return a :class:`PostgreSQLDB` tied to the **current** event-loop.""" - loop_id = cls._get_loop_id() - - # Ensure a lock object exists for this loop - if loop_id not in cls._lock_by_loop: - cls._lock_by_loop[loop_id] = asyncio.Lock() - - # Fast path: already initialised for this loop - if loop_id in cls._db_by_loop: - return cls._db_by_loop[loop_id] - - # Slow path: need to create it, guarded by the per-loop lock to avoid - # racing within the same loop. 
- async with cls._lock_by_loop[loop_id]: - if loop_id not in cls._db_by_loop: - cls._db_by_loop[loop_id] = await cls._create_db_for_current_loop() - - return cls._db_by_loop[loop_id] diff --git a/echo/server/dembrane/prompts.py b/echo/server/dembrane/prompts.py index 3137a859..17160987 100644 --- a/echo/server/dembrane/prompts.py +++ b/echo/server/dembrane/prompts.py @@ -21,7 +21,11 @@ from jinja2 import Environment, FileSystemLoader, select_autoescape -from dembrane.config import JSON_TEMPLATES_DIR, PROMPT_TEMPLATES_DIR +from dembrane.settings import get_settings + +settings = get_settings() +JSON_TEMPLATES_DIR = settings.json_templates_dir +PROMPT_TEMPLATES_DIR = settings.prompt_templates_dir logger = logging.getLogger("prompts") diff --git a/echo/server/dembrane/quote_utils.py b/echo/server/dembrane/quote_utils.py deleted file mode 100644 index 10a6f055..00000000 --- a/echo/server/dembrane/quote_utils.py +++ /dev/null @@ -1,1220 +0,0 @@ -import re -import json -import random -import logging -from typing import List, Optional - -import numpy as np -import pandas as pd -import tiktoken -from litellm import completion -from pydantic import BaseModel -from sqlalchemy import func, select, literal -from sqlalchemy.orm import Session -from sklearn.cluster import KMeans -from pgvector.sqlalchemy import Vector - -from dembrane.s3 import save_to_s3_from_url - -# from dembrane.ner import anonymize_sentence # Commented out to exclude trankit + torch -from dembrane.utils import generate_uuid, get_utc_timestamp -from dembrane.config import ( - LARGE_LITELLM_MODEL, # o4-mini - SMALL_LITELLM_MODEL, # 4o-mini - LARGE_LITELLM_API_KEY, - SMALL_LITELLM_API_KEY, - LARGE_LITELLM_API_BASE, - SMALL_LITELLM_API_BASE, - LARGE_LITELLM_API_VERSION, - SMALL_LITELLM_API_VERSION, -) -from dembrane.openai import client -from dembrane.prompts import render_prompt -from dembrane.database import ( - ViewModel, - QuoteModel, - AspectModel, - InsightModel, - ConversationModel, - 
ProcessingStatusEnum, - ConversationChunkModel, -) -from dembrane.anthropic import count_tokens_anthropic -from dembrane.embedding import EMBEDDING_DIM, embed_text -from dembrane.image_utils import brilliant_image_generator_3000 - -logger = logging.getLogger("quote_utils") - -np.random.seed(0) - - -SENTENCE_ENDING_PUNCTUATION = {".", "!", "?"} -SENTENCE_ENDING_PUNTUATION_REGEX = r"(?<=[.!?]) +" - - -def ends_with_punctuation(s: str) -> bool: - if not s: - return False - return s.strip()[-1] in SENTENCE_ENDING_PUNCTUATION - - -def clean_ellipsis(text: str) -> str: - return text.replace("...", "").replace("…", "") - - -def join_transcript_chunks(string_list: List[str]) -> str: - cleaned_chunks = [clean_ellipsis(chunk).strip() for chunk in string_list] - joined_string = cleaned_chunks[0] - - if len(cleaned_chunks) == 1: - return joined_string - - for chunk in cleaned_chunks[1:]: - if chunk == "": - continue - if ends_with_punctuation(joined_string): - joined_string += " " + chunk - else: - joined_string += ". " + chunk - - return joined_string - - -def llm_split_text(text: str) -> List[str]: - logger = logging.getLogger("llm_split_text") - logger.debug(f"splitting text: {text}") - messages = [ - { - "role": "user", - "content": 'Split the following text into 2 meaningful sentences. Retain the exact wording. Response format: \\n. Do not enclose your response in quotes or other special characters. 
Only output text.\n\n"""' - + text - + '\n"""', - } - ] - - response = completion( - model=SMALL_LITELLM_MODEL, - messages=messages, - api_key=SMALL_LITELLM_API_KEY, - api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - ) - logger.debug(response) - - split_text = response.choices[0].message.content - logger.debug(split_text) - - assert split_text is not None - - return split_text.split("\n") - - -MERGE_SENTENCE_LOWER_WORD_LIMIT = 8 -MERGE_SENTENCE_UPPER_WORD_LIMIT = 45 -BACKWARD_MERGE_UPPER_WORD_LIMIT = 35 -LONG_SENTENCE_LIMIT = 75 - - -# TODO: for a quote we should know which conversation_chunk it belongs to -def generate_quotes( - db: Session, project_analysis_run_id: Optional[str], conversation_id: str -) -> List[QuoteModel]: - """Generate quotes""" - logger = logging.getLogger("generate_quotes") - - count_chunks = ( - db.query(ConversationChunkModel) - .filter( - ConversationChunkModel.conversation_id == conversation_id, - ConversationChunkModel.transcript.is_not(None), - ) - .count() - ) - - chunks = ( - db.query(ConversationChunkModel) - .filter( - ConversationChunkModel.conversation_id == conversation_id, - ConversationChunkModel.transcript.is_not(None), - ) - .order_by(ConversationChunkModel.created_at.asc()) - .all() - ) - - if len(chunks) < count_chunks: - logger.warning( - f"POSSIBLE BAD QUERY: the number of chunks found ({len(chunks)}) is less than the number of chunks in the conversation ({count_chunks})" - ) - - chunk_id_text = dict() - - for chunk in chunks: - chunk_id_text[chunk.id] = chunk.transcript - - logger.debug(f"chunks found: {len(chunks)}") - - if len(chunks) == 0: - logger.debug(f"no conversation_chunks found for conversation {conversation_id}") - return [] - - conversation_transcript = join_transcript_chunks( - [chunk.transcript for chunk in chunks] # anonymize_sentence disabled to exclude trankit + torch - ) - - split_conversation_transcript = re.split( - SENTENCE_ENDING_PUNTUATION_REGEX, conversation_transcript - 
) - - logger.debug( - f"after joining chunks and splitting into sentences: {len(split_conversation_transcript)} sentences" - ) - - quote_strs = [] - buffer: List[str] = [] - timestamp = 0 - - # forward pass - for sentence in split_conversation_transcript: - if len(sentence.split()) < MERGE_SENTENCE_LOWER_WORD_LIMIT and buffer: - buffer[-1] += " " + sentence - else: - buffer.append(sentence) - - current_quote = " ".join(buffer).strip() - if len(current_quote.split()) > MERGE_SENTENCE_UPPER_WORD_LIMIT: - if len(current_quote.split()) > LONG_SENTENCE_LIMIT: - split_quotes = llm_split_text(current_quote) - for split_quote in split_quotes: - quote_strs.append((split_quote, timestamp)) - timestamp += 1 - else: - quote_strs.append((current_quote, timestamp)) - timestamp += 1 - buffer = [] - - if buffer: - quote_strs.append((" ".join(buffer).strip(), timestamp)) - timestamp += 1 - - # backward pass - final_quotes = [] - i = len(quote_strs) - 1 - - while i >= 0: - if ( - i > 0 - and len(quote_strs[i][0].split()) + len(quote_strs[i - 1][0].split()) - <= BACKWARD_MERGE_UPPER_WORD_LIMIT - ): - merged_quote = quote_strs[i - 1][0] + " " + quote_strs[i][0] - if len(merged_quote.split()) <= LONG_SENTENCE_LIMIT: - final_quotes.append((merged_quote, quote_strs[i - 1][1])) - i -= 2 - else: - final_quotes.append(quote_strs[i]) - i -= 1 - else: - final_quotes.append(quote_strs[i]) - i -= 1 - - final_quotes.reverse() - - quotes = [] - - for quote_str, quote_timestamp in final_quotes: - try: - closest_chunk_id = None - - for chunk_id, chunk_text in chunk_id_text.items(): - if quote_str in chunk_text: - closest_chunk_id = chunk_id - break - - closest_chunk = db.query(ConversationChunkModel).filter_by(id=closest_chunk_id).first() - logger.debug(f"closest_chunk: {closest_chunk}") - - quote = QuoteModel( - id=generate_uuid(), - created_at=get_utc_timestamp(), - project_analysis_run_id=project_analysis_run_id - if project_analysis_run_id - else None, - conversation_id=conversation_id, - 
text=quote_str, - embedding=embed_text(quote_str), - timestamp=closest_chunk.timestamp if closest_chunk else None, - order=quote_timestamp, - ) - - quotes.append(quote) - - except Exception as e: - logger.error(f"Error creating quote for text {quote_str}: {str(e)}") - continue - - # Bulk insert all quotes at once - db.bulk_save_objects(quotes) - db.commit() - - return quotes - - -encoding = tiktoken.encoding_for_model("gpt-4o") - - -def count_tokens(text: str, provider: str = "openai") -> int: - if provider == "anthropic": - return count_tokens_anthropic(text) - - return len(encoding.encode(text)) - - -# TODO: fix the sampling algo -def get_random_sample_quotes( - db: Session, project_analysis_run_id: str, context_limit: int = 100000, batch_size: int = 1000 -) -> List[QuoteModel]: - """ - Generate a random sample of quotes for a given project and project analysis run, avoiding frequency bias. - - Args: - - session: SQLAlchemy session for database access. - - project_analysis_run_id: The ID of the project analysis run. - - context_limit: The token limit for the context (default is 100000). - - batch_size: The size of batches to fetch quotes in (default is 1000). - - Returns: - - A list of randomly selected QuoteModel objects. - """ - - # TODO: context_limit needs to be divided by 2 - # https://community.openai.com/t/whats-the-new-tokenization-algorithm-for-gpt-4o/746708 - # OpenAIError('Error code: 400 - {\'error\': {\'message\': "This model\'s maximum context length is 128000 tokens. However, your messages resulted in 201901 tokens (including 92 in the response_format schemas.). 
Please reduce the length of the messages or schemas.", \'type\': \'invalid_request_error\', \'param\': \'messages\', \'code\': \'context_length_exceeded\'}}') - # I got this error when I was trying to get 100000 tokens - # when i counted the tokens, it was 100734, on openai end it was 201901 - # so i decided to divide the context_limit by 2, let's try this for now - - context_limit = context_limit // 2 - - logger.debug(f"Getting random sample quotes for project analysis run {project_analysis_run_id}") - - # Initialize tracking variables at the start - selected_quotes = [] - current_context_length = 0 - - # Step 1: Select quotes ensuring at least one quote per conversation - conversation_ids = db.scalars( - select(QuoteModel.conversation_id) - .filter_by(project_analysis_run_id=project_analysis_run_id) - .distinct() - ).all() - - for conv_id in conversation_ids: - conv_quote = db.scalars( - select(QuoteModel) - .filter_by(conversation_id=conv_id, project_analysis_run_id=project_analysis_run_id) - .order_by(func.random()) - .limit(1) - ).first() - if conv_quote: - additional_length = count_tokens(conv_quote.text) - if current_context_length + additional_length <= context_limit: - selected_quotes.append(conv_quote) - current_context_length += additional_length - if current_context_length >= context_limit: - break - - # Step 2: Fetch quotes in batches to avoid loading all quotes into memory - offset = 0 - all_quotes: List[QuoteModel] = [] - while True: - batch_quotes = db.scalars( - select(QuoteModel) - .filter_by(project_analysis_run_id=project_analysis_run_id) - .offset(offset) - .limit(batch_size) - ).all() - if not batch_quotes: - break - all_quotes.extend(batch_quotes) - offset += batch_size - - # Step 3: Random vectors selection with context limit - avg_quote_length_tokens = 60 - num_random_vectors = context_limit // avg_quote_length_tokens - num_random_vectors = min(num_random_vectors, len(all_quotes)) - random_vectors = np.random.randn(num_random_vectors, 
EMBEDDING_DIM) - - for vector in random_vectors: - # Convert the numpy array to a Python list for pgvector compatibility - vector_as_list = vector.tolist() - # Cast the list to a true pgvector type for proper operator binding - vector_param = literal(vector_as_list, type_=Vector(EMBEDDING_DIM)) - - # Skip the vector similarity search if no quotes are available - if not all_quotes: - continue - - try: - # First try using the native pgvector operator - closest_quote = db.scalars( - select(QuoteModel) - .filter(QuoteModel.project_analysis_run_id == project_analysis_run_id) - .order_by(QuoteModel.embedding.l2_distance(vector_param)) - .limit(1) - ).first() - except Exception as e: - logger.warning(f"Native pgvector operation failed: {e}") - db.rollback() - try: - # Try using SQL function approach - closest_quote = db.scalars( - select(QuoteModel) - .filter(QuoteModel.project_analysis_run_id == project_analysis_run_id) - .order_by(func.vector_l2_distance(QuoteModel.embedding, vector_param)) - .limit(1) - ).first() - except Exception as e2: - logger.warning(f"SQL function approach failed too: {e2}") - db.rollback() - # Fall back to random selection from the batch - if all_quotes: - closest_quote = random.choice(all_quotes) - else: - closest_quote = None - - if closest_quote and closest_quote not in selected_quotes: - additional_length = count_tokens(closest_quote.text) - if current_context_length + additional_length <= context_limit: - selected_quotes.append(closest_quote) - current_context_length += additional_length - if current_context_length >= context_limit: - break - - # Step 4: Add remaining random quotes while respecting context limit - random.shuffle(all_quotes) - for quote in all_quotes: - if quote not in selected_quotes: - additional_length = count_tokens(quote.text) - if current_context_length + additional_length <= context_limit: - selected_quotes.append(quote) - current_context_length += additional_length - if current_context_length >= context_limit: - 
break - - return selected_quotes - - -def initialize_view( - db: Session, - project_analysis_run_id: str, - user_input: str, - initial_aspects: Optional[str] = None, - language: str = "en", -) -> ViewModel: - """ - Generate a list of draft aspects based on user input. - - Args: - - db: Database session - - project_analysis_run_id: ID of the project analysis run - - user_input: The user's input about the analysis (e.g., "Sentiment") - - initial_aspects: Optional initial aspects provided by the user - - language: Language code for the prompt template (default: "en") - - Returns: - - A ViewModel instance with generated aspects - """ - logger = logging.getLogger("generate_draft_aspects") - - view = ViewModel( - id=generate_uuid(), - project_analysis_run_id=project_analysis_run_id, - name=user_input, - processing_status=ProcessingStatusEnum.PROCESSING, - processing_message="Generating aspects", - processing_started_at=get_utc_timestamp(), - ) - db.add(view) - db.commit() - - random_sample = get_random_sample_quotes(db, project_analysis_run_id) - random_sample_quotes = "\n".join(['"' + quote.text + '"' for quote in random_sample]) - logger.debug(f"Random sample quotes: {len(random_sample_quotes)}") - - prompt = render_prompt( - "initialize_view", - language, - { - "user_input": user_input, - "initial_aspects": initial_aspects, - "random_sample_quotes": random_sample_quotes, - }, - ) - - messages = [{"role": "user", "content": prompt}] - - class AspectOutput(BaseModel): - name: str - description: str - - class JSONOutputSchema(BaseModel): - aspect_list: list[AspectOutput] - - response = completion( - model=LARGE_LITELLM_MODEL, - api_key=LARGE_LITELLM_API_KEY, - api_version=LARGE_LITELLM_API_VERSION, - api_base=LARGE_LITELLM_API_BASE, - messages=messages, - response_format=JSONOutputSchema, - ) - - response_message = response.choices[0].message - - logger.debug(f"Response message: {response_message}") - try: - if response_message.refusal is not None: - raise 
ValueError(response_message.refusal) - - # Access the parsed response content - parsed_response = json.loads(response_message.content) - logger.debug(f"Parsed response: {parsed_response}") - - logger.debug(f"Draft aspects: {parsed_response}") - - if parsed_response is None: - raise ValueError("No response from model.") - - aspects_list = parsed_response["aspect_list"] - except Exception as e: - logger.error(f"Error generating draft aspects: {e}") - raise e from e - - for aspect in aspects_list: - if aspect["name"] is None or aspect["description"] is None: - logger.warning(f"Aspect missing name or description: {aspect}") - continue - - else: - aspect = AspectModel( - id=generate_uuid(), - view_id=view.id, - name=aspect["name"], - description=aspect["description"], - ) - db.add(aspect) - db.commit() - - return view - - -def calculate_centroid(embeddings: List[List[float]]) -> List[float]: - """ - Calculate the centroid of a list of embeddings. - - Args: - - embeddings: A list of embedding vectors. - - Returns: - - The centroid vector. 
- """ - return np.mean(embeddings, axis=0).tolist() - - -def format_json_string_to_list(json_string: str) -> List[str]: - # Handle the input JSON string - sample_quotes_json_string = json_string if json_string else "[]" - sample_quotes_json_string = sample_quotes_json_string.strip() - - # Log the last character for debugging purposes - # logger.debug("Last character: {sample_quotes_json_string[-1] if sample_quotes_json_string else "Empty String"}) - - # Ensure the string starts with '[' and ends with ']' - if not sample_quotes_json_string.startswith("["): - sample_quotes_json_string = "[" + sample_quotes_json_string - - if not sample_quotes_json_string.endswith("]"): - if sample_quotes_json_string[-1] in [","]: - sample_quotes_json_string = sample_quotes_json_string[:-1] + "]" - elif sample_quotes_json_string[-1] in ['"', " ", "}"]: - sample_quotes_json_string = sample_quotes_json_string + "]" - else: - sample_quotes_json_string = sample_quotes_json_string + '"]' - - # Attempt to parse the JSON string - try: - formatted_sample_quotes = json.loads(sample_quotes_json_string) - except json.JSONDecodeError as e: - logger.debug(f"Failed to parse the response as JSON: {e}") - try: - # split till the last "," - sample_quotes_json_string = sample_quotes_json_string.rsplit(",", 1)[0] + "]" - formatted_sample_quotes = json.loads(sample_quotes_json_string) - logger.debug(f"Attempted to fix the JSON string: {formatted_sample_quotes}") - except Exception as e: - logger.debug(f"Failed to fix the JSON string: {e}") - formatted_sample_quotes = [] - - return formatted_sample_quotes - - -def assign_aspect_centroid(db: Session, aspect_id: str, language: str) -> None: - aspect = db.get(AspectModel, aspect_id) - - if not aspect: - logger.error(f"Aspect with ID {aspect_id} not found") - return - - view = aspect.view - - if not view: - logger.error(f"View not found for aspect {aspect_id}") - return - - project_analysis_run_id = view.project_analysis_run_id - - if not 
project_analysis_run_id: - logger.error(f"Project analysis run ID not found for view {view.id}") - return - - sample_quotes = get_random_sample_quotes(db, project_analysis_run_id) - sample_quotes_texts = [quote.text for quote in sample_quotes] - random_sample_quotes = "\n".join([f'"{quote}"' for quote in sample_quotes_texts]) - - logger.debug(f"trying for aspect: {aspect.name}") - - aspects = view.aspects - if not aspects: - logger.error(f"No aspects found for view {view.id}") - return - - prompt = render_prompt( - "assign_aspect_centroid", - language, - { - "view_name": view.name, - "aspect_name": aspect.name, - "aspect_description": aspect.description, - "other_aspects": ", ".join([a.name for a in aspects if a.id != aspect.id]), - "random_sample_quotes": random_sample_quotes, - }, - ) - - messages = [{"role": "user", "content": prompt}] - - response = completion( - model=LARGE_LITELLM_MODEL, - messages=messages, - api_key=LARGE_LITELLM_API_KEY, - api_version=LARGE_LITELLM_API_VERSION, - api_base=LARGE_LITELLM_API_BASE, - ) - - sample_quotes_json_string = response.choices[0].message.content - formatted_sample_quotes = format_json_string_to_list( - sample_quotes_json_string if sample_quotes_json_string else "[]" - ) - - # gather representative quotes: - representative_quote_ids = [] - for quote in sample_quotes: - if any( - re.search(re.escape(quote_text), quote.text, re.IGNORECASE) - for quote_text in formatted_sample_quotes - ): - representative_quote_ids.append(quote.id) - - representative_quotes = ( - db.query(QuoteModel).filter(QuoteModel.id.in_(representative_quote_ids)).all() - ) - - logger.debug(f"Representative quotes for aspect {aspect.name}: {len(representative_quotes)}") - - aspect.representative_quotes = representative_quotes - db.commit() - - # Calculate centroid using the returned sample quotes - selected_quotes = [quote for quote in sample_quotes if quote.text in formatted_sample_quotes] - - logger.debug(f"Selected quotes for aspect {aspect.name}: 
{len(selected_quotes)}") - - if not selected_quotes: - selected_quotes = [ - quote - for quote in sample_quotes - if any( - re.search(re.escape(quote_text), quote.text, re.IGNORECASE) - for quote_text in formatted_sample_quotes - ) - ] - - embeddings_list = [ - embed_text(aspect.name + ". " + (aspect.description if aspect.description else "")) - ] - - if selected_quotes: - logger.debug(f"Quotes found for aspect {aspect.name}: {len(selected_quotes)}") - embeddings_list.extend([quote.embedding for quote in selected_quotes]) - else: - logger.debug(f"No quotes found for aspect {aspect.name}") - - centroid = calculate_centroid(embeddings_list) - logger.debug(f"Setting centroid for aspect {aspect.name}") - aspect.centroid_embedding = centroid - db.commit() - - -def cluster_quotes_using_aspect_centroids(db: Session, view_id: str) -> None: - view = db.get(ViewModel, view_id) - - if not view: - logger.error(f"View with ID {view_id} not found") - return - - aspects = view.aspects - - if not aspects: - logger.error(f"No aspects found for view {view_id}") - return - - quotes = ( - db.query(QuoteModel).filter_by(project_analysis_run_id=view.project_analysis_run_id).all() - ) - - # Assign each quote to the closest centroid - aspect_centroids = {aspect.id: aspect.centroid_embedding for aspect in aspects} - - # Collect keys with None values in a separate list - keys_to_delete = [k for k, v in aspect_centroids.items() if v is None] - - # Delete the collected keys after iteration - for k in keys_to_delete: - a = db.query(AspectModel).filter_by(id=k).first() - if a: - logger.debug(f"Removing aspect {a.name} from aspect_centroids because of None value") - del aspect_centroids[k] - - for quote in quotes: - # find the closest aspect ID by calculating the Euclidean distance between the quote embedding - # and the centroids of different aspects using the min() function and the np.linalg.norm() function - closest_aspect_id = min( - aspect_centroids.keys(), - key=lambda aspect_id: 
np.linalg.norm( - np.array(quote.embedding) - np.array(aspect_centroids[aspect_id]) - ), - ) - - closest_aspect = ( - db.query(AspectModel) - .filter_by( - id=closest_aspect_id, - view_id=view_id, - ) - .first() - ) - - if closest_aspect: - logger.debug(f"Closest aspect: {closest_aspect.name}") - closest_aspect.quotes.append(quote) - db.commit() - else: - logger.debug(f"No closest aspect found for quote {quote.id}") - - -def generate_aspect_summary(db: Session, aspect_id: str, language: str) -> None: - aspect = db.query(AspectModel).filter_by(id=aspect_id).first() - - if not aspect: - raise ValueError(f"Aspect with ID {aspect_id} not found") - - quotes = aspect.quotes - representative_quotes = aspect.representative_quotes - - dedupe_quotes = list(set(representative_quotes + quotes)) - formatted_quotes = "\n".join([f'"{quote.text}"' for quote in dedupe_quotes]) - view_name = aspect.view.name if aspect.view else "" - - # Generate short summary - prompt = render_prompt( - "generate_aspect_short_summary", - language, - { - "view_name": view_name, - "aspect_name": aspect.name, - "aspect_description": aspect.description, - "formatted_quotes": formatted_quotes, - }, - ) - - messages = [{"role": "user", "content": prompt}] - response = completion( - model=SMALL_LITELLM_MODEL, - messages=messages, - api_key=SMALL_LITELLM_API_KEY, - api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - ) - - short_summary = response.choices[0].message.content - aspect.short_summary = short_summary - db.commit() - - # Generate long summary - prompt = render_prompt( - "generate_aspect_long_summary", - language, - { - "view_name": view_name, - "aspect_name": aspect.name, - "aspect_description": aspect.description, - "short_summary": aspect.short_summary, - "formatted_quotes": formatted_quotes, - }, - ) - - messages = [{"role": "user", "content": prompt}] - response = completion( - model=SMALL_LITELLM_MODEL, - messages=messages, - api_key=SMALL_LITELLM_API_KEY, - 
api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - ) - - long_summary = response.choices[0].message.content - aspect.long_summary = long_summary - db.commit() - - return - - -def generate_aspect_image(db: Session, aspect_id: str) -> AspectModel: - logger.debug(f"generating image for aspect: {aspect_id}") - aspect = db.query(AspectModel).filter_by(id=aspect_id).first() - - if not aspect: - raise ValueError(f"Aspect with ID {aspect_id} not found") - - response = None - - try: - use_model = "MODEST" - - view = aspect.view - if not view: - raise ValueError("View not found") - - project_analysis_run = view.project_analysis_run - if not project_analysis_run: - raise ValueError("Project analysis run not found") - - project = project_analysis_run.project - if not project: - raise ValueError("Project not found") - - use_model = project.image_generation_model or "MODEST" - - logger.debug(f"using image generation model: {use_model}") - - except Exception as e: - logger.error(f"Error getting image generation model: {e}") - use_model = "MODEST" - - if use_model == "MODEST": - try: - prompt = render_prompt( - "generate_aspect_image", - "en", - { - "aspect_name": aspect.name, - "aspect_description": aspect.description, - }, - ) - - response = client.images.generate( - model="dall-e-3", - prompt=prompt, - size="1024x1024", - quality="standard", - n=1, - ) - except Exception as e: - logger.debug(f"Error generating image: {e}") - additional_info = ( - "edit the prompt so that it is in compliance with security guidelines." 
- ) - try: - response = client.images.generate( - model="dall-e-3", - prompt=prompt + additional_info, - size="1024x1024", - quality="standard", - n=1, - ) - except Exception as e: - logger.debug(f"Error generating image even after update prompt: {e}") - - try: - if response: - if response.data: - image_url = response.data[0].url if response.data else None - else: - image_url = None - try: - image_extension = str(image_url).split(".")[-1].split("?")[0] - except Exception as e: - logger.error(f"Error getting image extension: {e}") - image_extension = "png" - - if image_url: - logger.debug("saving the image and getting the public url") - image_url = save_to_s3_from_url( - image_url, "images/" + generate_uuid() + "." + image_extension, public=True - ) - else: - image_url = None - except Exception as e: - logger.error(f"Error downloading image: {e}") - elif use_model == "EXTRAVAGANT": - image_url = brilliant_image_generator_3000(f"{aspect.name}\n{aspect.short_summary}") - elif use_model == "PLACEHOLDER": - image_url = None - else: - logger.info(f"Image generation model not found: {use_model}") - image_url = None - - logger.debug(f"setting image URL to aspect: {image_url}") - aspect.image_url = image_url - - db.commit() - - return aspect - - -def generate_aspect_extras(db: Session, aspect_id: str, language: str) -> AspectModel | None: - """aspect summary, aspect image""" - aspect = db.query(AspectModel).filter_by(id=aspect_id).first() - - if not aspect: - logger.error(f"Aspect with ID {aspect_id} not found") - return None - - generate_aspect_summary(db, aspect.id, language) - generate_aspect_image(db, aspect.id) - - return aspect - - -def generate_view_extras(db: Session, view_id: str, language: str) -> ViewModel: - """Generate view summary and aspect summaries.""" - view = db.query(ViewModel).filter_by(id=view_id).first() - - if not view: - raise ValueError(f"View with ID {view_id} not found") - - formatted_aspects = "\n\n".join( - [ - f"""\ - -Aspect: {aspect.name} 
-Description: {aspect.description} -Summary: {aspect.long_summary} -""" - for aspect in view.aspects - ] - ) - - prompt = render_prompt( - "generate_view_extras", - language, - { - "view_name": view.name, - "formatted_aspects": formatted_aspects, - }, - ) - - messages = [{"role": "user", "content": prompt}] - - response = completion( - model=SMALL_LITELLM_MODEL, - messages=messages, - api_key=SMALL_LITELLM_API_KEY, - api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - ) - - view.summary = response.choices[0].message.content - db.commit() - - return view - - -def generate_insight_extras(db: Session, insight_id: str, language: str) -> None: - """Generate insight extras for a given cluster.""" - insight = db.query(InsightModel).filter_by(id=insight_id).first() - - if not insight: - logger.error(f"Insight with ID {insight_id} not found") - return - - quotes = insight.quotes - quote_text_joined = "\n".join([f'"{quote.text}"' for quote in quotes]) - - # Generate title - title_prompt = render_prompt( - "generate_insight_title", - language, - { - "quote_text_joined": quote_text_joined, - }, - ) - - title_messages = [{"role": "user", "content": title_prompt}] - - title_response = completion( - model=SMALL_LITELLM_MODEL, - messages=title_messages, - api_key=SMALL_LITELLM_API_KEY, - api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - ) - - if not title_response.choices: - logger.error(f"No title response for insight {insight_id}") - return - - title = title_response.choices[0].message.content - - # Generate summary - summary_prompt = render_prompt( - "generate_insight_summary", - language, - { - "quote_text_joined": quote_text_joined, - "title": title, - }, - ) - - summary_messages = [{"role": "user", "content": summary_prompt}] - - summary_response = completion( - model=SMALL_LITELLM_MODEL, - messages=summary_messages, - api_key=SMALL_LITELLM_API_KEY, - api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - 
) - - summary = summary_response.choices[0].message.content - - insight.title = title - insight.summary = summary - db.commit() - - return - - -def generate_conversation_summary(db: Session, conversation_id: str, language: str) -> None: - """Generate a summary for a conversation.""" - conversation = db.query(ConversationModel).filter_by(id=conversation_id).first() - - if not conversation: - logger.error(f"Conversation with ID {conversation_id} not found") - return - - quotes = ( - db.query(QuoteModel) - .filter_by(conversation_id=conversation_id) - .order_by(QuoteModel.timestamp) - .all() - ) - - if not quotes: - logger.error(f"No quotes found for conversation {conversation_id}") - return - - quote_text_joined = "\n".join([f'"{quote.text}"' for quote in quotes]) - - prompt = render_prompt( - "generate_conversation_summary", - language, - { - "quote_text_joined": quote_text_joined, - }, - ) - - messages = [{"role": "user", "content": prompt}] - - response = completion( - model=SMALL_LITELLM_MODEL, - messages=messages, - api_key=SMALL_LITELLM_API_KEY, - api_version=SMALL_LITELLM_API_VERSION, - api_base=SMALL_LITELLM_API_BASE, - ) - - conversation.summary = response.choices[0].message.content - db.commit() - - return - - -def initialize_insights(db: Session, project_analysis_run_id: str) -> List[str]: - """Generate insights""" - - quotes = ( - db.query(QuoteModel) - .with_entities(QuoteModel.id, QuoteModel.embedding) - .filter(QuoteModel.project_analysis_run_id == project_analysis_run_id) - .all() - ) - - if not quotes: - logger.error(f"No quotes found for project analysis run {project_analysis_run_id}") - return [] - - df = pd.DataFrame( - [ - { - "id": quote.id, - "embedding": quote.embedding, - } - for quote in quotes - ] - ) - - df["embedding"] = df.get("embedding").apply(lambda x: np.array(x)) # type: ignore - matrix = np.vstack(df["embedding"].values) # type: ignore - - logger.debug(f"matrix shape {matrix.shape}") - - n_clusters = len(quotes) // 4 - 
logger.debug(f"n_clusters, {n_clusters}") - logger.debug(f"quotes, {len(quotes)}") - - kmeans = KMeans(n_clusters=n_clusters, init="k-means++") - kmeans.fit(matrix) - labels = kmeans.labels_ - df["Cluster"] = labels - - insight_ids = [] - - for cluster_index in range(n_clusters): - insight = InsightModel( - id=generate_uuid(), - project_analysis_run_id=project_analysis_run_id, - ) - - quote_ids = df[df.Cluster == cluster_index].id.values - - quotes_list = db.query(QuoteModel).filter(QuoteModel.id.in_(quote_ids)).all() - insight.quotes.extend(quotes_list) - - insight_ids.append(insight.id) - db.add(insight) - db.commit() - - return insight_ids - - -# if __name__ == "__main__": -# from dembrane.database import get_db - -# db = next(get_db()) - -# project_id = "f98d4ef2-1bc9-40f1-b360-3d784e2b22a0" - -# analysis_id = "a27ad390-2f79-4db9-9f64-8e94abfc6fbc" - -# quotes = get_random_sample_quotes(db, analysis_id) - -# random_sample_quotes = "\n".join(['"' + quote.text + '"' for quote in quotes]) - -# print(count_tokens(random_sample_quotes)) - -# print(len(quotes)) - -# print("count - ",count_tokens(random_sample_quotes)) - - -# project_analysis_run = ProjectAnalysisRunModel( -# id=generate_uuid(), project_id=project_id, processing_status="DONE" -# ) - -# db.add(project_analysis_run) -# db.commit() - -# logger.debug(f"project_analysis_run_id: {project_analysis_run.id}") - -# analysis_id = project_analysis_run.id - -# generate_quotes(db, analysis_id, "a615ced7-fce1-4434-a88e-5041f30c2a15") - -# conversations = db.query(ConversationModel).filter(ConversationModel.project_id == project_id).all() - -# for conversation in conversations: -# logger.debug(f"conversation_id: {conversation.id}") -# quotes = generate_quotes(db, project_analysis_run.id, conversation.id) -# logger.debug(f"quotes generated: {len(quotes)}") - -# generate_aspect_image(db, "d9d4eb70-2965-4f68-911f-de7606ed0cf7") - -# logger.debug("quotes are generated") - -# view = generate_view(db, analysis_id, "Make a 
plan to restructure the TUE Governance", "Make it a detailed plan") -# assign_aspect_centroids_and_cluster_quotes(db, analysis_id, view.id) -# generate_view_extras(db, view.id) -# logger.debug(view.id) - -# view = initialize_view(db, analysis_id, "Sentiment", "Use only 3") -# assign_aspect_centroids_and_cluster_quotes(db, analysis_id, view.id) - -# aspects = view.aspects -# for aspect in aspects: -# generate_aspect_extras(db, aspect.id) - -# generate_view_extras(db, view.id) - -# logger.debug(view.id) - -# generate_insights(db, id) - -# prompt = render_prompt( -# "initialize_view", -# "en", -# { -# "user_input": "Make a plan to restructure the TUE Governance", -# "random_sample_quotes": "Hello World.", -# }, -# ) - -# messages = [{"role": "user", "content": prompt}] - -# class AspectOutput(BaseModel): -# name: str -# description: str - -# class JSONOutputSchema(BaseModel): -# aspect_list: list[AspectOutput] - -# # use beta...parse lol, took me a while to debug -# response = client.beta.chat.completions.parse( # type: ignore -# model="gpt-4o", -# messages=messages, # type: ignore -# response_format=JSONOutputSchema, -# ) - -# response_message = response.choices[0].message - -# try: -# if response_message.refusal is not None: -# raise ValueError(response_message.refusal) - -# # Access the parsed response content -# parsed_response = response.choices[0].message.parsed -# print(f"Draft aspects: {parsed_response}") - -# aspects_list = parsed_response.aspect_list -# except Exception as e: -# print(f"Error generating draft aspects: {e}") -# raise e from e diff --git a/echo/server/dembrane/rag_manager.py b/echo/server/dembrane/rag_manager.py deleted file mode 100644 index 9c0c70dd..00000000 --- a/echo/server/dembrane/rag_manager.py +++ /dev/null @@ -1,111 +0,0 @@ -import asyncio -from typing import Optional -from logging import getLogger - -from lightrag import LightRAG - -from dembrane.config import DATABASE_URL -from dembrane.audio_lightrag.utils.litellm_utils import 
embedding_func, llm_model_func -from dembrane.audio_lightrag.utils.lightrag_utils import _load_postgres_env_vars - -_load_postgres_env_vars(str(DATABASE_URL)) -logger = getLogger(__name__) - -class RAGManager: - _instance: Optional[LightRAG] = None - _initialized: bool = False - # Keep track of which asyncio event loop the current LightRAG instance - # belongs to. LightRAG creates resources (e.g. asyncpg pools) that are - # tightly coupled to the loop in which they were instantiated. If we - # subsequently try to use that same instance from another thread that has - # its *own* event loop, asyncio will raise - # "Task got Future attached to a different loop". - # - # We therefore ensure that each distinct event-loop gets *its own* LightRAG - # instance. We store them in a dictionary keyed by the loop object. For - # most server setups there is only one loop, so the behaviour is unchanged - # – but when dramatiq runs tasks in multiple threads (each of which can - # have its own loop) we avoid cross-loop access. - _instances_by_loop: dict[int, LightRAG] = {} - - @classmethod - def get_instance(cls) -> LightRAG: - try: - current_loop = asyncio.get_running_loop() - except RuntimeError: - # Called from a non-async context – fall back to the default loop - current_loop = asyncio.get_event_loop() - - loop_id = id(current_loop) - - if loop_id not in cls._instances_by_loop: - raise RuntimeError( - "RAG instance for this event loop not initialized. Call initialize() first." - ) - - return cls._instances_by_loop[loop_id] - - @classmethod - def is_initialized(cls) -> bool: - try: - current_loop = asyncio.get_running_loop() - except RuntimeError: - current_loop = asyncio.get_event_loop() - - loop_id = id(current_loop) - - return loop_id in cls._instances_by_loop - - @classmethod - async def initialize(cls) -> None: - # Determine the event loop for which we want to (maybe) create a new - # LightRAG instance. 
- try: - current_loop = asyncio.get_running_loop() - except RuntimeError: - current_loop = asyncio.get_event_loop() - - loop_id = id(current_loop) - - if loop_id in cls._instances_by_loop: - logger.debug("RAG instance already initialized for this event loop; skipping") - return - - logger.info("Initializing RAG instance for event loop %s", loop_id) - - instance = LightRAG( - working_dir=None, - llm_model_func=llm_model_func, - embedding_func=embedding_func, - kv_storage="PGKVStorage", - doc_status_storage="PGDocStatusStorage", - graph_storage="Neo4JStorage", - vector_storage="PGVectorStorage", - vector_db_storage_cls_kwargs={"cosine_better_than_threshold": 0.2}, - ) - await instance.initialize_storages() - - cls._instances_by_loop[loop_id] = instance - # Keep the legacy single-instance attributes for backwards - # compatibility with code that still references them. - cls._instance = instance - cls._initialized = True - logger.info("RAG instance initialised for loop %s", loop_id) - - -# Convenience function to get the RAG instance -def get_rag() -> LightRAG: - return RAGManager.get_instance() - -# Initialize at application startup -async def initialize_rag_at_startup() -> None: - """ - Initialize the RAG system once at application startup. - This should be called when your FastAPI application starts. 
- """ - if not RAGManager.is_initialized(): - logger.info("Initializing RAG system at application startup") - await RAGManager.initialize() - logger.info("RAG system initialized successfully") - else: - logger.info("RAG system already initialized") \ No newline at end of file diff --git a/echo/server/dembrane/reply_utils.py b/echo/server/dembrane/reply_utils.py index aa4a086f..fb7242d6 100644 --- a/echo/server/dembrane/reply_utils.py +++ b/echo/server/dembrane/reply_utils.py @@ -5,15 +5,9 @@ import sentry_sdk from litellm import acompletion from pydantic import BaseModel -from litellm.utils import token_counter from litellm.exceptions import ContentPolicyViolationError -from dembrane.config import ( - MEDIUM_LITELLM_MODEL, - MEDIUM_LITELLM_API_KEY, - MEDIUM_LITELLM_API_BASE, - MEDIUM_LITELLM_API_VERSION, -) +from dembrane.llms import MODELS, count_tokens, get_completion_kwargs from dembrane.prompts import render_prompt from dembrane.directus import directus @@ -240,7 +234,10 @@ async def generate_reply_for_conversation( # Check tokens for this conversation formatted_conv = format_conversation(c) - tokens = token_counter(text=formatted_conv, model=MEDIUM_LITELLM_MODEL) + tokens = count_tokens( + MODELS.TEXT_FAST, + [{"role": "user", "content": formatted_conv}], + ) candidate_conversations.append((formatted_conv, tokens)) else: @@ -260,7 +257,10 @@ async def generate_reply_for_conversation( # First check tokens for this conversation formatted_conv = format_conversation(c) - tokens = token_counter(text=formatted_conv, model=MEDIUM_LITELLM_MODEL) + tokens = count_tokens( + MODELS.TEXT_FAST, + [{"role": "user", "content": formatted_conv}], + ) # If conversation is too large, truncate it if tokens > target_tokens_per_conv: @@ -269,7 +269,10 @@ async def generate_reply_for_conversation( truncated_transcript = c.transcript[: int(len(c.transcript) * truncation_ratio)] c.transcript = truncated_transcript + "\n[Truncated for brevity...]" formatted_conv = 
format_conversation(c) - tokens = token_counter(text=formatted_conv, model=MEDIUM_LITELLM_MODEL) + tokens = count_tokens( + MODELS.TEXT_FAST, + [{"role": "user", "content": formatted_conv}], + ) candidate_conversations.append((formatted_conv, tokens)) @@ -364,14 +367,11 @@ async def generate_reply_for_conversation( # Stream the response try: response = await acompletion( - model=MEDIUM_LITELLM_MODEL, - api_key=MEDIUM_LITELLM_API_KEY, - api_version=MEDIUM_LITELLM_API_VERSION, - api_base=MEDIUM_LITELLM_API_BASE, messages=[ {"role": "user", "content": prompt}, ], stream=True, + **get_completion_kwargs(MODELS.TEXT_FAST), ) except ContentPolicyViolationError as e: logger.error( diff --git a/echo/server/dembrane/report_utils.py b/echo/server/dembrane/report_utils.py index bfe23cdb..7ae98b19 100644 --- a/echo/server/dembrane/report_utils.py +++ b/echo/server/dembrane/report_utils.py @@ -2,14 +2,8 @@ import logging from litellm import completion -from litellm.utils import token_counter - -from dembrane.config import ( - MEDIUM_LITELLM_MODEL, - MEDIUM_LITELLM_API_KEY, - MEDIUM_LITELLM_API_BASE, - MEDIUM_LITELLM_API_VERSION, -) + +from dembrane.llms import MODELS, count_tokens, get_completion_kwargs, resolve_config from dembrane.prompts import render_prompt from dembrane.directus import directus from dembrane.api.conversation import get_conversation_transcript @@ -17,7 +11,9 @@ logger = logging.getLogger("report_utils") -if "4.1" in str(MEDIUM_LITELLM_MODEL): +TEXT_PROVIDER_CONFIG = resolve_config(MODELS.TEXT_FAST) + +if "4.1" in str(TEXT_PROVIDER_CONFIG.model): logger.info("using 700k context length for report") MAX_REPORT_CONTEXT_LENGTH = 700000 else: @@ -68,7 +64,10 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: continue # Count tokens before adding - summary_tokens = token_counter(model=MEDIUM_LITELLM_MODEL, text=conversation["summary"]) + summary_tokens = count_tokens( + MODELS.TEXT_FAST, + [{"role": "user", "content": 
conversation["summary"]}], + ) # Check if adding this conversation would exceed the limit if token_count + summary_tokens >= MAX_REPORT_CONTEXT_LENGTH: @@ -123,7 +122,10 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: continue # Calculate token count for the transcript - transcript_tokens = token_counter(model=MEDIUM_LITELLM_MODEL, text=transcript) + transcript_tokens = count_tokens( + MODELS.TEXT_FAST, + [{"role": "user", "content": transcript}], + ) if token_count + transcript_tokens < MAX_REPORT_CONTEXT_LENGTH: # Append with a newline to keep paragraphs separated @@ -152,19 +154,14 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: "system_report", language, {"conversations": conversation_data_list} ) - # Use litellm.completion instead of anthropic client + # Use the configured Litellm provider for report generation response = completion( - model=MEDIUM_LITELLM_MODEL, - api_key=MEDIUM_LITELLM_API_KEY, - api_version=MEDIUM_LITELLM_API_VERSION, - api_base=MEDIUM_LITELLM_API_BASE, - # max tokens needed for "anthropic" - # max_tokens=4096, messages=[ {"role": "user", "content": prompt_message}, - # prefill message only for "anthropic" + # Some providers expect a prefilled assistant message; add if needed. # {"role": "assistant", "content": "
"}, ], + **get_completion_kwargs(MODELS.TEXT_FAST), ) response_content = response.choices[0].message.content diff --git a/echo/server/dembrane/runpod.py b/echo/server/dembrane/runpod.py deleted file mode 100644 index 67103fc8..00000000 --- a/echo/server/dembrane/runpod.py +++ /dev/null @@ -1,225 +0,0 @@ -from logging import getLogger - -import backoff -import requests - -from dembrane.tasks import task_finish_conversation_hook -from dembrane.config import RUNPOD_WHISPER_API_KEY -from dembrane.service import conversation_service -from dembrane.service.conversation import ConversationChunkNotFoundException -from dembrane.processing_status_utils import ProcessingStatusContext, set_error_status - -logger = getLogger("dembrane.runpod") - - -@backoff.on_exception( - backoff.expo, - requests.exceptions.RequestException, - max_tries=3, - max_time=30, -) -def get_runpod_transcription_response(status_link: str) -> dict: - headers = { - "Authorization": f"Bearer {RUNPOD_WHISPER_API_KEY}", - "Content-Type": "application/json", - } - response = requests.get(status_link, headers=headers) - - if response.status_code == 200: - return response.json() - - raise requests.exceptions.RequestException( - f"Non-200 response for status link {status_link}: {response.status_code}" - ) - - -def load_runpod_transcription_response(payload: dict) -> None: - logger.debug("=== ENTERING load_runpod_transcription_response ===") - logger.debug(f"Loading runpod transcription response: {payload}") - - # Validate payload structure - if not isinstance(payload, dict): - logger.error(f"Invalid payload type - expected dict, got {type(payload)}: {payload}") - return - - if "output" not in payload: - logger.error(f"Invalid payload structure - missing 'output' key: {payload}") - return - - output = payload["output"] - - # Handle case where output might be a list instead of dict - if isinstance(output, list): - logger.error( - f"Unexpected payload structure - 'output' is a list instead of dict. 
This might be an error response: {payload}" - ) - # Try to extract error information from the list if possible - if output and isinstance(output[0], dict) and "error" in output[0]: - error_msg = output[0].get("error", "Unknown error from RunPod") - logger.error(f"RunPod returned error in list format: {error_msg}") - return - - if not isinstance(output, dict): - logger.error( - f"Unexpected payload structure - 'output' is not a dict: {type(output)}, payload: {payload}" - ) - return - - # Check if this is an error response - if "error" in output and output.get("error"): - logger.error(f"RunPod returned error in output: {output.get('error')}") - # Try to get conversation_chunk_id to set error status - conversation_chunk_id = output.get("conversation_chunk_id") - if conversation_chunk_id: - set_error_status( - conversation_chunk_id=conversation_chunk_id, - error=f"RunPod error: {output.get('error')}", - ) - return - - # Extract conversation_chunk_id with proper error handling - conversation_chunk_id = output.get("conversation_chunk_id") - if not conversation_chunk_id: - logger.error(f"Missing conversation_chunk_id in payload output: {payload}") - return - - logger.debug(f"Found conversation_chunk_id: {conversation_chunk_id}") - # Check if status indicates failure - status = payload.get("status") - logger.debug(f"Status: {status}") - if status == "FAILED": - logger.error(f"RunPod job failed for chunk {conversation_chunk_id}: {payload}") - set_error_status( - conversation_chunk_id=conversation_chunk_id, - error=f"RunPod job failed: {output.get('error', 'Unknown error')}", - ) - return - - # Only proceed if status is COMPLETED - if status != "COMPLETED": - logger.warning( - f"RunPod job not completed for chunk {conversation_chunk_id}, status: {status}" - ) - return - - try: - chunk = conversation_service.get_chunk_by_id_or_raise(conversation_chunk_id) - - # exit early - except (ConversationChunkNotFoundException, KeyError) as e: - logger.error(f"Chunk 
{conversation_chunk_id} not found, skipping - {str(e)}") - return - - # retry if we failed to fetch the chunk - except Exception as e: - set_error_status( - conversation_chunk_id=conversation_chunk_id, - error=f"Failed to fetch conversation chunk: {e}", - ) - raise e from e - - conversation_id = chunk["conversation_id"] - - with ProcessingStatusContext( - conversation_chunk_id=chunk["id"], - conversation_id=conversation_id, - event_prefix="load_runpod_transcription_response", - ): - """ - Example payload: - { - "metadata_str": "optional string", - "enable_timestamps": true, - "language": "nl", - "detected_language": "nl", - "detected_language_confidence": 0.9805044531822205, - "joined_text": "... full transcription ...", - "translation_text": "...full translation...", - "translation_error": false, - "hallucination_score": 0.2, - "hallucination_reason": "Minor repetitions detected", - "segments": [ - { - "text": "Segment text", - "start": 0.0, - "end": 2.5 - } - ] - } - """ - - # Now we know output is a dict, so we can safely access its properties - hallucination_reason = output.get("hallucination_reason", None) - hallucination_score = output.get("hallucination_score", None) - translation_error = output.get("translation_error", False) - - if translation_error and hallucination_score is None: - hallucination_score = 0.5 - hallucination_reason = "There seems to be an internal model error with translation." 
- - joined_text = output.get("joined_text", "") - translation_text = output.get("translation_text") - - # transcript should always be there - use translation_text if available, otherwise joined_text - transcript = translation_text if translation_text else joined_text - logger.debug(f"Transcript: {len(transcript) if transcript else 0}") - - # raw_transcript is null if translation_text is null or if they are the same - if translation_text is None or translation_text == joined_text: - raw_transcript = None - logger.debug("Setting raw_transcript to None (no translation or same as original)") - else: - raw_transcript = joined_text - logger.debug(f"Raw transcript: {len(raw_transcript)}") - - desired_language = output.get("language") - detected_language = output.get("detected_language") - detected_language_confidence = output.get("detected_language_confidence") - - error = output.get("error", None) - if not transcript: - error = error or "" - error += "No transcript" - - logger.debug("Updating chunk in database...") - conversation_service.update_chunk( - chunk_id=chunk["id"], - raw_transcript=raw_transcript, - transcript=transcript, - runpod_job_status_link=None, - hallucination_reason=hallucination_reason, - hallucination_score=hallucination_score, - error=error, - desired_language=desired_language, - detected_language=detected_language, - detected_language_confidence=detected_language_confidence, - ) - - counts = conversation_service.get_chunk_counts(conversation_id) - logger.debug(counts) - - # Trigger follow-up processing only when either: - # a) the participant signalled they are done (conversation.is_finished == True), _and_ - # b) we have processed all currently known chunks. - # This prevents finishing the conversation too early when the participant is still uploading. 
- if counts["processed"] == counts["total"]: - try: - conversation = conversation_service.get_by_id_or_raise(conversation_id) - is_finished = conversation.get("is_finished", False) - if is_finished: - logger.info( - f"All chunks processed _and_ conversation {conversation_id} marked finished; running follow-up tasks." - ) - task_finish_conversation_hook.send(conversation_id) - else: - logger.debug( - f"All currently known chunks processed for conversation {conversation_id}, but it is not marked finished yet. Skipping finish hook for now." - ) - except Exception as e: - logger.error( - f"Could not verify conversation status for {conversation_id}: {e} – skipping finish hook for now" - ) - - logger.debug( - f"Updated chunk with transcript: {chunk['id']} - length: {len(output.get('joined_text', ''))}" - ) diff --git a/echo/server/dembrane/s3.py b/echo/server/dembrane/s3.py index 655e4045..b9db4a7f 100644 --- a/echo/server/dembrane/s3.py +++ b/echo/server/dembrane/s3.py @@ -51,16 +51,17 @@ from botocore.response import StreamingBody from dembrane.utils import generate_uuid -from dembrane.config import ( - STORAGE_S3_KEY, - STORAGE_S3_BUCKET, - STORAGE_S3_REGION, - STORAGE_S3_SECRET, - STORAGE_S3_ENDPOINT, -) +from dembrane.settings import get_settings logger = logging.getLogger("s3") +settings = get_settings() +STORAGE_S3_KEY = settings.storage_s3_key +STORAGE_S3_BUCKET = settings.storage_s3_bucket +STORAGE_S3_REGION = settings.storage_s3_region +STORAGE_S3_SECRET = settings.storage_s3_secret +STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint + session = boto3.session.Session() INTERNAL_S3_ENDPOINT = STORAGE_S3_ENDPOINT diff --git a/echo/server/dembrane/scheduler.py b/echo/server/dembrane/scheduler.py index 431daab4..5e123837 100644 --- a/echo/server/dembrane/scheduler.py +++ b/echo/server/dembrane/scheduler.py @@ -4,9 +4,7 @@ from apscheduler.schedulers.blocking import BlockingScheduler # from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore -from 
dembrane.config import DEBUG_MODE, TRANSCRIPTION_PROVIDER - -# from dembrane.config import DATABASE_URL +from dembrane.settings import get_settings jobstores = { # "default": SQLAlchemyJobStore(url=DATABASE_URL), @@ -16,6 +14,9 @@ scheduler = BlockingScheduler() scheduler.configure(jobstores=jobstores, timezone=utc) +settings = get_settings() +DEBUG_MODE = settings.debug_mode + # Add periodic tasks scheduler.add_job( func="dembrane.tasks:task_collect_and_finish_unfinished_conversations.send", @@ -25,20 +26,6 @@ replace_existing=True, ) -if TRANSCRIPTION_PROVIDER is not None and "runpod" in TRANSCRIPTION_PROVIDER.lower(): - if DEBUG_MODE: - trigger = CronTrigger(minute="*/2") - else: - trigger = CronTrigger(minute="*/10") - - scheduler.add_job( - func="dembrane.tasks:task_update_runpod_transcription_response.send", - trigger=trigger, - id="task_update_runpod_transcription_response", - name="update runpod transcription responses", - replace_existing=True, - ) - # Start the scheduler when this module is run directly if __name__ == "__main__": scheduler.start() diff --git a/echo/server/dembrane/seed.py b/echo/server/dembrane/seed.py new file mode 100644 index 00000000..137a31b7 --- /dev/null +++ b/echo/server/dembrane/seed.py @@ -0,0 +1,187 @@ +""" +Seeding helpers for bootstrap tasks that need to run during application startup. 
+""" + +from logging import getLogger +from typing import Any, Dict, Iterable, Mapping, List + +from dembrane.async_helpers import run_in_thread_pool +from dembrane.directus import directus + +logger = getLogger("dembrane.seed") + + +DEFAULT_DIRECTUS_LANGUAGES: Iterable[Mapping[str, Any]] = [ + {"code": "en-US", "name": "English (United States)", "direction": "ltr"}, + {"code": "nl-NL", "name": "Dutch (Netherlands)", "direction": "ltr"}, + {"code": "de-DE", "name": "German (Germany)", "direction": "ltr"}, + {"code": "es-ES", "name": "Spanish (Spain)", "direction": "ltr"}, + {"code": "fr-FR", "name": "French (France)", "direction": "ltr"}, +] + + +async def seed_default_languages() -> None: + """ + Ensure predefined Directus languages exist. + """ + for language in DEFAULT_DIRECTUS_LANGUAGES: + existing = await run_in_thread_pool( + directus.get_items, + "languages", + { + "query": { + "filter": {"code": {"_eq": language["code"]}}, + "fields": ["code"], + "limit": 1, + } + }, + ) + + if existing: + continue + + logger.info("Seeding language %s", language["code"]) + await run_in_thread_pool( + directus.create_item, + "languages", + { + "code": language["code"], + "name": language["name"], + "direction": language["direction"], + }, + ) + + +DEFAULT_VERIFICATION_TOPICS: List[Dict[str, Any]] = [ + { + "key": "agreements", + "icon": ":white_check_mark:", + "label": "What we actually agreed on", + "sort": 1, + "prompt": ( + "Extract the concrete agreements and shared understandings from this conversation. " + "Focus on points where multiple participants explicitly or implicitly aligned. " + "Include both major decisions and small points of consensus. Present these as clear, " + "unambiguous statements that all participants would recognize as accurate. Distinguish " + "between firm agreements and tentative consensus. If participants used different words " + "to express the same idea, synthesize into shared language. Format as a living document " + "of mutual understanding. 
Output character should be diplomatic but precise, like meeting " + "minutes with soul." + ), + }, + { + "key": "gems", + "icon": ":mag:", + "label": "Hidden gems", + "sort": 2, + "prompt": ( + "Identify the valuable insights that emerged unexpectedly or were mentioned briefly but " + "contain significant potential. Look for: throwaway comments that solve problems, questions " + "that reframe the entire discussion, metaphors that clarify complex ideas, connections between " + "seemingly unrelated points, and wisdom hiding in personal anecdotes. Present these as discoveries " + "worth preserving, explaining why each gem matters. These are the insights people might forget but " + "shouldn't. Output character should be excited and precise." + ), + }, + { + "key": "truths", + "icon": ":eyes:", + "label": "Painful truths", + "sort": 3, + "prompt": ( + "Surface the uncomfortable realities acknowledged in this conversation - the elephants in the room that " + "got named, the difficult facts accepted, the challenging feedback given or received. Include systemic " + "problems identified, personal blind spots revealed, and market realities confronted. Present these with " + "compassion but without sugar-coating. Frame them as shared recognitions that took courage to voice. " + "These truths are painful but necessary for genuine progress. Output character should be gentle but " + "unflinching." + ), + }, + { + "key": "moments", + "icon": ":rocket:", + "label": "Breakthrough moments", + "sort": 4, + "prompt": ( + "Capture the moments when thinking shifted, new possibilities emerged, or collective understanding jumped " + "to a new level. Identify: sudden realizations, creative solutions, perspective shifts, moments when " + "complexity became simple, and ideas that energized the group. Show both the breakthrough itself and what " + "made it possible. These are the moments when the conversation transcended its starting point. 
async def seed_default_verification_topics() -> None:
    """
    Idempotently create the canonical (project-agnostic) verification topics
    in Directus, skipping any topic whose key is already present.
    """
    for default_topic in DEFAULT_VERIFICATION_TOPICS:
        topic_key = default_topic["key"]

        # Look for an existing global topic (project_id is null) with this key.
        lookup_query = {
            "query": {
                "filter": {
                    "key": {"_eq": topic_key},
                    "project_id": {"_null": True},
                },
                "fields": ["key"],
                "limit": 1,
            }
        }
        already_seeded = await run_in_thread_pool(
            directus.get_items, "verification_topic", lookup_query
        )
        if already_seeded:
            continue

        logger.info("Seeding verification topic '%s'", topic_key)

        # Create the topic together with its default-language translation in
        # a single nested Directus write.
        await run_in_thread_pool(
            directus.create_item,
            "verification_topic",
            item_data={
                "key": topic_key,
                "prompt": default_topic["prompt"],
                "icon": default_topic["icon"],
                "sort": default_topic["sort"],
                "translations": {
                    "create": [
                        {
                            "languages_code": DEFAULT_VERIFICATION_LANG,
                            "label": default_topic["label"],
                        }
                    ],
                },
            },
        )
https://docs.sentry.io/platforms/python/integrations/openai/ - # https://docs.sentry.io/platforms/python/integrations/anthropic/ - # THIS. The failure is produced by Sentry’s OpenAI integration that is automatically wrapped around every call to openai / litellm. - # AzureException APIError - argument 'text': 'list' object cannot be converted to 'PyString' - #OpenAIIntegration( - #include_prompts=False, # LLM/tokenizer inputs/outputs will be not sent to Sentry, despite send_default_pii=True - #tiktoken_encoding_name="cl100k_base", - #), DramatiqIntegration(), ], ) diff --git a/echo/server/dembrane/service/conversation.py b/echo/server/dembrane/service/conversation.py index 6afcb46b..07fd89bc 100644 --- a/echo/server/dembrane/service/conversation.py +++ b/echo/server/dembrane/service/conversation.py @@ -314,7 +314,6 @@ def update_chunk( diarization: Any = _UNSET, transcript: Any = _UNSET, raw_transcript: Any = _UNSET, - runpod_job_status_link: Any = _UNSET, error: Any = _UNSET, hallucination_reason: Any = _UNSET, hallucination_score: Any = _UNSET, @@ -336,9 +335,6 @@ def update_chunk( if path is not _UNSET: update["path"] = path - if runpod_job_status_link is not _UNSET: - update["runpod_job_status_link"] = runpod_job_status_link - if error is not _UNSET: update["error"] = error diff --git a/echo/server/dembrane/settings.py b/echo/server/dembrane/settings.py new file mode 100644 index 00000000..a977a194 --- /dev/null +++ b/echo/server/dembrane/settings.py @@ -0,0 +1,224 @@ +""" +Centralized application settings loaded from environment variables. + +This module replaces the legacy ``dembrane.config`` globals with a single +typed settings object. Consumers should call ``get_settings()`` and read the +fields they need instead of importing environment variables directly. 
# Union of supported transcription backends; ``None`` means "auto-detect from
# the ENABLE_* feature flags" (see ``_get_transcript_provider`` in transcribe.py).
TranscriptionProvider = Optional[Literal["LiteLLM", "AssemblyAI", "Dembrane-25-09"]]


class ResolvedLLMConfig(BaseModel):
    """Fully-resolved LLM connection details; ``model`` is guaranteed present."""

    model: str
    api_key: Optional[str] = None
    api_base: Optional[str] = None
    api_version: Optional[str] = None


class LLMProviderConfig(BaseModel):
    """Raw, possibly-incomplete LLM provider config as read from the environment."""

    model: Optional[str] = None
    api_key: Optional[str] = None
    api_base: Optional[str] = None
    api_version: Optional[str] = None

    def resolve(self) -> ResolvedLLMConfig:
        """Return a :class:`ResolvedLLMConfig`.

        Raises:
            ValueError: if no model name has been configured.
        """
        if not self.model:
            raise ValueError("LLM provider configuration requires a model.")

        return ResolvedLLMConfig(
            model=self.model,
            api_key=self.api_key,
            api_base=self.api_base,
            api_version=self.api_version,
        )


class LLMSettings(BaseSettings):
    """Per-tier LLM configs read from ``LLM__<TIER>__<FIELD>`` env vars."""

    model_config = SettingsConfigDict(
        env_prefix="LLM__",
        env_nested_delimiter="__",
        env_file=".env",
        extra="ignore",
        case_sensitive=False,
    )

    multi_modal_pro: LLMProviderConfig = Field(default_factory=LLMProviderConfig)
    multi_modal_fast: LLMProviderConfig = Field(default_factory=LLMProviderConfig)
    text_fast: LLMProviderConfig = Field(default_factory=LLMProviderConfig)


class AppSettings(BaseSettings):
    """
    All environment-driven configuration for the Dembrane ECHO server.

    Fields without a default (``...``) are required; startup fails fast with a
    validation error when they are missing from the environment / ``.env``.
    """

    model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False)

    # General application configuration
    base_dir: Path = Field(default_factory=lambda: Path(__file__).resolve().parent.parent)
    build_version: str = Field(default="dev", alias="BUILD_VERSION")
    api_base_url: str = Field(default="http://localhost:8000", alias="API_BASE_URL")
    admin_base_url: str = Field(default="http://localhost:3000", alias="ADMIN_BASE_URL")
    participant_base_url: str = Field(default="http://localhost:3001", alias="PARTICIPANT_BASE_URL")

    # Features
    debug_mode: bool = Field(default=False, alias="DEBUG_MODE")
    disable_cors: bool = Field(default=False, alias="DISABLE_CORS")
    disable_redaction: bool = Field(default=False, alias="DISABLE_REDACTION")
    disable_chat_title_generation: bool = Field(
        default=False, alias="DISABLE_CHAT_TITLE_GENERATION"
    )
    enable_chat_auto_select: bool = Field(default=False, alias="ENABLE_CHAT_AUTO_SELECT")
    serve_api_docs: bool = Field(default=False, alias="SERVE_API_DOCS")
    disable_sentry: bool = Field(default=False, alias="DISABLE_SENTRY")

    # Directus / database / cache / storage
    directus_base_url: str = Field(default="http://directus:8055", alias="DIRECTUS_BASE_URL")
    directus_secret: str = Field(..., alias="DIRECTUS_SECRET")
    directus_token: str = Field(..., alias="DIRECTUS_TOKEN")
    directus_session_cookie_name: str = Field(
        default="directus_session_token", alias="DIRECTUS_SESSION_COOKIE_NAME"
    )

    database_url: str = Field(..., alias="DATABASE_URL")
    redis_url: str = Field(..., alias="REDIS_URL")

    storage_s3_bucket: str = Field(..., alias="STORAGE_S3_BUCKET")
    storage_s3_region: Optional[str] = Field(default=None, alias="STORAGE_S3_REGION")
    storage_s3_endpoint: str = Field(..., alias="STORAGE_S3_ENDPOINT")
    storage_s3_key: str = Field(..., alias="STORAGE_S3_KEY")
    storage_s3_secret: str = Field(..., alias="STORAGE_S3_SECRET")

    # Transcription providers
    transcription_provider: TranscriptionProvider = Field(
        default=None, alias="TRANSCRIPTION_PROVIDER"
    )
    gcp_sa_json: Optional[Dict[str, Any]] = Field(default=None, alias="GCP_SA_JSON")

    enable_assemblyai_transcription: bool = Field(
        default=False, alias="ENABLE_ASSEMBLYAI_TRANSCRIPTION"
    )
    assemblyai_api_key: Optional[str] = Field(default=None, alias="ASSEMBLYAI_API_KEY")
    assemblyai_base_url: str = Field(
        default="https://api.eu.assemblyai.com", alias="ASSEMBLYAI_BASE_URL"
    )

    enable_litellm_whisper_transcription: bool = Field(
        default=False, alias="ENABLE_LITELLM_WHISPER_TRANSCRIPTION"
    )

    llms: LLMSettings = Field(default_factory=LLMSettings)

    @field_validator("database_url", mode="before")
    @classmethod
    def normalize_database_url(cls, value: str) -> str:
        """Coerce the DSN to the psycopg3 driver scheme SQLAlchemy expects.

        Plain ``postgresql://`` URLs are accepted and rewritten; anything else
        is rejected.
        """
        if value.startswith("postgresql+psycopg://"):
            return value
        if value.startswith("postgresql://"):
            return value.replace("postgresql://", "postgresql+psycopg://", 1)
        # BUGFIX: the old message claimed only postgresql+psycopg:// was valid,
        # even though plain postgresql:// is accepted above.
        raise ValueError(
            "DATABASE_URL must start with postgresql:// or postgresql+psycopg://"
        )

    @field_validator("gcp_sa_json", mode="before")
    @classmethod
    def parse_gcp_sa_json(
        cls, value: Optional[Any]
    ) -> Optional[Dict[str, Any]]:
        """Accept the service-account payload as a mapping, raw JSON, or base64 JSON.

        Empty strings and the literals ``"null"``/``"None"`` are treated as unset.
        """
        if value is None:
            return None

        if isinstance(value, Mapping):
            return dict(value)

        if isinstance(value, str):
            trimmed = value.strip()
            if trimmed in {"", "null", "None"}:
                return None
            raw_value: str | bytes = trimmed
        elif isinstance(value, (bytes, bytearray)):
            if not value:
                return None
            raw_value = value
        else:
            raise ValueError("GCP_SA_JSON must be a mapping, JSON string, or base64-encoded JSON")

        try:
            return json.loads(raw_value)
        except (TypeError, json.JSONDecodeError):
            # Not raw JSON — fall back to base64-wrapped JSON (common when the
            # value is injected through CI secrets). b64decode raises
            # binascii.Error, a ValueError subclass.
            try:
                decoded = base64.b64decode(raw_value)
                return json.loads(decoded)
            except (ValueError, json.JSONDecodeError, TypeError) as exc:
                raise ValueError("GCP_SA_JSON must be valid JSON or base64-encoded JSON") from exc

    @model_validator(mode="after")
    def validate_transcription_dependencies(self) -> "AppSettings":
        """Fail fast when an enabled transcription backend lacks its credentials."""
        if self.enable_assemblyai_transcription and not self.assemblyai_api_key:
            raise ValueError(
                "ASSEMBLYAI_API_KEY must be set when AssemblyAI transcription is enabled"
            )

        if self.enable_litellm_whisper_transcription:
            # Whisper-via-LiteLLM rides on the multi_modal_fast tier.
            missing = [
                name
                for name, value in [
                    ("LLM__MULTI_MODAL_FAST__MODEL", self.llms.multi_modal_fast.model),
                    ("LLM__MULTI_MODAL_FAST__API_KEY", self.llms.multi_modal_fast.api_key),
                ]
                if value in (None, "")
            ]
            if missing:
                raise ValueError(
                    "Missing required LiteLLM Whisper configuration when transcription is enabled: "
                    + ", ".join(missing)
                )

        return self

    @property
    def environment(self) -> str:
        # Any non-"dev" build version is treated as a production deployment.
        return "production" if self.build_version != "dev" else "development"

    @property
    def prompt_templates_dir(self) -> Path:
        return self.base_dir / "prompt_templates"

    @property
    def json_templates_dir(self) -> Path:
        return self.base_dir / "json_templates"


@lru_cache
def get_settings() -> AppSettings:
    """Return the process-wide settings singleton (cached after first call).

    Side effects on first call: enables DEBUG logging when ``debug_mode`` is
    set, and silences chatty third-party loggers.
    """
    settings = AppSettings()

    if settings.debug_mode:
        logging.getLogger().setLevel(logging.DEBUG)

    for noisy in [
        "boto3",
        "botocore",
        "httpx",
        "httpcore",
        "LiteLLM",
        "requests",
        "psycopg",
        "s3transfer",
        "urllib3",
        "multipart",
    ]:
        logging.getLogger(noisy).setLevel(logging.WARNING)

    return settings
ENABLE_AUDIO_LIGHTRAG_INPUT, - RUNPOD_TOPIC_MODELER_API_KEY, -) +from dembrane.settings import get_settings from dembrane.sentry import init_sentry from dembrane.prompts import render_json from dembrane.directus import ( @@ -30,17 +23,16 @@ directus_client_context, ) from dembrane.transcribe import transcribe_conversation_chunk -from dembrane.conversation_utils import ( - collect_unfinished_conversations, - collect_unfinished_audio_processing_conversations, -) +from dembrane.async_helpers import run_in_thread_pool, run_async_in_new_loop +from dembrane.conversation_utils import collect_unfinished_conversations from dembrane.api.dependency_auth import DependencyDirectusSession -from dembrane.conversation_health import get_runpod_diarization from dembrane.processing_status_utils import ( ProcessingStatusContext, set_error_status, ) -from dembrane.audio_lightrag.utils.echo_utils import finish_conversation + +settings = get_settings() +REDIS_URL = settings.redis_url init_sentry() @@ -145,13 +137,11 @@ def task_summarize_conversation(conversation_id: str) -> None: return from dembrane.api.conversation import summarize_conversation - from dembrane.audio_lightrag.utils.async_utils import run_async_in_new_loop with ProcessingStatusContext( conversation_id=conversation_id, event_prefix="task_summarize_conversation", ): - # Run async function in new event loop (CPU worker context) run_async_in_new_loop( summarize_conversation( conversation_id=conversation_id, @@ -203,7 +193,6 @@ def task_merge_conversation_chunks(conversation_id: str) -> None: # local import to avoid circular imports from dembrane.api.exceptions import NoContentFoundException from dembrane.api.conversation import get_conversation_content - from dembrane.audio_lightrag.utils.async_utils import run_async_in_new_loop with ProcessingStatusContext( conversation_id=conversation_id, @@ -220,7 +209,9 @@ def task_merge_conversation_chunks(conversation_id: str) -> None: ) ) except NoContentFoundException: # type: 
ignore - logger.info(f"No valid content found for conversation {conversation_id}; skipping merge task.") + logger.info( + f"No valid content found for conversation {conversation_id}; skipping merge task." + ) return return @@ -229,217 +220,6 @@ def task_merge_conversation_chunks(conversation_id: str) -> None: raise e from e -@dramatiq.actor( - queue_name="cpu", - priority=10, - time_limit=5 * 60 * 1000, # 5 minutes (no audio processing!) - max_retries=3, -) -def task_run_etl_pipeline(conversation_id: str) -> None: - """ - THE PIVOT: Process finished conversation for RAG using existing transcripts. - NO audio processing - text-only! - - Steps: - 1. Fetch conversation chunks from Directus (with existing transcripts) - 2. Concatenate chunk.transcript fields (from standard Whisper pipeline) - 3. Get project context - 4. Rich contextualization with Claude - 5. Create conversation_segment record - 6. Insert into LightRAG (Neo4j + PostgreSQL) - """ - logger = getLogger("dembrane.tasks.task_run_etl_pipeline") - - try: - # Check if conversation exists - try: - conversation_object = directus.get_item("conversation", conversation_id) - except Exception: - logger.error(f"Failed to get conversation {conversation_id}") - return - - if conversation_object is None: - logger.error(f"Conversation not found: {conversation_id}") - return - - project_id = conversation_object["project_id"] - - # Check if RAG processing is enabled for this project - try: - project = directus.get_item("project", project_id) - is_enabled = project.get("is_enhanced_audio_processing_enabled", False) - except Exception as e: - logger.error(f"Failed to get project {project_id}: {e}") - return - - if not (ENABLE_AUDIO_LIGHTRAG_INPUT and is_enabled): - logger.info(f"RAG processing disabled for project {project_id}, skipping") - try: - finish_conversation(conversation_id) - logger.info(f"Marked conversation {conversation_id} as finished (RAG disabled)") - except Exception as e: - logger.error(f"Failed to mark 
conversation {conversation_id} as finished: {e}") - return - - with ProcessingStatusContext( - conversation_id=conversation_id, - event_prefix="task_run_etl_pipeline", - message="Processing conversation for RAG (transcript-only)", - ): - logger.info(f"Starting RAG processing for conversation {conversation_id}") - - # Step 1: Fetch chunks with transcripts from Directus - logger.info("Step 1/6: Fetching chunks from Directus") - chunks_response = directus.get_items( - "conversation_chunk", - { - "query": { - "filter": {"conversation_id": conversation_id}, - "fields": ["id", "transcript", "timestamp", "conversation_id"], - "sort": ["timestamp"], - "limit": -1, - } - }, - ) - - if not chunks_response or len(chunks_response) == 0: - logger.warning(f"No chunks found for conversation {conversation_id}") - return - - # Step 2: Concatenate transcripts - logger.info(f"Step 2/6: Concatenating {len(chunks_response)} chunk transcripts") - transcripts = [] - for chunk in chunks_response: - transcript = chunk.get("transcript", "") - if transcript and transcript.strip(): - transcripts.append(transcript.strip()) - - if not transcripts: - logger.warning( - f"No valid transcripts found in chunks for conversation {conversation_id}" - ) - return - - full_transcript = "\n\n".join(transcripts) - logger.info(f"Full transcript length: {len(full_transcript)} characters") - - # Step 3: Get project context (format as event_text like old pipeline) - logger.info("Step 3/6: Getting project context") - project_language = project.get("language", "en") - - # Format project data as key:value pairs (same as old pipeline) - event_text = "\n\n".join( - [ - f"{k} : {v}" - for k, v in project.items() - if k in ["name", "context", "language", "description"] - ] - ) - - # Step 3b: Get previous conversation segments for context - # (For now, we'll start with empty - can enhance later) - previous_conversation_text = "" - # TODO: In future, fetch previous segments' contextual_transcripts from this conversation 
- # and join with \n\n like old pipeline did - - # Step 4: Rich contextualization with Claude (using old prompt template) - logger.info("Step 4/6: Contextualizing with Claude") - from dembrane.api.stateless import InsertRequest, insert_item - from dembrane.api.dependency_auth import DependencyDirectusSession - from dembrane.audio_lightrag.utils.async_utils import run_async_in_new_loop - from dembrane.audio_lightrag.services.contextualizer import get_contextualizer - - contextualizer = get_contextualizer() - - # Define async function that does all async work in ONE loop - async def process_with_rag() -> str: - # Step 4a: Contextualize transcript (using old audio_model_system_prompt) - contextual_transcript = await contextualizer.contextualize( - full_transcript, event_text, previous_conversation_text, project_language - ) - - # Step 5: Create segment record - logger.info("Step 5/6: Creating conversation segment") - segment_data = { - "conversation_id": conversation_id, - "transcript": full_transcript, - "contextual_transcript": contextual_transcript, - } - segment = directus.create_item("conversation_segment", segment_data) - segment_id = segment["data"]["id"] - logger.info(f"Created segment {segment_id} for conversation {conversation_id}") - - # Step 6: Insert into RAG (using same pattern as old code) - logger.info("Step 6/6: Inserting into LightRAG") - payload = InsertRequest( - content=contextual_transcript, - echo_segment_id=str(segment_id), - transcripts=[full_transcript], - ) - # Create fake admin session (same as old code) - fake_session = DependencyDirectusSession(user_id="none", is_admin=True) - - # Call insert_item directly (not via HTTP) - insert_response = await insert_item(payload, fake_session) - - if insert_response.status != "success": - raise RuntimeError(f"RAG insertion failed: {insert_response.status}") - - return segment_id - - # Run all async work in ONE event loop - segment_id = run_async_in_new_loop(process_with_rag()) - - 
logger.info(f"Successfully processed conversation {conversation_id} for RAG") - logger.info(f"Segment ID: {segment_id}") - - # Mark segment as processed in RAG (same as old pipeline) - directus.update_item("conversation_segment", segment_id, {"lightrag_flag": True}) - logger.info(f"Marked segment {segment_id} as RAG processed") - - # CRITICAL: Mark ALL segments for this conversation as processed - # (There may be old segments from previous audio processing runs) - try: - # Batch update all segments for this conversation - all_segments = directus.get_items( - "conversation_segment", - { - "query": { - "filter": {"conversation_id": conversation_id, "lightrag_flag": False}, - "fields": ["id"], - "limit": -1, - } - }, - ) - - if all_segments and len(all_segments) > 0: - logger.warning( - f"Found {len(all_segments)} old unprocessed segments for conversation {conversation_id}, marking as processed" - ) - for old_seg in all_segments: - try: - directus.update_item( - "conversation_segment", old_seg["id"], {"lightrag_flag": True} - ) - except Exception as e: - logger.error(f"Failed to update old segment {old_seg['id']}: {e}") - except Exception as e: - logger.error(f"Failed to check/update old segments: {e}") - - if finish_conversation(conversation_id): - logger.info(f"Marked conversation {conversation_id} as audio processing finished") - else: - logger.warning( - f"Failed to mark conversation {conversation_id} as audio processing finished" - ) - - except Exception as e: - logger.error( - f"RAG processing failed for conversation {conversation_id}: {e}", exc_info=True - ) - raise - - @dramatiq.actor(queue_name="network", priority=30) def task_finish_conversation_hook(conversation_id: str) -> None: """ @@ -468,30 +248,7 @@ def task_finish_conversation_hook(conversation_id: str) -> None: f"Conversation {conversation_id} has not finished processing, running all follow-up tasks" ) - # Dispatch follow-up tasks directly - # Note: Using .send() instead of group() to ensure tasks 
are actually dispatched task_merge_conversation_chunks.send(conversation_id) - - # Only dispatch RAG task if globally enabled and project has it enabled - if ENABLE_AUDIO_LIGHTRAG_INPUT: - try: - project_id = conversation_obj.get("project_id") - if project_id: - project = directus.get_item("project", project_id) - if project and project.get("is_enhanced_audio_processing_enabled", False): - task_run_etl_pipeline.send(conversation_id) - logger.info(f"Dispatched RAG task for conversation {conversation_id}") - else: - logger.info(f"RAG disabled for project {project_id}, skipping RAG task") - else: - logger.warning( - f"No project_id for conversation {conversation_id}, skipping RAG task" - ) - except Exception as e: - logger.error(f"Failed to check RAG status for conversation {conversation_id}: {e}") - else: - logger.info("ENABLE_AUDIO_LIGHTRAG_INPUT is False, skipping RAG task") - task_summarize_conversation.send(conversation_id) counts = conversation_service.get_chunk_counts(conversation_id) @@ -551,9 +308,6 @@ def task_process_conversation_chunk( logger.error(f"Split audio chunk result is None for chunk: {chunk_id}") raise ValueError(f"Split audio chunk result is None for chunk: {chunk_id}") - if "upload" not in str(chunk["source"]).lower(): - group([task_get_runpod_diarization.message(chunk_id)]).run() - logger.info(f"Split audio chunk result: {split_chunk_ids}") group( @@ -583,15 +337,6 @@ def task_collect_and_finish_unfinished_conversations() -> None: unfinished_conversation_ids = collect_unfinished_conversations() logger.info(f"Unfinished conversation ids: {unfinished_conversation_ids}") - try: - unfinished_ap_conversation_ids = collect_unfinished_audio_processing_conversations() - logger.info( - f"Unfinished audio processing conversation ids: {unfinished_ap_conversation_ids}" - ) - except Exception as e: - logger.error(f"Error collecting unfinished audio processing conversations: {e}") - unfinished_ap_conversation_ids = [] - group( [ 
task_finish_conversation_hook.message(conversation_id) @@ -600,14 +345,6 @@ def task_collect_and_finish_unfinished_conversations() -> None: ] ).run() - group( - [ - task_run_etl_pipeline.message(conversation_id) - for conversation_id in unfinished_ap_conversation_ids - if conversation_id is not None - ] - ).run() - return except Exception as e: logger.error(f"Error collecting and finishing unfinished conversations: {e}") @@ -664,96 +401,15 @@ def task_create_view( project_id=project_id, event_prefix="task_create_view", ) as status_ctx: - try: - with directus_client_context() as client: - # get all segment ids from project_id - segments = client.get_items( - "project", - { - "query": { - "filter": { - "id": project_id, - }, - "fields": ["conversations.conversation_segments.id"], - } - }, - ) - - if not segments or len(segments) == 0: - status_ctx.set_exit_message(f"No segments found for project: {project_id}") - logger.error(f"No segments found for project: {project_id}") - return - - segment_ids = list( - set( - [ - seg["id"] - for conv in segments[0]["conversations"] - for seg in conv["conversation_segments"] - ] - ) - ) - - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {RUNPOD_TOPIC_MODELER_API_KEY}", - } - - data = { - "input": { - "project_analysis_run_id": project_analysis_run_id, - "response_language": language, - "segment_ids": segment_ids, - "user_input": user_query, - "user_input_description": user_query_context or "", - "user_prompt": "\n\n\n".join([user_query, user_query_context or ""]), # depr - } - } - - url = f"{str(RUNPOD_TOPIC_MODELER_URL).rstrip('/')}/run" - logger.debug(f"sending url to runpod: {url} with data: {data}") - - response = requests.post(url, headers=headers, json=data, timeout=600) - - # Handle the response - if not response.status_code == 200: - status_ctx.set_exit_message( - f"RunPod API returned status {response.status_code}: {response.text}" - ) - logger.error(f"RunPod API returned status 
{response.status_code}: {response.text}") - # TODO: handle class of error in runpod - raise Exception(f"RunPod API failed with status {response.status_code}") - - status_ctx.set_exit_message( - f"Successfully created view with {len(segment_ids)} segments" - ) - logger.info( - f"Successfully created view for project_analysis_run_id: {project_analysis_run_id}" - ) - logger.debug(f"RunPod response: {response.json()}") - return - - except DirectusBadRequest as e: - status_ctx.set_exit_message(f"Bad Directus request: {str(e)}") - logger.error(f"Bad Directus request. Something item might be missing? {e}") - return - - except DirectusServerError as e: - status_ctx.set_exit_message(f"Can retry. Directus server down? {e}") - logger.error(f"Can retry. Directus server down? {e}") - raise e from e - - except requests.exceptions.RequestException as e: - status_ctx.set_exit_message(f"Can retry. Network error calling RunPod API: {e}") - logger.error(f"Can retry. Network error calling RunPod API: {e}") - raise e from e - - except Exception as e: - status_ctx.set_exit_message( - f"Can retry. Views failed to create for unknown reason: {e}" - ) - logger.error(f"Can retry. Views failed to create for unknown reason: {e}") - raise e from e + status_ctx.set_exit_message( + "Topic modeler integration has been removed; skipping view creation." 
+ ) + logger.info( + "Skipping task_create_view for project_analysis_run_id %s because external topic " + "modeler support has been removed.", + project_analysis_run_id, + ) + return @dramatiq.actor(queue_name="network", priority=50) @@ -823,119 +479,3 @@ def task_create_project_library(project_id: str, language: str) -> None: ) return - - -@dramatiq.actor(queue_name="network", priority=10) -def task_process_runpod_chunk_response(chunk_id: str, status_link: str) -> None: - logger = getLogger("dembrane.tasks.task_process_runpod_chunk_response") - - # pre-flight check to avoid processing chunks that are not in a conversation - from dembrane.service import conversation_service - from dembrane.service.conversation import ConversationChunkNotFoundException - - try: - chunk_object = conversation_service.get_chunk_by_id_or_raise(chunk_id) - conversation_id = chunk_object["conversation_id"] - # unrecoverable error, we can't process the chunk - except ConversationChunkNotFoundException: - logger.error(f"Chunk {chunk_id} not found, skipping") - return - # retry - except Exception as e: - logger.error(f"Error fetching conversation for chunk {chunk_id}: {e}") - set_error_status( - conversation_chunk_id=chunk_id, error="Failed to fetch conversation for this chunk." 
- ) - raise e from e - - with ProcessingStatusContext( - conversation_id=conversation_id, - conversation_chunk_id=chunk_id, - event_prefix="task_process_runpod_chunk_response", - ): - chunk_object = conversation_service.get_chunk_by_id_or_raise(chunk_id) - conversation_id = chunk_object["conversation_id"] - - headers = { - "Authorization": f"Bearer {RUNPOD_WHISPER_API_KEY}", - "Content-Type": "application/json", - } - response = requests.get(status_link, headers=headers, timeout=30) - - if response.status_code == 200: - try: - logger.debug(f"About to parse JSON for chunk {chunk_id}") - data = response.json() - logger.debug(f"Successfully parsed JSON for chunk {chunk_id}") - - # Debug logging to see the actual structure - logger.debug(f"Raw response data structure for chunk {chunk_id}: {data}") - logger.debug(f"Type of data: {type(data)}") - if "output" in data: - logger.debug(f"Type of data['output']: {type(data['output'])}") - - logger.debug( - f"About to call load_runpod_transcription_response for chunk {chunk_id}" - ) - from dembrane.runpod import load_runpod_transcription_response - - load_runpod_transcription_response(data) - logger.debug( - f"Successfully completed load_runpod_transcription_response for chunk {chunk_id}" - ) - - except Exception as e: - logger.error(f"Error parsing response for chunk {chunk_id}: {e}") - logger.error(f"Error type: {type(e)}") - logger.error("Error traceback:", exc_info=True) - # Log the raw response for debugging - logger.error(f"Raw response text: {response.text}") - logger.error(f"Response status: {response.status_code}") - logger.error(f"Response headers: {response.headers}") - else: - logger.info(f"Non-200 response for chunk {chunk_id}, retrying transcription.") - try: - transcribe_conversation_chunk(chunk_id) - except Exception as e: - logger.error(f"Failed to re-trigger transcription for chunk {chunk_id}: {e}") - - -@dramatiq.actor(queue_name="network", priority=10) -def task_update_runpod_transcription_response() -> 
None: - logger = getLogger("dembrane.tasks.task_update_runpod_transcription_response") - try: - chunks = directus.get_items( - "conversation_chunk", - { - "query": { - "filter": {"runpod_job_status_link": {"_nnull": True}}, - "fields": ["id", "runpod_job_status_link"], - } - }, - ) - if not chunks: - logger.info("No chunks with runpod_job_status_link found.") - return - - # Dispatch a group of sub-tasks for parallel processing - group( - [ - task_process_runpod_chunk_response.message( - chunk["id"], chunk["runpod_job_status_link"] - ) - for chunk in chunks - ] - ).run() - - except Exception as e: - logger.error(f"Error in task_update_runpod_transcription_response: {e}") - - -@dramatiq.actor(queue_name="network", priority=30) -def task_get_runpod_diarization(chunk_id: str) -> None: - logger = getLogger("dembrane.tasks.task_get_runpod_diarization") - logger.info(f"Getting runpod diarization for chunk {chunk_id}") - try: - get_runpod_diarization(chunk_id) - except Exception as e: - logger.error(f"Error in task_get_runpod_diarization: {e}") diff --git a/echo/server/dembrane/transcribe.py b/echo/server/dembrane/transcribe.py index a73ff693..d095b228 100644 --- a/echo/server/dembrane/transcribe.py +++ b/echo/server/dembrane/transcribe.py @@ -19,85 +19,28 @@ import requests from dembrane.s3 import get_signed_url, get_stream_from_s3 -from dembrane.config import ( - GCP_SA_JSON, - API_BASE_URL, - ASSEMBLYAI_API_KEY, - ASSEMBLYAI_BASE_URL, - LITELLM_WHISPER_URL, - LITELLM_WHISPER_MODEL, - RUNPOD_WHISPER_API_KEY, - TRANSCRIPTION_PROVIDER, - LITELLM_WHISPER_API_KEY, - RUNPOD_WHISPER_BASE_URL, - LITELLM_WHISPER_API_VERSION, - ENABLE_ASSEMBLYAI_TRANSCRIPTION, - RUNPOD_WHISPER_PRIORITY_BASE_URL, - ENABLE_RUNPOD_WHISPER_TRANSCRIPTION, - ENABLE_LITELLM_WHISPER_TRANSCRIPTION, - RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD, -) +from dembrane.settings import get_settings +from dembrane.llms import MODELS, get_completion_kwargs, resolve_config from dembrane.prompts import render_prompt from 
dembrane.service import file_service, conversation_service from dembrane.directus import directus logger = logging.getLogger("transcribe") +settings = get_settings() +GCP_SA_JSON = settings.gcp_sa_json +API_BASE_URL = settings.api_base_url +ASSEMBLYAI_API_KEY = settings.assemblyai_api_key +ASSEMBLYAI_BASE_URL = settings.assemblyai_base_url +TRANSCRIPTION_PROVIDER = settings.transcription_provider +ENABLE_ASSEMBLYAI_TRANSCRIPTION = settings.enable_assemblyai_transcription +ENABLE_LITELLM_WHISPER_TRANSCRIPTION = settings.enable_litellm_whisper_transcription + class TranscriptionError(Exception): pass -def queue_transcribe_audio_runpod( - audio_file_uri: str, - language: Optional[str], - hotwords: Optional[List[str]] = None, - is_priority: bool = False, - conversation_chunk_id: Optional[str] = "", -) -> str: - """Transcribe audio using RunPod""" - logger = logging.getLogger("transcribe.transcribe_audio_runpod") - - try: - signed_url = get_signed_url(audio_file_uri, expires_in_seconds=3 * 24 * 60 * 60) # 3 days - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {RUNPOD_WHISPER_API_KEY}", - } - - input_payload = { - "audio": signed_url, - "hotwords": ", ".join(hotwords) if hotwords else None, - "conversation_chunk_id": conversation_chunk_id, - } - - if language: - input_payload["language"] = language - - data = { - "input": input_payload, - "webhook": f"{API_BASE_URL}/stateless/webhook/transcribe", - } - - logger.debug(f"data: {data}") - - try: - if is_priority: - url = f"{str(RUNPOD_WHISPER_PRIORITY_BASE_URL).rstrip('/')}/run" - else: - url = f"{str(RUNPOD_WHISPER_BASE_URL).rstrip('/')}/run" - response = requests.post(url, headers=headers, json=data, timeout=600) - response.raise_for_status() - job_id = response.json()["id"] - return job_id - except Exception as e: - logger.error(f"Failed to queue transcription job for RunPod: {e}") - raise TranscriptionError(f"Failed to queue transcription job for RunPod: {e}") from e - except Exception as 
e: - logger.error(f"Failed to get signed url for {audio_file_uri}: {e}") - raise TranscriptionError(f"Failed to get signed url for {audio_file_uri}: {e}") from e - - def transcribe_audio_litellm( audio_file_uri: str, language: Optional[str], whisper_prompt: Optional[str] ) -> str: @@ -115,12 +58,15 @@ def transcribe_audio_litellm( raise TranscriptionError(f"Failed to get audio stream from S3: {exc}") from exc try: + whisper_config = resolve_config(MODELS.MULTI_MODAL_FAST) + if not whisper_config.model or not whisper_config.api_key: + raise TranscriptionError("LiteLLM Whisper configuration is incomplete.") response = litellm.transcription( - model=LITELLM_WHISPER_MODEL, + model=whisper_config.model, file=file_upload, - api_key=LITELLM_WHISPER_API_KEY, - api_base=LITELLM_WHISPER_URL, - api_version=LITELLM_WHISPER_API_VERSION, + api_key=whisper_config.api_key, + api_base=whisper_config.api_base, + api_version=whisper_config.api_version, language=language, prompt=whisper_prompt, ) @@ -257,9 +203,8 @@ def _transcript_correction_workflow( assert GCP_SA_JSON, "GCP_SA_JSON is not set" + completion_kwargs = get_completion_kwargs(MODELS.MULTI_MODAL_PRO) response = litellm.completion( - model="vertex_ai/gemini-2.5-flash", - vertex_credentials=GCP_SA_JSON, messages=[ { "role": "system", @@ -285,6 +230,8 @@ def _transcript_correction_workflow( "type": "json_object", "response_schema": response_schema, }, + vertex_credentials=GCP_SA_JSON, + **completion_kwargs, ) json_response = json.loads(response.choices[0].message.content) @@ -406,94 +353,17 @@ def _build_hotwords(conversation: dict) -> Optional[List[str]]: return None -def _get_transcript_provider() -> Literal["Runpod", "LiteLLM", "AssemblyAI", "Dembrane-25-09"]: +def _get_transcript_provider() -> Literal["LiteLLM", "AssemblyAI", "Dembrane-25-09"]: if TRANSCRIPTION_PROVIDER: return TRANSCRIPTION_PROVIDER elif ENABLE_ASSEMBLYAI_TRANSCRIPTION: return "AssemblyAI" - elif ENABLE_RUNPOD_WHISPER_TRANSCRIPTION: - return "Runpod" 
elif ENABLE_LITELLM_WHISPER_TRANSCRIPTION: return "LiteLLM" else: raise TranscriptionError("No valid transcription configuration found.") -def _get_status_runpod(runpod_job_status_link: str) -> tuple[str, dict]: - headers = { - "Content-Type": "application/json", - "Authorization": f"Bearer {RUNPOD_WHISPER_API_KEY}", - } - response = requests.get(runpod_job_status_link, headers=headers, timeout=30) - response.raise_for_status() - - response_data = response.json() - - return response_data["status"], response_data - - -def _process_runpod_transcription( - chunk: dict, - conversation_chunk_id: str, - language: str, - hotwords: Optional[List[str]], -) -> str: - """Handle RunPod status checking, queuing new jobs and Directus updates. - - Returns: - str: The conversation chunk ID if successful - """ - runpod_request_count = chunk["runpod_request_count"] - source = chunk["source"] - runpod_job_status_link = chunk["runpod_job_status_link"] - - # 1. Check status of an existing job, if any - if runpod_job_status_link: - try: - job_status, _ = _get_status_runpod(runpod_job_status_link) - - if job_status == "IN_PROGRESS": - logger.info("RunPod job %s is still in progress", runpod_job_status_link) - return conversation_chunk_id - - except Exception as exc: # Broad catch – any issue we continue to (re)queue - logger.error("Unable to fetch RunPod status from %s: %s", runpod_job_status_link, exc) - - # 2. Respect max-request threshold - if runpod_request_count >= RUNPOD_WHISPER_MAX_REQUEST_THRESHOLD: - logger.info("RunPod request threshold reached for chunk %s", conversation_chunk_id) - directus.update_item( - collection_name="conversation_chunk", - item_id=conversation_chunk_id, - item_data={ - "runpod_job_status_link": None, - }, - ) - return conversation_chunk_id - - # 3. 
Queue a new transcription job - is_priority = source == "PORTAL_AUDIO" - - job_id = queue_transcribe_audio_runpod( - chunk["path"], - language=language, - hotwords=hotwords, - is_priority=is_priority, - conversation_chunk_id=conversation_chunk_id, - ) - - directus.update_item( - collection_name="conversation_chunk", - item_id=conversation_chunk_id, - item_data={ - "runpod_job_status_link": f"{str(RUNPOD_WHISPER_BASE_URL)}/status/{job_id}", - "runpod_request_count": runpod_request_count + 1, - }, - ) - - return conversation_chunk_id - - def transcribe_conversation_chunk( conversation_chunk_id: str, use_pii_redaction: bool = False ) -> str: @@ -556,12 +426,6 @@ def transcribe_conversation_chunk( }, ) return conversation_chunk_id - case "Runpod": - logger.info("Using RunPod for transcription") - hotwords = _build_hotwords(conversation) - return _process_runpod_transcription( - chunk, conversation_chunk_id, language, hotwords - ) case "LiteLLM": logger.info("Using LITELLM for transcription") whisper_prompt = _build_whisper_prompt(conversation, language) diff --git a/echo/server/json_templates/default_view_recurring_themes.de.jinja b/echo/server/json_templates/default_view_recurring_themes.de.jinja deleted file mode 100644 index 8f33e642..00000000 --- a/echo/server/json_templates/default_view_recurring_themes.de.jinja +++ /dev/null @@ -1,4 +0,0 @@ -{ - "user_query": "Geben Sie einen Überblick über die Hauptthemen und wiederkehrenden Themen", - "user_query_context": "Identifizieren Sie wiederkehrende Themen, Themen und Argumente, die konsistent in den Gesprächen auftreten. Analysieren Sie deren Häufigkeit, Intensität und Konsistenz. Erwartete Ausgabe: 3-7 Aspekte für kleine Datensätze, 5-12 für mittlere Datensätze, 8-15 für große Datensätze. Verarbeitungsrichtlinien: Konzentrieren Sie sich auf eindeutige Muster, die in mehreren Gesprächen auftreten. 
Qualitätsschwelle: Jeder Aspekt muss ein einzigartiges, konsistent auftretendes Thema mit klaren Belegen für Wiederholung darstellen." -} \ No newline at end of file diff --git a/echo/server/json_templates/default_view_recurring_themes.en.jinja b/echo/server/json_templates/default_view_recurring_themes.en.jinja deleted file mode 100644 index 33fb8253..00000000 --- a/echo/server/json_templates/default_view_recurring_themes.en.jinja +++ /dev/null @@ -1,4 +0,0 @@ -{ - "user_query": "Provide an overview of the main topics and recurring themes", - "user_query_context": "Identify recurring themes, topics, and arguments that appear consistently across conversations. Analyze their frequency, intensity, and consistency. Processing guidance: Focus on distinct patterns that emerge across multiple conversations. Quality threshold: Each aspect must represent a unique, consistently appearing theme with clear evidence of recurrence." -} \ No newline at end of file diff --git a/echo/server/json_templates/default_view_recurring_themes.es.jinja b/echo/server/json_templates/default_view_recurring_themes.es.jinja deleted file mode 100644 index 3ad19532..00000000 --- a/echo/server/json_templates/default_view_recurring_themes.es.jinja +++ /dev/null @@ -1,4 +0,0 @@ -{ - "user_query": "Proporcione una visión general de los temas principales y temas recurrentes", - "user_query_context": "Identifique temas recurrentes, tópicos y argumentos que aparecen consistentemente a través de las conversaciones. Analice su frecuencia, intensidad y consistencia. Salida esperada: 3-7 aspectos para conjuntos de datos pequeños, 5-12 para conjuntos de datos medianos, 8-15 para conjuntos de datos grandes. Guía de procesamiento: Enfóquese en patrones distintos que emergen a través de múltiples conversaciones. Umbral de calidad: Cada aspecto debe representar un tema único y consistentemente presente con evidencia clara de recurrencia." 
-} \ No newline at end of file diff --git a/echo/server/json_templates/default_view_recurring_themes.fr.jinja b/echo/server/json_templates/default_view_recurring_themes.fr.jinja deleted file mode 100644 index ff20ba5a..00000000 --- a/echo/server/json_templates/default_view_recurring_themes.fr.jinja +++ /dev/null @@ -1,4 +0,0 @@ -{ - "user_query": "Fournissez un aperçu des sujets principaux et des thèmes récurrents", - "user_query_context": "Identifiez les thèmes récurrents, sujets et arguments qui apparaissent de manière cohérente à travers les conversations. Analysez leur fréquence, intensité et cohérence. Sortie attendue : 3-7 aspects pour les petits ensembles de données, 5-12 pour les ensembles moyens, 8-15 pour les grands ensembles. Guidance de traitement : Concentrez-vous sur les modèles distincts qui émergent à travers plusieurs conversations. Seuil de qualité : Chaque aspect doit représenter un thème unique et cohérent avec des preuves claires de récurrence." -} \ No newline at end of file diff --git a/echo/server/json_templates/default_view_recurring_themes.nl.jinja b/echo/server/json_templates/default_view_recurring_themes.nl.jinja deleted file mode 100644 index 8db65df7..00000000 --- a/echo/server/json_templates/default_view_recurring_themes.nl.jinja +++ /dev/null @@ -1,4 +0,0 @@ -{ - "user_query": "Geef een overzicht van de hoofdonderwerpen en terugkerende thema's", - "user_query_context": "Identificeer terugkerende thema's, onderwerpen en argumenten die consistent voorkomen in gesprekken. Analyseer hun frequentie, intensiteit en consistentie. Verwachte uitvoer: 3-7 aspecten voor kleine datasets, 5-12 voor middelgrote datasets, 8-15 voor grote datasets. Verwerkingsrichtlijnen: Focus op onderscheidende patronen die opkomen in meerdere gesprekken. Kwaliteitsdrempel: Elk aspect moet een uniek, consistent voorkomend thema vertegenwoordigen met duidelijk bewijs van herhaling." 
-} \ No newline at end of file diff --git a/echo/server/prod.sh b/echo/server/prod.sh index 334724e8..17ace934 100755 --- a/echo/server/prod.sh +++ b/echo/server/prod.sh @@ -12,11 +12,9 @@ echo " Timeout: ${TIMEOUT}s" echo " Max Requests: $MAX_REQUESTS" echo "📊 Scale with K8s replicas (not workers per pod)" -# Use custom worker that configures asyncio loop for LightRAG compatibility -# (LightRAG uses asyncio.run() which requires nest_asyncio, incompatible with uvloop) exec gunicorn dembrane.main:app \ --workers "$WORKERS" \ - --worker-class dembrane.lightrag_uvicorn_worker.LightRagUvicornWorker \ + --worker-class dembrane.asyncio_uvicorn_worker.AsyncioUvicornWorker \ --bind 0.0.0.0:8000 \ --timeout "$TIMEOUT" \ --graceful-timeout 30 \ diff --git a/echo/server/pyproject.toml b/echo/server/pyproject.toml index e8e05ee7..4dc93eb1 100644 --- a/echo/server/pyproject.toml +++ b/echo/server/pyproject.toml @@ -13,11 +13,8 @@ dependencies = [ "langchain==0.1.*", "langchain-community==0.0.*", "langchain-experimental==0.0.*", - "langchain-openai==0.0.*", "pypdf==4.0.*", # SDKs for Models - "openai==1.99.*", - "anthropic==0.43.*", # Data "pandas==2.2.*", "numpy==1.26.*", @@ -52,6 +49,7 @@ dependencies = [ "lightrag-dembrane==1.2.7.8", "nest-asyncio==1.6.0", "pydantic==2.10.6", + "pydantic-settings==2.6.1", "pydub==0.25.1", "pytest==8.3.4", "PyYAML==6.0.2", @@ -66,7 +64,6 @@ dependencies = [ # LLM Tools "tiktoken==0.9.0", "asyncpg==0.30.0", - "neo4j==5.28.1", "boto3==1.37.*", # Uncategorized "mypy>=1.16.0", diff --git a/echo/server/scripts/simple_rag_observer.py b/echo/server/scripts/simple_rag_observer.py deleted file mode 100755 index a5e537d0..00000000 --- a/echo/server/scripts/simple_rag_observer.py +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env python3 -""" -Simple RAG ETL Observer - Monitor LightRAG processing status - -Usage: - python simple_rag_observer.py # Watch all recent conversations - python simple_rag_observer.py # Watch specific conversation -""" -import os 
-import sys -import time -from pathlib import Path - -# Load environment -from dotenv import load_dotenv -load_dotenv(Path(__file__).parents[1] / ".env") -load_dotenv(Path(__file__).parents[3] / "local.env") - -import psycopg -from neo4j import GraphDatabase -from directus_py_sdk import DirectusClient - - -def connect_db(): - """Connect to PostgreSQL""" - db_url = os.getenv("DATABASE_URL") - if db_url.startswith("postgresql+psycopg://"): - db_url = "postgresql://" + db_url[21:] - return psycopg.connect(db_url) - - -def connect_neo4j(): - """Connect to Neo4j""" - uri = os.getenv("NEO4J_URI") - user = os.getenv("NEO4J_USERNAME") - password = os.getenv("NEO4J_PASSWORD") - return GraphDatabase.driver(uri, auth=(user, password)) - - -def get_global_stats(conn, neo4j_driver): - """Get system-wide statistics""" - cur = conn.cursor() - - # PostgreSQL counts - cur.execute("SELECT count(*) FROM conversation") - conversations = cur.fetchone()[0] - - cur.execute("SELECT count(*) FROM conversation_chunk") - chunks = cur.fetchone()[0] - - cur.execute("SELECT count(*) FROM conversation_segment") - segments = cur.fetchone()[0] - - cur.execute("SELECT count(*) FROM lightrag_vdb_transcript") - vectors = cur.fetchone()[0] - - cur.execute("SELECT count(*) FROM lightrag_doc_status") - docs = cur.fetchone()[0] - - # Neo4j counts - with neo4j_driver.session() as session: - result = session.run("MATCH (n) RETURN count(n) as total") - neo4j_nodes = result.single()["total"] - - return { - "conversations": conversations, - "chunks": chunks, - "segments": segments, - "vectors": vectors, - "docs": docs, - "neo4j_nodes": neo4j_nodes, - } - - -def get_conversation_details(conn, conv_id): - """Get details for a specific conversation""" - cur = conn.cursor() - - # Conversation info - cur.execute(""" - SELECT participant_name, is_finished, is_audio_processing_finished - FROM conversation WHERE id = %s - """, (conv_id,)) - row = cur.fetchone() - if not row: - return None - - name, is_finished, 
processing_done = row - status = "finished" if is_finished else "in_progress" - - # Chunks - cur.execute(""" - SELECT count(*) FROM conversation_chunk WHERE conversation_id = %s - """, (conv_id,)) - chunk_count = cur.fetchone()[0] - - # Segments via chunks - cur.execute(""" - SELECT count(DISTINCT cs.id) - FROM conversation_segment cs - JOIN conversation_segment_conversation_chunk cscc ON cs.id = cscc.conversation_segment_id - JOIN conversation_chunk cc ON cc.id = cscc.conversation_chunk_id - WHERE cc.conversation_id = %s - """, (conv_id,)) - segment_count = cur.fetchone()[0] - - return { - "name": name, - "status": status, - "processing_done": processing_done, - "chunks": chunk_count, - "segments": segment_count, - } - - -def print_stats(stats, conversation=None): - """Print statistics""" - print("\n" + "="*60) - print(f"RAG ETL Observer - {time.strftime('%Y-%m-%d %H:%M:%S')}") - print("="*60) - - print(f"\nGlobal Stats:") - print(f" Conversations: {stats['conversations']}") - print(f" Chunks: {stats['chunks']}") - print(f" Segments: {stats['segments']}") - print(f" Vector Transcripts: {stats['vectors']}") - print(f" LightRAG Docs: {stats['docs']}") - print(f" Neo4j Nodes: {stats['neo4j_nodes']}") - - if conversation: - print(f"\nConversation Details:") - print(f" Name: {conversation['name']}") - print(f" Status: {conversation['status']}") - print(f" Processing Done: {conversation['processing_done']}") - print(f" Chunks: {conversation['chunks']}") - print(f" Segments: {conversation['segments']}") - - print("\n" + "="*60) - - -def main(): - # Parse arguments - conv_id = None - auto_mode = False - - for arg in sys.argv[1:]: - if arg == "--auto": - auto_mode = True - elif not arg.startswith("-"): - conv_id = arg - - print("Connecting to databases...") - conn = connect_db() - neo4j_driver = connect_neo4j() - - try: - while True: - stats = get_global_stats(conn, neo4j_driver) - conversation = None - - if conv_id: - conversation = get_conversation_details(conn, conv_id) 
- if not conversation: - print(f"Conversation {conv_id} not found") - break - - os.system("clear") - print_stats(stats, conversation) - - if conv_id and not auto_mode: - # Single conversation mode without auto - just show once - break - - # Watch mode or auto mode - refresh every 5 seconds - time.sleep(5) - - except KeyboardInterrupt: - print("\n\nExiting...") - finally: - conn.close() - neo4j_driver.close() - - -if __name__ == "__main__": - main() diff --git a/echo/server/scripts/test_rag_query.py b/echo/server/scripts/test_rag_query.py deleted file mode 100644 index 98cf444f..00000000 --- a/echo/server/scripts/test_rag_query.py +++ /dev/null @@ -1,74 +0,0 @@ -#!/usr/bin/env python3 -""" -Test script to query LightRAG and see the response. - -This helps verify that: -1. LightRAG has data -2. RAG queries work correctly -3. You can see what data is being returned -""" - -import sys -import os -import asyncio - -# Add parent directory to path -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -from dembrane.rag_manager import RAGManager -from dembrane.audio_lightrag.utils.async_utils import run_async_in_new_loop - - -async def test_rag_query(query: str): - """Test a RAG query""" - print(f"\nQuerying LightRAG with: '{query}'") - print("="*60) - - # Initialize RAG - if not RAGManager.is_initialized(): - print("Initializing RAG...") - await RAGManager.initialize() - - rag = RAGManager.get_instance() - - # Query RAG - from lightrag.lightrag import QueryParam - print("\nSearching...") - result = await rag.aquery(query, param=QueryParam(mode="local")) - - print(f"\nResult:") - print("-"*60) - print(result) - print("-"*60) - - return result - - -def main(): - """Run test queries""" - print("="*60) - print("LightRAG Query Test") - print("="*60) - - # Default test query - query = sys.argv[1] if len(sys.argv) > 1 else "What topics have been discussed in conversations?" 
- - # Run query in new event loop (like Dramatiq tasks do) - result = run_async_in_new_loop(test_rag_query(query)) - - print(f"\n✓ Query completed") - print(f" Result length: {len(result)} characters") - - # Show some stats - if "no relevant" in result.lower() or "no information" in result.lower(): - print("\n⚠️ RAG returned 'no relevant information'") - print(" This means either:") - print(" 1. ETL pipeline hasn't finished processing conversations yet") - print(" 2. No data matches your query") - print(" 3. LightRAG database is empty") - else: - print("\n✓ RAG found relevant information!") - - -if __name__ == "__main__": - main() diff --git a/echo/server/scripts/test_trigger_directus_etl.py b/echo/server/scripts/test_trigger_directus_etl.py deleted file mode 100644 index a799b3f5..00000000 --- a/echo/server/scripts/test_trigger_directus_etl.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python3 -""" -Test script to manually trigger task_run_etl_pipeline (THE PIVOT version). -This will help test the new simplified RAG ETL pipeline. 
-""" - -import os -import sys - -# Add parent directory to path -sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) - -from dembrane.tasks import task_run_etl_pipeline -import time - -print("Testing task_run_etl_pipeline (THE PIVOT)...") -print(f"Task registered: {task_run_etl_pipeline}") -print(f"Task actor name: {task_run_etl_pipeline.actor_name}") -print(f"Task queue: {task_run_etl_pipeline.queue_name}") -print(f"Task priority: {task_run_etl_pipeline.priority}") - -# Try to send the task -test_conversation_id = input("Enter conversation ID to test: ").strip() -if not test_conversation_id: - print("No conversation ID provided, exiting") - sys.exit(1) - -print(f"\nSending task for conversation: {test_conversation_id}") - -try: - message = task_run_etl_pipeline.send(test_conversation_id) - print(f"Task sent successfully: {message}") - print(f"Message ID: {message.message_id}") - print(f"Waiting for result (5 minute timeout)...") - - result = message.get_result(block=True, timeout=300) - print(f"Result: {result}") - -except Exception as e: - print(f"Error: {type(e).__name__}: {e}") - import traceback - traceback.print_exc() diff --git a/echo/server/tests/k6_load_testing/.gitignore b/echo/server/tests/k6_load_testing/.gitignore deleted file mode 100644 index e9454623..00000000 --- a/echo/server/tests/k6_load_testing/.gitignore +++ /dev/null @@ -1,21 +0,0 @@ -# Environment files -.env - -# Results directory -results/ -*.json - -# Logs -*.log - -# OS generated files -.DS_Store -.DS_Store? 
-._* -.Spotlight-V100 -.Trashes -ehthumbs.db -Thumbs.db - -# Docker volumes -docker-data/ \ No newline at end of file diff --git a/echo/server/tests/k6_load_testing/env.example b/echo/server/tests/k6_load_testing/env.example deleted file mode 100644 index 064c663a..00000000 --- a/echo/server/tests/k6_load_testing/env.example +++ /dev/null @@ -1,9 +0,0 @@ -# RunPod API Configuration -RUNPOD_API_KEY=your_api_key_here -RUNPOD_ENDPOINT_ID=your_endpoint_id_here - -# Test Configuration -AUDIO_URL=https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3 -BASE_URL=https://api.runpod.ai/v2 -WHISPER_MODEL=large-v3 -INITIAL_PROMPT=This is a test transcription. \ No newline at end of file diff --git a/echo/server/tests/k6_load_testing/run-tests.sh b/echo/server/tests/k6_load_testing/run-tests.sh deleted file mode 100755 index aeeca7a2..00000000 --- a/echo/server/tests/k6_load_testing/run-tests.sh +++ /dev/null @@ -1,197 +0,0 @@ -#!/bin/bash - -# K6 Load Testing Runner for RunPod API (Unified Script) -# Usage: ./run-tests.sh [test-type] [options] - -set -e - -# Colors for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -BLUE='\033[0;34m' -NC='\033[0m' # No Color - -# Default values -TEST_TYPE="smoke" -RESULTS_DIR="./results" -DOCKER_IMAGE="grafana/k6:latest" -K6_SCRIPT="scripts/k6_runpod_transcribe.js" - -# Function to print colored output -print_status() { - echo -e "${BLUE}[INFO]${NC} $1" -} - -print_success() { - echo -e "${GREEN}[SUCCESS]${NC} $1" -} - -print_warning() { - echo -e "${YELLOW}[WARNING]${NC} $1" -} - -print_error() { - echo -e "${RED}[ERROR]${NC} $1" -} - -# Function to show usage -show_usage() { - echo "Usage: $0 [test-type] [options]" - echo "" - echo "Test Types:" - echo " smoke - Basic functionality test (default)" - echo " load - Standard load test" - echo " stress - Stress test to find breaking points" - echo " spike - Spike test for sudden load increases" - echo " all - Run all test types sequentially" - 
echo "" - echo "Options:" - echo " -h, --help Show this help message" - echo " -c, --clean Clean results directory before running" - echo " -v, --verbose Verbose output" - echo "" - echo "Examples:" - echo " $0 smoke" - echo " $0 load --clean" - echo " $0 stress -v" - echo " $0 all" -} - -# Function to check prerequisites -check_prerequisites() { - print_status "Checking prerequisites..." - - # Check if Docker is installed and running - if ! command -v docker &> /dev/null; then - print_error "Docker is not installed. Please install Docker first." - exit 1 - fi - - if ! docker info &> /dev/null; then - print_error "Docker is not running. Please start Docker first." - exit 1 - fi - - # Check if .env file exists - if [ ! -f ".env" ]; then - print_error ".env file not found. Please create it from env.example" - print_status "Run: cp env.example .env" - print_status "Then edit .env with your RunPod API key and config." - exit 1 - fi - - # Check if API key is set - if ! grep -q "RUNPOD_API_KEY=.*[^[:space:]]" .env; then - print_error "RUNPOD_API_KEY is not set in .env file" - exit 1 - fi - print_success "Prerequisites check passed" -} - -# Function to create results directory -setup_results_dir() { - if [ "$CLEAN_RESULTS" = true ]; then - print_status "Cleaning results directory..." - rm -rf "$RESULTS_DIR" - fi - - mkdir -p "$RESULTS_DIR" - print_status "Results will be saved to: $RESULTS_DIR" -} - -# Function to run the test -run_test() { - print_status "Running $TEST_TYPE test..." 
- print_status "Script: $K6_SCRIPT" - - # Compose docker run command - local docker_cmd="docker run --rm" - docker_cmd="$docker_cmd -v $(pwd)/scripts:/scripts" - docker_cmd="$docker_cmd -v $(pwd)/$RESULTS_DIR:/results" - docker_cmd="$docker_cmd --env-file .env" - docker_cmd="$docker_cmd -e TEST_TYPE=$TEST_TYPE" - if [ "$VERBOSE" = true ]; then - docker_cmd="$docker_cmd -e K6_LOG_LEVEL=debug" - fi - docker_cmd="$docker_cmd $DOCKER_IMAGE run" - docker_cmd="$docker_cmd --out json=/results/${TEST_TYPE}-test-results.json" - docker_cmd="$docker_cmd /$K6_SCRIPT" - - print_status "Executing: $docker_cmd" - - # Run the test - if eval $docker_cmd; then - print_success "$TEST_TYPE test completed successfully" - print_status "Results saved to: $RESULTS_DIR/${TEST_TYPE}-test-results.json" - else - print_error "$TEST_TYPE test failed" - exit 1 - fi -} - -# Function to run all tests -run_all_tests() { - local tests=("smoke" "load" "stress" "spike") - print_status "Running all test types..." - for test in "${tests[@]}"; do - TEST_TYPE="$test" - print_status "Starting $test test..." - run_test - print_success "$test test completed" - echo "" - done - print_success "All tests completed!" -} - -# Parse command line arguments -CLEAN_RESULTS=false -VERBOSE=false - -while [[ $# -gt 0 ]]; do - case $1 in - smoke|load|stress|spike) - TEST_TYPE="$1" - shift - ;; - all) - TEST_TYPE="all" - shift - ;; - -c|--clean) - CLEAN_RESULTS=true - shift - ;; - -v|--verbose) - VERBOSE=true - shift - ;; - -h|--help) - show_usage - exit 0 - ;; - *) - print_error "Unknown option: $1" - show_usage - exit 1 - ;; - esac - done - -# Main execution -main() { - print_status "K6 Load Testing for RunPod API (Unified Script)" - print_status "Test type: $TEST_TYPE" - echo "" - check_prerequisites - setup_results_dir - if [ "$TEST_TYPE" = "all" ]; then - run_all_tests - else - run_test - fi - print_success "Testing completed!" 
-} - -# Run main function -main \ No newline at end of file diff --git a/echo/server/tests/k6_load_testing/scripts/k6_runpod_transcribe.js b/echo/server/tests/k6_load_testing/scripts/k6_runpod_transcribe.js deleted file mode 100644 index 3fe91fca..00000000 --- a/echo/server/tests/k6_load_testing/scripts/k6_runpod_transcribe.js +++ /dev/null @@ -1,125 +0,0 @@ -import http from 'k6/http'; -import { check, sleep } from 'k6'; - -// Read config from environment variables -const RUNPOD_API_KEY = __ENV.RUNPOD_API_KEY; -const RUNPOD_ENDPOINT_ID = __ENV.RUNPOD_ENDPOINT_ID; -const AUDIO_URL = __ENV.AUDIO_URL; -const BASE_URL = __ENV.BASE_URL || 'https://api.runpod.ai/v2'; -const WHISPER_MODEL = __ENV.WHISPER_MODEL || 'large-v3'; -const INITIAL_PROMPT = __ENV.INITIAL_PROMPT || ''; -const POLL_INTERVAL = parseInt(__ENV.POLL_INTERVAL || '10', 10); // seconds -const POLL_TIMEOUT = parseInt(__ENV.POLL_TIMEOUT || '1000', 10); // seconds - -if (!RUNPOD_API_KEY || !RUNPOD_ENDPOINT_ID || !AUDIO_URL) { - throw new Error( - 'Missing required environment variables: RUNPOD_API_KEY, RUNPOD_ENDPOINT_ID, AUDIO_URL' - ); -} - -// Choose scenario via TEST_TYPE env var: 'smoke', 'load', 'spike', 'stress' -const TEST_TYPE = (__ENV.TEST_TYPE || 'smoke').toLowerCase(); - -let options = {}; -switch (TEST_TYPE) { - case 'smoke': - options = { - vus: 1, - iterations: 1, - }; - break; - case 'load': - options = { - vus: 500, - duration: '10m', - }; - break; - case 'spike': - options = { - stages: [ - { duration: '30s', target: 1 }, - { duration: '5m', target: 500 }, - { duration: '30s', target: 1 }, - ], - }; - break; - case 'stress': - options = { - stages: [ - { duration: '2m', target: 1500 }, - { duration: '4m', target: 0 }, - ], - }; - break; - default: - options = { vus: 1, iterations: 1 }; -} - -export { options }; - -// Parse AUDIO_URL for multiple files -let audioFiles = [AUDIO_URL]; -if (AUDIO_URL.includes(',')) { - audioFiles = AUDIO_URL.split(',') - .map((f) => f.trim()) - 
.filter(Boolean); -} - -function submitTranscriptionJob() { - // Randomly select an audio file if multiple are provided - const selectedAudio = - audioFiles.length > 1 - ? audioFiles[Math.floor(Math.random() * audioFiles.length)] - : audioFiles[0]; - const url = `${BASE_URL}/${RUNPOD_ENDPOINT_ID}/run`; - const headers = { - Authorization: `Bearer ${RUNPOD_API_KEY}`, - 'Content-Type': 'application/json', - }; - const payload = JSON.stringify({ - input: { - audio: selectedAudio, - model: WHISPER_MODEL, - initial_prompt: INITIAL_PROMPT, - language: 'en', - }, - }); - const res = http.post(url, payload, { headers }); - check(res, { - 'job submitted': (r) => r.status === 200 && r.json('id'), - }); - return res.json('id'); -} - -function pollStatus(jobId) { - const statusUrl = `${BASE_URL}/${RUNPOD_ENDPOINT_ID}/status/${jobId}`; - const headers = { - Authorization: `Bearer ${RUNPOD_API_KEY}`, - 'Content-Type': 'application/json', - }; - let waited = 0; - while (waited < POLL_TIMEOUT) { - const res = http.get(statusUrl, { headers }); - const status = res.json('status'); - if (status === 'COMPLETED') { - check(res, { - 'transcription completed': (r) => !!r.json('output.transcription'), - }); - return res.json('output.transcription'); - } else if (status === 'FAILED') { - check(res, { 'transcription failed': () => false }); - return null; - } - sleep(POLL_INTERVAL); - waited += POLL_INTERVAL; - } - check(null, { 'timeout waiting for transcription': () => false }); - return null; -} - -export default function () { - const jobId = submitTranscriptionJob(); - if (jobId) { - pollStatus(jobId); - } -} diff --git a/echo/server/tests/test_audio_utils.py b/echo/server/tests/test_audio_utils.py index 360d9c86..fe5f04d0 100644 --- a/echo/server/tests/test_audio_utils.py +++ b/echo/server/tests/test_audio_utils.py @@ -6,7 +6,7 @@ from dembrane.s3 import s3_client, get_sanitized_s3_key from dembrane.utils import generate_uuid -from dembrane.config import BASE_DIR, STORAGE_S3_BUCKET, 
STORAGE_S3_ENDPOINT +from dembrane.settings import get_settings from dembrane.directus import directus from dembrane.audio_utils import ( probe_from_s3, @@ -20,6 +20,11 @@ logger = logging.getLogger(__name__) +settings = get_settings() +BASE_DIR = settings.base_dir +STORAGE_S3_BUCKET = settings.storage_s3_bucket +STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint + AUDIO_FILES = [ "wav.wav", diff --git a/echo/server/tests/test_conversation_utils.py b/echo/server/tests/test_conversation_utils.py index b8061993..cdd16be4 100644 --- a/echo/server/tests/test_conversation_utils.py +++ b/echo/server/tests/test_conversation_utils.py @@ -3,10 +3,7 @@ from dembrane.utils import get_utc_timestamp from dembrane.directus import directus -from dembrane.conversation_utils import ( - collect_unfinished_conversations, - collect_unfinished_audio_processing_conversations, -) +from dembrane.conversation_utils import collect_unfinished_conversations from .common import ( create_project, @@ -14,8 +11,6 @@ create_conversation, delete_conversation, delete_conversation_chunk, - create_conversation_segment, - delete_conversation_segment, ) logger = logging.getLogger("test_conversation_utils") @@ -169,49 +164,3 @@ def test_collect_unfinished_conversations(): delete_conversation_chunk(cc3["id"]) delete_conversation(c["id"]) delete_project(p["id"]) - - -def test_collect_unfinished_audio_processing_conversations(): - # Setup project with enhanced audio processing enabled - p = create_project( - "test_p", - "en", - additional_data={"is_enhanced_audio_processing_enabled": True}, - ) - - # Conversation with audio processing not finished should be returned - c1 = create_conversation( - p["id"], - "c1_test", - additional_data={"is_audio_processing_finished": False}, - ) - - # Conversation marked finished but has unprocessed segment - c2 = create_conversation( - p["id"], - "c2_test", - additional_data={"is_audio_processing_finished": True}, - ) - seg2 = create_conversation_segment(c2["id"], False) - 
- # Conversation marked finished and all segments processed - c3 = create_conversation( - p["id"], - "c3_test", - additional_data={"is_audio_processing_finished": True}, - ) - seg3 = create_conversation_segment(c3["id"], True) - - res = collect_unfinished_audio_processing_conversations() - - assert c1["id"] in res, "Conversation with unfinished processing not returned" - assert c2["id"] in res, "Conversation with unprocessed segments not returned" - assert c3["id"] not in res, "Conversation with all segments processed should not be returned" - - # Cleanup - delete_conversation_segment(seg2["id"]) - delete_conversation_segment(seg3["id"]) - delete_conversation(c1["id"]) - delete_conversation(c2["id"]) - delete_conversation(c3["id"]) - delete_project(p["id"]) diff --git a/echo/server/tests/test_transcribe_assembly.py b/echo/server/tests/test_transcribe_assembly.py index 9eab1bec..8daa4f84 100644 --- a/echo/server/tests/test_transcribe_assembly.py +++ b/echo/server/tests/test_transcribe_assembly.py @@ -3,6 +3,8 @@ import pytest +TEST_AUDIO_URL = "https://storage.googleapis.com/aai-platform-public/samples/1765269382848385.wav" + from dembrane.s3 import delete_from_s3, save_to_s3_from_url from dembrane.utils import get_utc_timestamp from dembrane.directus import directus @@ -17,7 +19,6 @@ def _require_assemblyai(): pytest.skip("ASSEMBLYAI_API_KEY not set; skipping AssemblyAI tests") # Force provider selection to AssemblyAI in config by env flags os.environ["ENABLE_ASSEMBLYAI_TRANSCRIPTION"] = "true" - os.environ["ENABLE_RUNPOD_WHISPER_TRANSCRIPTION"] = "false" os.environ["ENABLE_LITELLM_WHISPER_TRANSCRIPTION"] = "false" @@ -40,10 +41,7 @@ def fixture_chunk_en(): {"project_id": p["id"], "participant_name": "test_assembly_en", "language": "en"}, )["data"] - path = save_to_s3_from_url( - "https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", - public=True, - ) + path = save_to_s3_from_url(TEST_AUDIO_URL, public=True) cc = 
directus.create_item( "conversation_chunk", @@ -87,10 +85,7 @@ def fixture_chunk_nl(): {"project_id": p["id"], "participant_name": "test_assembly_nl", "language": "nl"}, )["data"] - path = save_to_s3_from_url( - "https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", - public=True, - ) + path = save_to_s3_from_url(TEST_AUDIO_URL, public=True) cc = directus.create_item( "conversation_chunk", @@ -140,7 +135,7 @@ def test_transcribe_conversation_chunk_nl(self, fixture_chunk_nl): def test_transcribe_audio_assemblyai(): transcript, response = transcribe_audio_assemblyai( - audio_file_uri="https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", + audio_file_uri=TEST_AUDIO_URL, language="en", hotwords=["Arther"], ) diff --git a/echo/server/tests/test_transcribe_runpod.py b/echo/server/tests/test_transcribe_runpod.py deleted file mode 100644 index a29a7646..00000000 --- a/echo/server/tests/test_transcribe_runpod.py +++ /dev/null @@ -1,148 +0,0 @@ -import time -import logging - -import pytest - -from dembrane.s3 import delete_from_s3, save_to_s3_from_url -from dembrane.utils import get_utc_timestamp -from dembrane.directus import directus -from dembrane.transcribe import ( - _get_status_runpod, - queue_transcribe_audio_runpod, - transcribe_conversation_chunk, -) - -logger = logging.getLogger("test_transcribe") - - -@pytest.fixture -def fixture_english_chunk(): - logger.info("setup") - - p = directus.create_item( - "project", - { - "name": "test", - "language": "en", - "is_conversation_allowed": True, - }, - )["data"] - - c = directus.create_item( - "conversation", - {"project_id": p["id"], "participant_name": "test_english", "language": "en"}, - )["data"] - - path = save_to_s3_from_url( - "https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", - public=True, - ) - - cc = directus.create_item( - "conversation_chunk", - { - "conversation_id": c["id"], - "path": path, - "timestamp": 
str(get_utc_timestamp()), - }, - )["data"] - - yield cc["id"] - - logger.info("teardown") - - directus.delete_item("conversation_chunk", cc["id"]) - - directus.delete_item("conversation", c["id"]) - - directus.delete_item("project", p["id"]) - - delete_from_s3(path) - - -@pytest.fixture -def fixture_dutch_chunk(): - logger.info("setup") - - p = directus.create_item( - "project", - { - "name": "test", - "language": "nl", - "is_conversation_allowed": True, - }, - )["data"] - - c = directus.create_item( - "conversation", {"project_id": p["id"], "participant_name": "test_dutch", "language": "nl"} - )["data"] - - path = save_to_s3_from_url( - "https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", - public=True, - ) - - cc = directus.create_item( - "conversation_chunk", - { - "conversation_id": c["id"], - "path": path, - "timestamp": str(get_utc_timestamp()), - }, - )["data"] - - yield cc["id"] - - logger.info("teardown") - - directus.delete_item("conversation_chunk", cc["id"]) - - directus.delete_item("conversation", c["id"]) - - directus.delete_item("project", p["id"]) - - delete_from_s3(path) - - -@pytest.mark.parametrize("is_priority", [True, False]) -def test_queue_transcribe_audio_runpod(is_priority: bool): - job_id = queue_transcribe_audio_runpod( - audio_file_uri="https://github.com/runpod-workers/sample-inputs/raw/refs/heads/main/audio/Arthur.mp3", - whisper_prompt="", - language="en", - is_priority=is_priority, - ) - assert job_id is not None - - -def test_transcribe_conversation_chunk_english(fixture_english_chunk): - logger.info(f"fixture_english_chunk conversation_chunk_id: {fixture_english_chunk}") - result = transcribe_conversation_chunk(fixture_english_chunk) - logger.info(f"result: {result}") - assert result is not None - - -def test_transcribe_conversation_chunk_dutch(fixture_dutch_chunk): - logger.info(f"fixture_dutch_chunk: {fixture_dutch_chunk}") - result = transcribe_conversation_chunk(fixture_dutch_chunk) - - # get 
the conversation chunk - cc = dict(directus.get_item("conversation_chunk", result)) - - logger.info(f"cc: {cc}") - assert cc.get("runpod_job_status_link") is not None - - status, _ = _get_status_runpod(cc["runpod_job_status_link"]) - while status in ["IN_PROGRESS", "IN_QUEUE"]: - logger.info(f"waiting for job to finish: {status}") - time.sleep(2) - status, _ = _get_status_runpod(cc["runpod_job_status_link"]) - - # get the status of the job - status, data = _get_status_runpod(cc["runpod_job_status_link"]) - - logger.info(f"data: {data}") - - # get the output - assert data.get("output") is not None - assert data.get("output").get("joined_text") is not None diff --git a/echo/server/uv.lock b/echo/server/uv.lock index 1112b8ca..66acffce 100644 --- a/echo/server/uv.lock +++ b/echo/server/uv.lock @@ -101,24 +101,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] -[[package]] -name = "anthropic" -version = "0.43.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "anyio" }, - { name = "distro" }, - { name = "httpx" }, - { name = "jiter" }, - { name = "pydantic" }, - { name = "sniffio" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ca/d8/238c2bc59e41a787e7b62460adfc7b2edd88f28b0a14e292801a72725369/anthropic-0.43.1.tar.gz", hash = "sha256:c7f13e4b7b515ac4a3111142310b214527c0fc561485e5bc9b582e49fe3adba2", size = 195298, upload-time = "2025-01-17T19:49:18.635Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/89/2b/63b167d76401f759c8c4ff0266042e60aac6fd3cc0685b27437ceaaf95eb/anthropic-0.43.1-py3-none-any.whl", hash = "sha256:20759c25cd0f4072eb966b0180a41c061c156473bbb674da6a3f1e92e1ad78f8", size = 208170, upload-time = 
"2025-01-17T19:49:17.102Z" }, -] - [[package]] name = "anyio" version = "4.11.0" @@ -497,7 +479,6 @@ dependencies = [ { name = "aiofiles" }, { name = "aiohttp" }, { name = "alembic" }, - { name = "anthropic" }, { name = "apscheduler" }, { name = "asyncpg" }, { name = "backoff" }, @@ -518,22 +499,20 @@ dependencies = [ { name = "langchain" }, { name = "langchain-community" }, { name = "langchain-experimental" }, - { name = "langchain-openai" }, { name = "lightrag-dembrane" }, { name = "litellm" }, { name = "lz4" }, { name = "mypy" }, - { name = "neo4j" }, { name = "nest-asyncio" }, { name = "networkx" }, { name = "numpy" }, - { name = "openai" }, { name = "pandas" }, { name = "pandas-stubs" }, { name = "pgvector" }, { name = "pipmaster" }, { name = "psycopg", extra = ["binary", "pool"] }, { name = "pydantic" }, + { name = "pydantic-settings" }, { name = "pydub" }, { name = "pylance" }, { name = "pypdf" }, @@ -566,7 +545,6 @@ requires-dist = [ { name = "aiofiles", specifier = "==23.2.*" }, { name = "aiohttp", specifier = "==3.11.14" }, { name = "alembic", specifier = "==1.13.*" }, - { name = "anthropic", specifier = "==0.43.*" }, { name = "apscheduler", specifier = "==3.11.*" }, { name = "asyncpg", specifier = "==0.30.0" }, { name = "backoff", specifier = "==2.2.*" }, @@ -587,22 +565,20 @@ requires-dist = [ { name = "langchain", specifier = "==0.1.*" }, { name = "langchain-community", specifier = "==0.0.*" }, { name = "langchain-experimental", specifier = "==0.0.*" }, - { name = "langchain-openai", specifier = "==0.0.*" }, { name = "lightrag-dembrane", specifier = "==1.2.7.8" }, { name = "litellm", specifier = "==1.76.*" }, { name = "lz4", specifier = "==4.4.*" }, { name = "mypy", specifier = ">=1.16.0" }, - { name = "neo4j", specifier = "==5.28.1" }, { name = "nest-asyncio", specifier = "==1.6.0" }, { name = "networkx", specifier = "==3.4.*" }, { name = "numpy", specifier = "==1.26.*" }, - { name = "openai", specifier = "==1.99.*" }, { name = "pandas", specifier = 
"==2.2.*" }, { name = "pandas-stubs", specifier = ">=2.2.2.240514" }, { name = "pgvector", specifier = "==0.2.*" }, { name = "pipmaster", specifier = "==0.5.1" }, { name = "psycopg", extras = ["binary", "pool"], specifier = "==3.1.*" }, { name = "pydantic", specifier = "==2.10.6" }, + { name = "pydantic-settings", specifier = "==2.6.1" }, { name = "pydub", specifier = "==0.25.1" }, { name = "pylance", specifier = ">=0.30.0" }, { name = "pypdf", specifier = "==4.0.*" }, @@ -1446,20 +1422,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/14/16/fe6aaa26273e21e20a5564fd10d638ac32ce184e113908d560956025e03f/langchain_experimental-0.0.58-py3-none-any.whl", hash = "sha256:106d3bc7df3dd20687378db7534c2fc21e2589201d43de42f832a1e3913dd55b", size = 199359, upload-time = "2024-05-08T04:43:07.208Z" }, ] -[[package]] -name = "langchain-openai" -version = "0.0.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "langchain-core" }, - { name = "openai" }, - { name = "tiktoken" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/bb/451a3d2244350a40aa3dc822a1c405391bba1f79a8830986bd8b27d62260/langchain_openai-0.0.8.tar.gz", hash = "sha256:b7aba7fcc52305e78b08197ebc54fc45cc06dbc40ba5b913bc48a22b30a4f5c9", size = 25908, upload-time = "2024-02-27T12:26:41.204Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/81/63/012be16114559243aabcc9ec570366df84591dc9f8f3c2349a398e9b3626/langchain_openai-0.0.8-py3-none-any.whl", hash = "sha256:4862fc72cecbee0240aaa6df0234d5893dd30cd33ca23ac5cfdd86c11d2c44df", size = 32286, upload-time = "2024-02-27T12:26:39.99Z" }, -] - [[package]] name = "langchain-text-splitters" version = "0.0.2" @@ -1635,18 +1597,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time 
= "2025-04-22T14:54:22.983Z" }, ] -[[package]] -name = "neo4j" -version = "5.28.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pytz" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4b/20/733dac16f7cedc80b23093415822c9763302519cba0e7c8bcdb5c01fc512/neo4j-5.28.1.tar.gz", hash = "sha256:ae8e37a1d895099062c75bc359b2cce62099baac7be768d0eba7180c1298e214", size = 231094, upload-time = "2025-02-10T08:36:22.566Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/57/94225fe5e9dabdc0ff60c88cbfcedf11277f4b34e7ab1373d3e62dbdd207/neo4j-5.28.1-py3-none-any.whl", hash = "sha256:6755ef9e5f4e14b403aef1138fb6315b120631a0075c138b5ddb2a06b87b09fd", size = 312258, upload-time = "2025-02-10T08:36:16.209Z" }, -] - [[package]] name = "nest-asyncio" version = "1.6.0" @@ -2015,6 +1965,19 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102, upload-time = "2024-12-18T11:28:28.593Z" }, ] +[[package]] +name = "pydantic-settings" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b5/d4/9dfbe238f45ad8b168f5c96ee49a3df0598ce18a0795a983b419949ce65b/pydantic_settings-2.6.1.tar.gz", hash = "sha256:e0f92546d8a9923cb8941689abf85d6601a8c19a23e97a34b2964a2e3f813ca0", size = 75646, upload-time = "2024-11-01T11:00:05.17Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/f9/ff95fd7d760af42f647ea87f9b8a383d891cdb5e5dbd4613edaeb094252a/pydantic_settings-2.6.1-py3-none-any.whl", hash = "sha256:7fb0637c786a558d3103436278a7c4f1cfd29ba8973238a50c5bb9a55387da87", size = 28595, upload-time = "2024-11-01T11:00:02.64Z" }, +] + [[package]] name = "pydub" 
version = "0.25.1" From d110a31757ded71a3a31c684b4b4493a3d54c027 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Tue, 11 Nov 2025 20:09:01 +0000 Subject: [PATCH 17/23] wip --- echo/server/.env.sample | 41 +- echo/server/AGENTS.md | 3 + echo/server/dembrane/api/api.py | 2 - echo/server/dembrane/api/chat.py | 29 +- echo/server/dembrane/api/conversation.py | 8 +- echo/server/dembrane/api/dependency_auth.py | 4 +- echo/server/dembrane/api/participant.py | 4 +- echo/server/dembrane/api/verify.py | 2 +- echo/server/dembrane/audio_utils.py | 4 +- echo/server/dembrane/chat_utils.py | 11 +- echo/server/dembrane/database.py | 2 +- echo/server/dembrane/directus.py | 6 +- echo/server/dembrane/embedding.py | 24 +- echo/server/dembrane/llms.py | 70 +--- echo/server/dembrane/main.py | 8 +- echo/server/dembrane/prompts.py | 90 ++--- echo/server/dembrane/reply_utils.py | 21 +- echo/server/dembrane/report_utils.py | 34 +- echo/server/dembrane/s3.py | 10 +- echo/server/dembrane/scheduler.py | 2 +- echo/server/dembrane/sentry.py | 4 +- echo/server/dembrane/settings.py | 355 ++++++++++++++---- echo/server/dembrane/tasks.py | 27 +- echo/server/dembrane/transcribe.py | 56 +-- echo/server/tests/test_audio_utils.py | 4 +- echo/server/tests/test_transcribe_assembly.py | 6 +- 26 files changed, 472 insertions(+), 355 deletions(-) diff --git a/echo/server/.env.sample b/echo/server/.env.sample index 04678eb5..085d16fe 100644 --- a/echo/server/.env.sample +++ b/echo/server/.env.sample @@ -36,31 +36,40 @@ DISABLE_SENTRY=0 # Transcription providers ############################################################ TRANSCRIPTION_PROVIDER= -ENABLE_ASSEMBLYAI_TRANSCRIPTION=0 ASSEMBLYAI_API_KEY= ASSEMBLYAI_BASE_URL=https://api.eu.assemblyai.com -ENABLE_LITELLM_WHISPER_TRANSCRIPTION=0 + +# LiteLLM transcription (used when TRANSCRIPTION_PROVIDER=LiteLLM) +LITELLM_TRANSCRIPTION_MODEL=whisper-1 +LITELLM_TRANSCRIPTION_API_KEY= +LITELLM_TRANSCRIPTION_API_BASE=https://api.openai.com/v1 
+LITELLM_TRANSCRIPTION_API_VERSION=2024-02-01 # Raw JSON or base64-encoded service account (set when TRANSCRIPTION_PROVIDER requires GCP) GCP_SA_JSON= ############################################################ -# LLM configuration (LiteLLM Configuration) +# LLM configuration (three model groups) ############################################################ -# Multi-modal Pro – high-context reasoning (Gemini Pro, Claude, etc.) -LLM__MULTI_MODAL_PRO__MODEL=gemini-2.0-pro -LLM__MULTI_MODAL_PRO__API_KEY= -LLM__MULTI_MODAL_PRO__API_BASE=https://generativelanguage.googleapis.com/v1beta +# Multi-modal Pro – high-context reasoning (Gemini Pro on Vertex) +LLM__MULTI_MODAL_PRO__MODEL=vertex_ai/gemini-2.5-pro +LLM__MULTI_MODAL_PRO__API_BASE=https://europe-west1-aiplatform.googleapis.com LLM__MULTI_MODAL_PRO__API_VERSION= -# Multi-modal Fast – streaming / whisper (Gemini Flash, etc.) -LLM__MULTI_MODAL_FAST__MODEL=gemini-2.0-flash -LLM__MULTI_MODAL_FAST__API_KEY= -LLM__MULTI_MODAL_FAST__API_BASE=https://generativelanguage.googleapis.com/v1beta +# Multi-modal Fast – Gemini Flash (Vertex) +LLM__MULTI_MODAL_FAST__MODEL=vertex_ai/gemini-2.5-flash +LLM__MULTI_MODAL_FAST__API_BASE=https://europe-west1-aiplatform.googleapis.com LLM__MULTI_MODAL_FAST__API_VERSION= -# Text Fast – lightweight text-only model (GPT-4o mini, etc.) 
-LLM__TEXT_FAST__MODEL=gpt-4o-mini -LLM__TEXT_FAST__API_KEY= -LLM__TEXT_FAST__API_BASE=https://api.openai.com/v1 -LLM__TEXT_FAST__API_VERSION=2024-02-01 +# Text Fast – Claude Sonnet on Vertex +LLM__TEXT_FAST__MODEL=vertex_ai/claude-3-5-sonnet-20241022 +LLM__TEXT_FAST__API_BASE=https://europe-west1-aiplatform.googleapis.com +LLM__TEXT_FAST__API_VERSION= + +############################################################ +# Embedding configuration +############################################################ +EMBEDDING_MODEL=vertex_ai/text-embedding-004 +EMBEDDING_API_KEY= +EMBEDDING_BASE_URL=https://europe-west1-aiplatform.googleapis.com +EMBEDDING_API_VERSION= diff --git a/echo/server/AGENTS.md b/echo/server/AGENTS.md index 064f0704..d201e2b4 100644 --- a/echo/server/AGENTS.md +++ b/echo/server/AGENTS.md @@ -21,6 +21,9 @@ Last updated: 2025-11-07T08:32:55Z # Repeating Patterns - `uv run` wraps all local entry points (uvicorn, python modules, dramatiq runners) to ensure env + dependencies stay consistent. Prefer this manager whenever spawning dev services. - For API handlers, favor Directus queries over raw SQLAlchemy sessions when reading project/conversation data to keep behavior consistent with the admin console. +- Config changes live in `dembrane/settings.py`: add new env vars as fields on `AppSettings`, expose grouped accessors (e.g., `feature_flags`, `directus`) if multiple modules read them, and fetch config at runtime with `settings = get_settings()`—never import env vars directly. +- Embeddings use `settings.embedding`; populate `EMBEDDING_*` env vars (model, key/base URL/version) before calling `dembrane.embedding.embed_text`. +- Ongoing clean-up: Several legacy modules and JSON templates were removed; see the pruning checklist (note 2025-11-11) before reviving anything. 
# Change Hotspots (last 90 days) - High-churn (watch for conflicts): `echo/server/dembrane/tasks.py`, `echo/server/dembrane/transcribe.py`, `echo/server/pyproject.toml` diff --git a/echo/server/dembrane/api/api.py b/echo/server/dembrane/api/api.py index ba16baca..afeeb0c5 100644 --- a/echo/server/dembrane/api/api.py +++ b/echo/server/dembrane/api/api.py @@ -7,7 +7,6 @@ from dembrane.api.chat import ChatRouter from dembrane.api.verify import VerifyRouter from dembrane.api.project import ProjectRouter -from dembrane.api.resource import ResourceRouter from dembrane.api.stateless import StatelessRouter from dembrane.api.participant import ParticipantRouter from dembrane.api.conversation import ConversationRouter @@ -24,7 +23,6 @@ async def health() -> dict: api.include_router(ChatRouter, prefix="/chats") api.include_router(ProjectRouter, prefix="/projects") -api.include_router(ResourceRouter, prefix="/resources") api.include_router(ParticipantRouter, prefix="/participant") api.include_router(ConversationRouter, prefix="/conversations") api.include_router(StatelessRouter, prefix="/stateless") diff --git a/echo/server/dembrane/api/chat.py b/echo/server/dembrane/api/chat.py index 7b9eba59..50996bf1 100644 --- a/echo/server/dembrane/api/chat.py +++ b/echo/server/dembrane/api/chat.py @@ -5,12 +5,13 @@ from typing import Any, Dict, List, Literal, Optional, AsyncGenerator import litellm +from litellm.utils import token_counter from fastapi import Query, APIRouter, HTTPException from pydantic import BaseModel from sqlalchemy.orm import selectinload from fastapi.responses import StreamingResponse -from dembrane.llms import MODELS, count_tokens, get_completion_kwargs +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.utils import generate_uuid, get_utc_timestamp from dembrane.settings import get_settings from dembrane.prompts import render_prompt @@ -37,7 +38,7 @@ logger = logging.getLogger("dembrane.chat") settings = get_settings() -ENABLE_CHAT_AUTO_SELECT = 
settings.enable_chat_auto_select +ENABLE_CHAT_AUTO_SELECT = settings.feature_flags.enable_chat_auto_select async def is_followup_question( @@ -173,9 +174,9 @@ async def get_chat_context( if message.message_from in ["user", "assistant"]: # if tokens_count is not set, set it if message.tokens_count is None: - message.tokens_count = count_tokens( - MODELS.TEXT_FAST, - [{"role": message.message_from, "content": message.text}], + message.tokens_count = token_counter( + messages=[{"role": message.message_from, "content": message.text}], + **get_completion_kwargs(MODELS.TEXT_FAST), ) db.commit() @@ -555,9 +556,9 @@ async def post_chat( ] + conversation_history # Check context length - prompt_len = count_tokens( - MODELS.MULTI_MODAL_PRO, - formatted_messages, + prompt_len = token_counter( + messages=formatted_messages, + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) if prompt_len > MAX_CHAT_CONTEXT_LENGTH: @@ -615,9 +616,9 @@ async def post_chat( ] + conversation_history # Check if adding this conversation would exceed 80% threshold - prompt_len = count_tokens( - MODELS.MULTI_MODAL_PRO, - temp_formatted_messages, + prompt_len = token_counter( + messages=temp_formatted_messages, + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) if prompt_len > MAX_CONTEXT_THRESHOLD: @@ -665,9 +666,9 @@ async def post_chat( ] + conversation_history # Check context length - prompt_len = count_tokens( - MODELS.MULTI_MODAL_PRO, - formatted_messages, + prompt_len = token_counter( + messages=formatted_messages, + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) if prompt_len > MAX_CHAT_CONTEXT_LENGTH: diff --git a/echo/server/dembrane/api/conversation.py b/echo/server/dembrane/api/conversation.py index aa378bfc..4e2a995d 100644 --- a/echo/server/dembrane/api/conversation.py +++ b/echo/server/dembrane/api/conversation.py @@ -8,6 +8,7 @@ from sqlalchemy.orm import noload, selectinload from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.exceptions import 
HTTPException +from litellm.utils import token_counter from litellm.exceptions import ContentPolicyViolationError from dembrane.s3 import get_signed_url @@ -24,6 +25,7 @@ merge_multiple_audio_files_and_save_to_s3, ) from dembrane.reply_utils import generate_reply_for_conversation +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.api.stateless import generate_summary from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ( @@ -430,9 +432,9 @@ async def get_conversation_token_count( # If not in cache, calculate the token count transcript = await get_conversation_transcript(conversation_id, auth) - token_count = count_tokens( - MODELS.MULTI_MODAL_PRO, - [{"role": "user", "content": transcript}], + token_count = token_counter( + messages=[{"role": "user", "content": transcript}], + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) # Store the result in the cache diff --git a/echo/server/dembrane/api/dependency_auth.py b/echo/server/dembrane/api/dependency_auth.py index 5b1cf18d..12658426 100644 --- a/echo/server/dembrane/api/dependency_auth.py +++ b/echo/server/dembrane/api/dependency_auth.py @@ -9,8 +9,8 @@ logger = getLogger("api.session") settings = get_settings() -DIRECTUS_SECRET = settings.directus_secret -DIRECTUS_SESSION_COOKIE_NAME = settings.directus_session_cookie_name +DIRECTUS_SECRET = settings.directus.secret +DIRECTUS_SESSION_COOKIE_NAME = settings.directus.session_cookie_name class DirectusSession: diff --git a/echo/server/dembrane/api/participant.py b/echo/server/dembrane/api/participant.py index 2183d6c5..35903bc8 100644 --- a/echo/server/dembrane/api/participant.py +++ b/echo/server/dembrane/api/participant.py @@ -25,8 +25,8 @@ ParticipantRouter = APIRouter(tags=["participant"]) settings = get_settings() -STORAGE_S3_BUCKET = settings.storage_s3_bucket -STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint +STORAGE_S3_BUCKET = settings.storage.bucket +STORAGE_S3_ENDPOINT = settings.storage.endpoint 
class PublicProjectTagSchema(BaseModel): diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 821995a6..a65247fd 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -21,7 +21,7 @@ logger = logging.getLogger("api.verify") settings = get_settings() -GCP_SA_JSON = settings.gcp_sa_json +GCP_SA_JSON = settings.transcription.gcp_sa_json VerifyRouter = APIRouter(tags=["verify"]) diff --git a/echo/server/dembrane/audio_utils.py b/echo/server/dembrane/audio_utils.py index 1fa3418f..548ae46d 100644 --- a/echo/server/dembrane/audio_utils.py +++ b/echo/server/dembrane/audio_utils.py @@ -95,8 +95,8 @@ class FileTooSmallError(Exception): settings = get_settings() -STORAGE_S3_BUCKET = settings.storage_s3_bucket -STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint +STORAGE_S3_BUCKET = settings.storage.bucket +STORAGE_S3_ENDPOINT = settings.storage.endpoint def convert_and_save_to_s3( diff --git a/echo/server/dembrane/chat_utils.py b/echo/server/dembrane/chat_utils.py index 0a03270a..74901ddb 100644 --- a/echo/server/dembrane/chat_utils.py +++ b/echo/server/dembrane/chat_utils.py @@ -6,6 +6,7 @@ import backoff from litellm import acompletion +from litellm.utils import token_counter from pydantic import BaseModel from sqlalchemy.orm import Session, selectinload from litellm.exceptions import ( @@ -16,7 +17,7 @@ ContextWindowExceededError, ) -from dembrane.llms import MODELS, count_tokens, get_completion_kwargs +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.settings import get_settings from dembrane.prompts import render_prompt from dembrane.database import ConversationModel, ProjectChatMessageModel @@ -29,7 +30,7 @@ logger = logging.getLogger("chat_utils") settings = get_settings() -DISABLE_CHAT_TITLE_GENERATION = settings.disable_chat_title_generation +DISABLE_CHAT_TITLE_GENERATION = settings.feature_flags.disable_chat_title_generation class ClientAttachment(BaseModel): @@ -421,9 
+422,9 @@ async def _process_single_batch( # Validate prompt size before sending try: - prompt_tokens = count_tokens( - MODELS.TEXT_FAST, - [{"role": "user", "content": prompt}], + prompt_tokens = token_counter( + messages=[{"role": "user", "content": prompt}], + **get_completion_kwargs(MODELS.TEXT_FAST), ) MAX_BATCH_CONTEXT = 100000 # Leave headroom for response diff --git a/echo/server/dembrane/database.py b/echo/server/dembrane/database.py index 118ef22d..87c55d9e 100644 --- a/echo/server/dembrane/database.py +++ b/echo/server/dembrane/database.py @@ -38,7 +38,7 @@ # Create the engine and connect to the SQLite database file settings = get_settings() -DATABASE_URL = settings.database_url +DATABASE_URL = settings.database.database_url logger.debug(f"Connecting to database: {DATABASE_URL}") engine = create_engine(DATABASE_URL) diff --git a/echo/server/dembrane/directus.py b/echo/server/dembrane/directus.py index d8b0ce5c..57b49527 100644 --- a/echo/server/dembrane/directus.py +++ b/echo/server/dembrane/directus.py @@ -10,11 +10,9 @@ logger = getLogger("directus") settings = get_settings() -directus_token = settings.directus_token -if directus_token: - logger.debug("Directus token retrieved from settings") +directus_token = settings.directus.token -directus = DirectusClient(url=settings.directus_base_url, token=directus_token) +directus = DirectusClient(url=settings.directus.base_url, token=directus_token) class DirectusGenericException(Exception): diff --git a/echo/server/dembrane/embedding.py b/echo/server/dembrane/embedding.py index f04fe175..e02024a9 100644 --- a/echo/server/dembrane/embedding.py +++ b/echo/server/dembrane/embedding.py @@ -4,26 +4,36 @@ import backoff import litellm -from dembrane.llms import MODELS, resolve_config +from dembrane.settings import get_settings EMBEDDING_DIM = 3072 logger = logging.getLogger("embedding") logger.setLevel(logging.DEBUG) +settings = get_settings() +embedding_settings = settings.embedding + 
@backoff.on_exception(backoff.expo, (Exception), max_tries=5) def embed_text(text: str) -> List[float]: text = text.replace("\n", " ").strip() try: - config = resolve_config(MODELS.MULTI_MODAL_PRO) - if not config.model: + if not embedding_settings.model: raise ValueError("Embedding model is not configured.") + + embedding_kwargs = { + "model": embedding_settings.model, + } + if embedding_settings.api_key: + embedding_kwargs["api_key"] = embedding_settings.api_key + if embedding_settings.base_url: + embedding_kwargs["api_base"] = embedding_settings.base_url + if embedding_settings.api_version: + embedding_kwargs["api_version"] = embedding_settings.api_version + response = litellm.embedding( - api_key=config.api_key, - api_base=config.api_base, - api_version=config.api_version, - model=config.model, + **embedding_kwargs, input=text, ) return response["data"][0]["embedding"] diff --git a/echo/server/dembrane/llms.py b/echo/server/dembrane/llms.py index 89879eac..56a1152d 100644 --- a/echo/server/dembrane/llms.py +++ b/echo/server/dembrane/llms.py @@ -2,11 +2,9 @@ import logging from enum import Enum -from typing import Any, Dict, Mapping, Optional, Sequence +from typing import Any, Dict -import litellm - -from dembrane.settings import LLMProviderConfig, ResolvedLLMConfig, get_settings +from dembrane.settings import get_settings logger = logging.getLogger(__name__) @@ -24,66 +22,30 @@ class MODELS(Enum): } -def _get_provider_config(model: MODELS) -> LLMProviderConfig: +def get_completion_kwargs(model: MODELS, **overrides: Any) -> Dict[str, Any]: + """ + Return the kwargs to pass into LiteLLM helpers for a configured model. 
+ """ settings = get_settings() attr = MODEL_REGISTRY[model]["settings_attr"] provider = getattr(settings.llms, attr, None) if provider is None: raise ValueError(f"No configuration found for model group {model.value}.") - return provider - - -def resolve_config(model: MODELS) -> ResolvedLLMConfig: - """ - Load the configured model credentials for the requested model group. - """ - provider = _get_provider_config(model) - return provider.resolve() + if not provider.model: + raise ValueError(f"Model name is not configured for {model.value}") + kwargs: Dict[str, Any] = {"model": provider.model} -def get_completion_kwargs(model: MODELS, **overrides: Any) -> Dict[str, Any]: - """ - Return the kwargs to pass into LiteLLM completion helpers for a configured model. - """ - resolved = resolve_config(model) - kwargs: Dict[str, Any] = {"model": resolved.model} - - if resolved.api_key: - kwargs["api_key"] = resolved.api_key - if resolved.api_base: - kwargs["api_base"] = resolved.api_base - if resolved.api_version: - kwargs["api_version"] = resolved.api_version + if provider.api_key: + kwargs["api_key"] = provider.api_key + if provider.api_base: + kwargs["api_base"] = provider.api_base + if provider.api_version: + kwargs["api_version"] = provider.api_version # Allow callers to override any field (e.g., temperature, max_tokens) kwargs.update(overrides) return kwargs -def count_tokens( - model: MODELS, - messages: Optional[Sequence[Mapping[str, Any]]] = None, - *, - text: Optional[str | Sequence[str]] = None, - **litellm_kwargs: Any, -) -> int: - """ - Count prompt tokens using the tokenizer associated with the configured model. 
- """ - resolved = resolve_config(model) - try: - return litellm.token_counter( - model=resolved.model, - messages=list(messages) if messages is not None else None, - text=text, - **litellm_kwargs, - ) - except Exception as exc: # pragma: no cover - defensive logging - logger.debug( - "Failed to count tokens", - extra={"model": resolved.model, "error": str(exc)}, - ) - raise - - -__all__ = ["MODELS", "resolve_config", "get_completion_kwargs", "count_tokens"] +__all__ = ["MODELS", "get_completion_kwargs"] diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index 7b4fbcf2..098467a6 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -25,10 +25,10 @@ logger = getLogger("server") settings = get_settings() -DISABLE_CORS = settings.disable_cors -ADMIN_BASE_URL = str(settings.admin_base_url) -PARTICIPANT_BASE_URL = str(settings.participant_base_url) -SERVE_API_DOCS = settings.serve_api_docs +DISABLE_CORS = settings.feature_flags.disable_cors +ADMIN_BASE_URL = str(settings.urls.admin_base_url) +PARTICIPANT_BASE_URL = str(settings.urls.participant_base_url) +SERVE_API_DOCS = settings.feature_flags.serve_api_docs @asynccontextmanager diff --git a/echo/server/dembrane/prompts.py b/echo/server/dembrane/prompts.py index 17160987..146c7bf8 100644 --- a/echo/server/dembrane/prompts.py +++ b/echo/server/dembrane/prompts.py @@ -13,30 +13,40 @@ PROMPT_TEMPLATE_LIST (list[str]): List of available template filenames """ -import os -import json import logging -from typing import Any, Optional +from typing import Any from collections import defaultdict +from pathlib import Path -from jinja2 import Environment, FileSystemLoader, select_autoescape +from jinja2 import Environment, FileSystemLoader, DictLoader, select_autoescape from dembrane.settings import get_settings settings = get_settings() -JSON_TEMPLATES_DIR = settings.json_templates_dir PROMPT_TEMPLATES_DIR = settings.prompt_templates_dir logger = logging.getLogger("prompts") 
-prompt_env = Environment( - loader=FileSystemLoader(PROMPT_TEMPLATES_DIR), autoescape=select_autoescape() -) +prompt_templates_path = Path(PROMPT_TEMPLATES_DIR) +if prompt_templates_path.exists() and prompt_templates_path.is_dir(): + prompt_loader = FileSystemLoader(prompt_templates_path) + prompt_template_names = [ + entry.name + for entry in prompt_templates_path.iterdir() + if entry.is_file() and entry.name.endswith(".jinja") + ] +else: + logger.warning( + "Prompt templates directory %s not found; continuing with empty template set", + prompt_templates_path, + ) + prompt_loader = DictLoader({}) + prompt_template_names = [] + +prompt_env = Environment(loader=prompt_loader, autoescape=select_autoescape()) # Load all the files from PROMPT_TEMPLATES_DIR that end with .jinja -PROMPT_TEMPLATE_LIST = [ - f.name for f in os.scandir(PROMPT_TEMPLATES_DIR) if f.is_file() and f.name.endswith(".jinja") -] +PROMPT_TEMPLATE_LIST = prompt_template_names # Create a dictionary to map template names to their supported languages template_support = defaultdict(set) @@ -93,61 +103,3 @@ def render_prompt(prompt_name: str, language: str, kwargs: dict[str, Any]) -> st template = prompt_env.get_template(full_prompt_name) return template.render(**kwargs) - - -JSON_TEMPLATE_LIST = [ - f.name for f in os.scandir(JSON_TEMPLATES_DIR) if f.is_file() and f.name.endswith(".jinja") -] - -json_env = Environment(loader=FileSystemLoader(JSON_TEMPLATES_DIR), autoescape=select_autoescape()) - -for name in set(JSON_TEMPLATE_LIST): - logger.info(f"JSON template {name} found in {JSON_TEMPLATES_DIR}") - - -def render_json( - prompt_name: str, - language: str, - kwargs: dict[str, Any], - # json keys to validate - keys_to_validate: Optional[list[str]] = None, -) -> dict[str, Any]: - """Render a message template with the given arguments and return a dictionary object. 
- - Args: - prompt_name: Name of the prompt template file (without .jinja extension) - language: ISO 639-1 language code of the prompt template file (example: "en", "nl", "fr", "es", "de". etc.) - kwargs: Dictionary of arguments to pass to the template renderer - keys_to_validate: List of keys to validate in the message - - """ - if keys_to_validate is None: - keys_to_validate = [] - full_json_template_name = f"{prompt_name}.{language}.jinja" - if full_json_template_name not in JSON_TEMPLATE_LIST: - default_json_template_name = f"{prompt_name}.en.jinja" - if default_json_template_name in JSON_TEMPLATE_LIST: - logger.warning( - f"JSON template {full_json_template_name} not found, using default {default_json_template_name}." - ) - full_json_template_name = default_json_template_name - else: - raise ValueError( - f"JSON template {full_json_template_name} not found and no default available" - ) - template = json_env.get_template(full_json_template_name) - rendered_prompt = template.render(**kwargs) - try: - message = json.loads(rendered_prompt) - except json.JSONDecodeError as e: - logger.error(f"Failed to parse JSON from rendered prompt: {rendered_prompt}") - raise ValueError(f"Error: {e}") from e - - missing_keys = [key for key in keys_to_validate if key not in message] - if missing_keys: - raise ValueError( - f"Missing keys in message: {missing_keys}. Please check the prompt template: {prompt_name}. 
\n" - f"Message: {message}" - ) - - return message diff --git a/echo/server/dembrane/reply_utils.py b/echo/server/dembrane/reply_utils.py index fb7242d6..994d1e26 100644 --- a/echo/server/dembrane/reply_utils.py +++ b/echo/server/dembrane/reply_utils.py @@ -4,10 +4,11 @@ import sentry_sdk from litellm import acompletion +from litellm.utils import token_counter from pydantic import BaseModel from litellm.exceptions import ContentPolicyViolationError -from dembrane.llms import MODELS, count_tokens, get_completion_kwargs +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.prompts import render_prompt from dembrane.directus import directus @@ -234,9 +235,9 @@ async def generate_reply_for_conversation( # Check tokens for this conversation formatted_conv = format_conversation(c) - tokens = count_tokens( - MODELS.TEXT_FAST, - [{"role": "user", "content": formatted_conv}], + tokens = token_counter( + messages=[{"role": "user", "content": formatted_conv}], + **get_completion_kwargs(MODELS.TEXT_FAST), ) candidate_conversations.append((formatted_conv, tokens)) @@ -257,9 +258,9 @@ async def generate_reply_for_conversation( # First check tokens for this conversation formatted_conv = format_conversation(c) - tokens = count_tokens( - MODELS.TEXT_FAST, - [{"role": "user", "content": formatted_conv}], + tokens = token_counter( + messages=[{"role": "user", "content": formatted_conv}], + **get_completion_kwargs(MODELS.TEXT_FAST), ) # If conversation is too large, truncate it @@ -269,9 +270,9 @@ async def generate_reply_for_conversation( truncated_transcript = c.transcript[: int(len(c.transcript) * truncation_ratio)] c.transcript = truncated_transcript + "\n[Truncated for brevity...]" formatted_conv = format_conversation(c) - tokens = count_tokens( - MODELS.TEXT_FAST, - [{"role": "user", "content": formatted_conv}], + tokens = token_counter( + messages=[{"role": "user", "content": formatted_conv}], + **get_completion_kwargs(MODELS.TEXT_FAST), ) 
candidate_conversations.append((formatted_conv, tokens)) diff --git a/echo/server/dembrane/report_utils.py b/echo/server/dembrane/report_utils.py index 7ae98b19..6906b506 100644 --- a/echo/server/dembrane/report_utils.py +++ b/echo/server/dembrane/report_utils.py @@ -2,8 +2,9 @@ import logging from litellm import completion +from litellm.utils import get_max_tokens, token_counter -from dembrane.llms import MODELS, count_tokens, get_completion_kwargs, resolve_config +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.prompts import render_prompt from dembrane.directus import directus from dembrane.api.conversation import get_conversation_transcript @@ -11,14 +12,21 @@ logger = logging.getLogger("report_utils") -TEXT_PROVIDER_CONFIG = resolve_config(MODELS.TEXT_FAST) +TEXT_PROVIDER_KWARGS = get_completion_kwargs(MODELS.TEXT_FAST) +TEXT_PROVIDER_MODEL = TEXT_PROVIDER_KWARGS["model"] +TOKEN_COUNT_KWARGS = TEXT_PROVIDER_KWARGS.copy() -if "4.1" in str(TEXT_PROVIDER_CONFIG.model): - logger.info("using 700k context length for report") - MAX_REPORT_CONTEXT_LENGTH = 700000 +_max_tokens = get_max_tokens(TEXT_PROVIDER_MODEL) + +if _max_tokens is None: + logger.error(f"Could not get max tokens for model {TEXT_PROVIDER_MODEL}") + MAX_REPORT_CONTEXT_LENGTH = 128000 # good default else: - logger.info("using 128k context length for report") - MAX_REPORT_CONTEXT_LENGTH = 128000 + MAX_REPORT_CONTEXT_LENGTH = int(_max_tokens * 0.8) + +logger.info( + f"Using {TEXT_PROVIDER_MODEL} for report generation with context length {MAX_REPORT_CONTEXT_LENGTH}" +) class ContextTooLongException(Exception): @@ -64,9 +72,9 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: continue # Count tokens before adding - summary_tokens = count_tokens( - MODELS.TEXT_FAST, - [{"role": "user", "content": conversation["summary"]}], + summary_tokens = token_counter( + messages=[{"role": "user", "content": conversation["summary"]}], + **TOKEN_COUNT_KWARGS, ) # 
Check if adding this conversation would exceed the limit @@ -122,9 +130,9 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: continue # Calculate token count for the transcript - transcript_tokens = count_tokens( - MODELS.TEXT_FAST, - [{"role": "user", "content": transcript}], + transcript_tokens = token_counter( + messages=[{"role": "user", "content": transcript}], + **TOKEN_COUNT_KWARGS, ) if token_count + transcript_tokens < MAX_REPORT_CONTEXT_LENGTH: diff --git a/echo/server/dembrane/s3.py b/echo/server/dembrane/s3.py index b9db4a7f..ee05a4fb 100644 --- a/echo/server/dembrane/s3.py +++ b/echo/server/dembrane/s3.py @@ -56,11 +56,11 @@ logger = logging.getLogger("s3") settings = get_settings() -STORAGE_S3_KEY = settings.storage_s3_key -STORAGE_S3_BUCKET = settings.storage_s3_bucket -STORAGE_S3_REGION = settings.storage_s3_region -STORAGE_S3_SECRET = settings.storage_s3_secret -STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint +STORAGE_S3_KEY = settings.storage.key +STORAGE_S3_BUCKET = settings.storage.bucket +STORAGE_S3_REGION = settings.storage.region +STORAGE_S3_SECRET = settings.storage.secret +STORAGE_S3_ENDPOINT = settings.storage.endpoint session = boto3.session.Session() diff --git a/echo/server/dembrane/scheduler.py b/echo/server/dembrane/scheduler.py index 5e123837..6d7ea29b 100644 --- a/echo/server/dembrane/scheduler.py +++ b/echo/server/dembrane/scheduler.py @@ -15,7 +15,7 @@ scheduler.configure(jobstores=jobstores, timezone=utc) settings = get_settings() -DEBUG_MODE = settings.debug_mode +DEBUG_MODE = settings.feature_flags.debug_mode # Add periodic tasks scheduler.add_job( diff --git a/echo/server/dembrane/sentry.py b/echo/server/dembrane/sentry.py index 1b05fc96..690256b9 100644 --- a/echo/server/dembrane/sentry.py +++ b/echo/server/dembrane/sentry.py @@ -10,8 +10,8 @@ ATTEMPTED_SENTRY_INIT = False settings = get_settings() ENVIRONMENT = settings.environment -BUILD_VERSION = settings.build_version -DISABLE_SENTRY = 
settings.disable_sentry +BUILD_VERSION = settings.build.build_version +DISABLE_SENTRY = settings.feature_flags.disable_sentry def init_sentry() -> None: diff --git a/echo/server/dembrane/settings.py b/echo/server/dembrane/settings.py index a977a194..19a0c0df 100644 --- a/echo/server/dembrane/settings.py +++ b/echo/server/dembrane/settings.py @@ -1,9 +1,14 @@ """ -Centralized application settings loaded from environment variables. - -This module replaces the legacy ``dembrane.config`` globals with a single -typed settings object. Consumers should call ``get_settings()`` and read the -fields they need instead of importing environment variables directly. +Centralized application settings grouped into cohesive sections. + +Each section is responsible for loading its own environment variables via +``pydantic-settings``. The top-level ``AppSettings`` simply aggregates these +sections and exposes a friendly, typed surface area for the rest of the app. +# NOTE: Each field keeps aliases for both legacy flat env vars and the new +# namespaced form so existing deployments keep working. Update AGENTS.md if new +# patterns emerge. +# TODO(settings): drop the legacy env aliases once infra uses the namespaced +# variables everywhere. 
""" from __future__ import annotations @@ -11,14 +16,14 @@ import json import base64 import logging -from typing import Any, Dict, Literal, Optional, Mapping +from typing import Any, Dict, Literal, Optional from pathlib import Path from functools import lru_cache -from pydantic import Field, BaseModel, field_validator, model_validator +from pydantic import Field, BaseModel, AliasChoices, field_validator from pydantic_settings import BaseSettings, SettingsConfigDict -TranscriptionProvider = Optional[Literal["LiteLLM", "AssemblyAI", "Dembrane-25-09"]] +TranscriptionProvider = Literal["LiteLLM", "AssemblyAI", "Dembrane-25-09"] class ResolvedLLMConfig(BaseModel): @@ -60,69 +65,117 @@ class LLMSettings(BaseSettings): text_fast: LLMProviderConfig = Field(default_factory=LLMProviderConfig) -class AppSettings(BaseSettings): - """ - All environment-driven configuration for the Dembrane ECHO server. - """ - +class BuildSettings(BaseSettings): model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) - # General application configuration - base_dir: Path = Field(default_factory=lambda: Path(__file__).resolve().parent.parent) - build_version: str = Field(default="dev", alias="BUILD_VERSION") - api_base_url: str = Field(default="http://localhost:8000", alias="API_BASE_URL") - admin_base_url: str = Field(default="http://localhost:3000", alias="ADMIN_BASE_URL") - participant_base_url: str = Field(default="http://localhost:3001", alias="PARTICIPANT_BASE_URL") - - # Features - debug_mode: bool = Field(default=False, alias="DEBUG_MODE") - disable_cors: bool = Field(default=False, alias="DISABLE_CORS") - disable_redaction: bool = Field(default=False, alias="DISABLE_REDACTION") - disable_chat_title_generation: bool = Field( - default=False, alias="DISABLE_CHAT_TITLE_GENERATION" + build_version: str = Field( + default="dev", + alias="BUILD_VERSION", + validation_alias=AliasChoices("BUILD_VERSION", "BUILD__VERSION"), ) - enable_chat_auto_select: bool = 
Field(default=False, alias="ENABLE_CHAT_AUTO_SELECT") - serve_api_docs: bool = Field(default=False, alias="SERVE_API_DOCS") - disable_sentry: bool = Field(default=False, alias="DISABLE_SENTRY") - # Directus / database / cache / storage - directus_base_url: str = Field(default="http://directus:8055", alias="DIRECTUS_BASE_URL") - directus_secret: str = Field(..., alias="DIRECTUS_SECRET") - directus_token: str = Field(..., alias="DIRECTUS_TOKEN") - directus_session_cookie_name: str = Field( - default="directus_session_token", alias="DIRECTUS_SESSION_COOKIE_NAME" + +class URLSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) + + api_base_url: str = Field( + default="http://localhost:8000", + alias="API_BASE_URL", + validation_alias=AliasChoices("API_BASE_URL", "URLS__API_BASE_URL"), + ) + admin_base_url: str = Field( + default="http://localhost:3000", + alias="ADMIN_BASE_URL", + validation_alias=AliasChoices("ADMIN_BASE_URL", "URLS__ADMIN_BASE_URL"), + ) + participant_base_url: str = Field( + default="http://localhost:3001", + alias="PARTICIPANT_BASE_URL", + validation_alias=AliasChoices("PARTICIPANT_BASE_URL", "URLS__PARTICIPANT_BASE_URL"), ) - database_url: str = Field(..., alias="DATABASE_URL") - redis_url: str = Field(..., alias="REDIS_URL") - storage_s3_bucket: str = Field(..., alias="STORAGE_S3_BUCKET") - storage_s3_region: Optional[str] = Field(default=None, alias="STORAGE_S3_REGION") - storage_s3_endpoint: str = Field(..., alias="STORAGE_S3_ENDPOINT") - storage_s3_key: str = Field(..., alias="STORAGE_S3_KEY") - storage_s3_secret: str = Field(..., alias="STORAGE_S3_SECRET") +class FeatureFlagSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) - # Transcription providers - transcription_provider: TranscriptionProvider = Field( - default=None, alias="TRANSCRIPTION_PROVIDER" + debug_mode: bool = Field( + default=False, + alias="DEBUG_MODE", 
+ validation_alias=AliasChoices("DEBUG_MODE", "FEATURE_FLAGS__DEBUG_MODE"), ) - gcp_sa_json: Optional[Dict[str, Any]] = Field(default=None, alias="GCP_SA_JSON") - - enable_assemblyai_transcription: bool = Field( - default=False, alias="ENABLE_ASSEMBLYAI_TRANSCRIPTION" + disable_cors: bool = Field( + default=False, + alias="DISABLE_CORS", + validation_alias=AliasChoices("DISABLE_CORS", "FEATURE_FLAGS__DISABLE_CORS"), ) - assemblyai_api_key: Optional[str] = Field(default=None, alias="ASSEMBLYAI_API_KEY") - assemblyai_base_url: str = Field( - default="https://api.eu.assemblyai.com", alias="ASSEMBLYAI_BASE_URL" + disable_redaction: bool = Field( + default=False, + alias="DISABLE_REDACTION", + validation_alias=AliasChoices("DISABLE_REDACTION", "FEATURE_FLAGS__DISABLE_REDACTION"), + ) + disable_chat_title_generation: bool = Field( + default=False, + alias="DISABLE_CHAT_TITLE_GENERATION", + validation_alias=AliasChoices( + "DISABLE_CHAT_TITLE_GENERATION", "FEATURE_FLAGS__DISABLE_CHAT_TITLE_GENERATION" + ), ) + enable_chat_auto_select: bool = Field( + default=False, + alias="ENABLE_CHAT_AUTO_SELECT", + validation_alias=AliasChoices( + "ENABLE_CHAT_AUTO_SELECT", "FEATURE_FLAGS__ENABLE_CHAT_AUTO_SELECT" + ), + ) + serve_api_docs: bool = Field( + default=False, + alias="SERVE_API_DOCS", + validation_alias=AliasChoices("SERVE_API_DOCS", "FEATURE_FLAGS__SERVE_API_DOCS"), + ) + disable_sentry: bool = Field( + default=False, + alias="DISABLE_SENTRY", + validation_alias=AliasChoices("DISABLE_SENTRY", "FEATURE_FLAGS__DISABLE_SENTRY"), + ) + + +class DirectusSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) - enable_litellm_whisper_transcription: bool = Field( - default=False, alias="ENABLE_LITELLM_WHISPER_TRANSCRIPTION" + base_url: str = Field( + default="http://directus:8055", + alias="DIRECTUS_BASE_URL", + validation_alias=AliasChoices("DIRECTUS_BASE_URL", "DIRECTUS__BASE_URL"), + ) + secret: str = Field( + ..., + 
alias="DIRECTUS_SECRET", + validation_alias=AliasChoices("DIRECTUS_SECRET", "DIRECTUS__SECRET"), + ) + token: str = Field( + ..., + alias="DIRECTUS_TOKEN", + validation_alias=AliasChoices("DIRECTUS_TOKEN", "DIRECTUS__TOKEN"), ) + session_cookie_name: str = Field( + default="directus_session_token", + alias="DIRECTUS_SESSION_COOKIE_NAME", + validation_alias=AliasChoices( + "DIRECTUS_SESSION_COOKIE_NAME", "DIRECTUS__SESSION_COOKIE_NAME" + ), + ) + - llms: LLMSettings = Field(default_factory=LLMSettings) +class DatabaseSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) + + database_url: str = Field( + ..., + alias="DATABASE_URL", + validation_alias=AliasChoices("DATABASE_URL", "DATABASE__URL"), + ) - @field_validator("database_url", mode="before") + @field_validator("database_url") @classmethod def normalize_database_url(cls, value: str) -> str: if value.startswith("postgresql+psycopg://"): @@ -131,17 +184,135 @@ def normalize_database_url(cls, value: str) -> str: return value.replace("postgresql://", "postgresql+psycopg://", 1) raise ValueError("DATABASE_URL must start with postgresql+psycopg://") + +class CacheSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) + + redis_url: str = Field( + ..., + alias="REDIS_URL", + validation_alias=AliasChoices("REDIS_URL", "CACHE__REDIS_URL"), + ) + + +class StorageSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) + + bucket: str = Field( + ..., + alias="STORAGE_S3_BUCKET", + validation_alias=AliasChoices("STORAGE_S3_BUCKET", "STORAGE__BUCKET"), + ) + region: Optional[str] = Field( + default=None, + alias="STORAGE_S3_REGION", + validation_alias=AliasChoices("STORAGE_S3_REGION", "STORAGE__REGION"), + ) + endpoint: str = Field( + ..., + alias="STORAGE_S3_ENDPOINT", + validation_alias=AliasChoices("STORAGE_S3_ENDPOINT", "STORAGE__ENDPOINT"), + 
) + key: str = Field( + ..., + alias="STORAGE_S3_KEY", + validation_alias=AliasChoices("STORAGE_S3_KEY", "STORAGE__KEY"), + ) + secret: str = Field( + ..., + alias="STORAGE_S3_SECRET", + validation_alias=AliasChoices("STORAGE_S3_SECRET", "STORAGE__SECRET"), + ) + + +class EmbeddingSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) + + model: str = Field( + default="text-embedding-3-small", + alias="EMBEDDING_MODEL", + validation_alias=AliasChoices("EMBEDDING_MODEL", "EMBEDDING__MODEL"), + ) + api_key: Optional[str] = Field( + default=None, + alias="EMBEDDING_API_KEY", + validation_alias=AliasChoices("EMBEDDING_API_KEY", "EMBEDDING__API_KEY"), + ) + base_url: Optional[str] = Field( + default=None, + alias="EMBEDDING_BASE_URL", + validation_alias=AliasChoices( + "EMBEDDING_BASE_URL", + "EMBEDDING_API_BASE", + "EMBEDDING__BASE_URL", + ), + ) + api_version: Optional[str] = Field( + default=None, + alias="EMBEDDING_API_VERSION", + validation_alias=AliasChoices("EMBEDDING_API_VERSION", "EMBEDDING__API_VERSION"), + ) + + +class TranscriptionSettings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore", case_sensitive=False) + + provider: Optional[TranscriptionProvider] = Field( + default=None, + alias="TRANSCRIPTION_PROVIDER", + validation_alias=AliasChoices("TRANSCRIPTION_PROVIDER", "TRANSCRIPTION__PROVIDER"), + ) + gcp_sa_json: Optional[Dict[str, Any]] = Field( + default=None, + alias="GCP_SA_JSON", + validation_alias=AliasChoices("GCP_SA_JSON", "TRANSCRIPTION__GCP_SA_JSON"), + ) + assemblyai_api_key: Optional[str] = Field( + default=None, + alias="ASSEMBLYAI_API_KEY", + validation_alias=AliasChoices("ASSEMBLYAI_API_KEY", "TRANSCRIPTION__ASSEMBLYAI__API_KEY"), + ) + assemblyai_base_url: str = Field( + default="https://api.eu.assemblyai.com", + alias="ASSEMBLYAI_BASE_URL", + validation_alias=AliasChoices("ASSEMBLYAI_BASE_URL", "TRANSCRIPTION__ASSEMBLYAI__BASE_URL"), + ) + 
litellm_model: Optional[str] = Field( + default=None, + alias="LITELLM_TRANSCRIPTION_MODEL", + validation_alias=AliasChoices( + "LITELLM_TRANSCRIPTION_MODEL", "TRANSCRIPTION__LITELLM__MODEL" + ), + ) + litellm_api_key: Optional[str] = Field( + default=None, + alias="LITELLM_TRANSCRIPTION_API_KEY", + validation_alias=AliasChoices( + "LITELLM_TRANSCRIPTION_API_KEY", "TRANSCRIPTION__LITELLM__API_KEY" + ), + ) + litellm_api_base: Optional[str] = Field( + default=None, + alias="LITELLM_TRANSCRIPTION_API_BASE", + validation_alias=AliasChoices( + "LITELLM_TRANSCRIPTION_API_BASE", "TRANSCRIPTION__LITELLM__API_BASE" + ), + ) + litellm_api_version: Optional[str] = Field( + default=None, + alias="LITELLM_TRANSCRIPTION_API_VERSION", + validation_alias=AliasChoices( + "LITELLM_TRANSCRIPTION_API_VERSION", "TRANSCRIPTION__LITELLM__API_VERSION" + ), + ) + @field_validator("gcp_sa_json", mode="before") @classmethod - def parse_gcp_sa_json( - cls, value: Optional[Any] - ) -> Optional[Dict[str, Any]]: + def parse_gcp_sa_json(cls, value: Optional[Any]) -> Optional[Dict[str, Any]]: if value is None: return None - - if isinstance(value, Mapping): - return dict(value) - + if isinstance(value, dict): + return value if isinstance(value, str): trimmed = value.strip() if trimmed in {"", "null", "None"}: @@ -163,48 +334,66 @@ def parse_gcp_sa_json( except (ValueError, json.JSONDecodeError, TypeError) as exc: raise ValueError("GCP_SA_JSON must be valid JSON or base64-encoded JSON") from exc - @model_validator(mode="after") - def validate_transcription_dependencies(self) -> "AppSettings": - if self.enable_assemblyai_transcription and not self.assemblyai_api_key: - raise ValueError( - "ASSEMBLYAI_API_KEY must be set when AssemblyAI transcription is enabled" - ) - - if self.enable_litellm_whisper_transcription: + def ensure_valid(self) -> None: + if self.provider == "AssemblyAI": + if not self.assemblyai_api_key: + raise ValueError( + "ASSEMBLYAI_API_KEY must be set when 
TRANSCRIPTION_PROVIDER=AssemblyAI" + ) + elif self.provider == "LiteLLM": missing = [ name for name, value in [ - ("LLM__MULTI_MODAL_FAST__MODEL", self.llms.multi_modal_fast.model), - ("LLM__MULTI_MODAL_FAST__API_KEY", self.llms.multi_modal_fast.api_key), + ("LITELLM_TRANSCRIPTION_MODEL", self.litellm_model), + ("LITELLM_TRANSCRIPTION_API_KEY", self.litellm_api_key), ] if value in (None, "") ] if missing: raise ValueError( - "Missing required LiteLLM Whisper configuration when transcription is enabled: " - + ", ".join(missing) + "Missing required LiteLLM transcription configuration: " + ", ".join(missing) + ) + elif self.provider == "Dembrane-25-09": + if self.gcp_sa_json is None: + raise ValueError( + "GCP_SA_JSON must be provided when TRANSCRIPTION_PROVIDER=Dembrane-25-09" ) - return self + +class AppSettings: + """ + Aggregate application settings composed from modular sections. + """ + + def __init__(self) -> None: + self.base_dir: Path = Path(__file__).resolve().parent.parent + + self.build = BuildSettings() + self.urls = URLSettings() + self.feature_flags = FeatureFlagSettings() + self.directus = DirectusSettings() + self.database = DatabaseSettings() + self.cache = CacheSettings() + self.storage = StorageSettings() + self.transcription = TranscriptionSettings() + self.llms = LLMSettings() + self.embedding = EmbeddingSettings() + + self.transcription.ensure_valid() @property def environment(self) -> str: - return "production" if self.build_version != "dev" else "development" + return "production" if self.build.build_version != "dev" else "development" @property def prompt_templates_dir(self) -> Path: return self.base_dir / "prompt_templates" - @property - def json_templates_dir(self) -> Path: - return self.base_dir / "json_templates" - - @lru_cache def get_settings() -> AppSettings: settings = AppSettings() - if settings.debug_mode: + if settings.feature_flags.debug_mode: logging.getLogger().setLevel(logging.DEBUG) for noisy in [ diff --git 
a/echo/server/dembrane/tasks.py b/echo/server/dembrane/tasks.py index 760acee4..1e302fe9 100644 --- a/echo/server/dembrane/tasks.py +++ b/echo/server/dembrane/tasks.py @@ -15,7 +15,6 @@ from dembrane.utils import generate_uuid, get_utc_timestamp from dembrane.settings import get_settings from dembrane.sentry import init_sentry -from dembrane.prompts import render_json from dembrane.directus import ( DirectusBadRequest, DirectusServerError, @@ -32,7 +31,7 @@ ) settings = get_settings() -REDIS_URL = settings.redis_url +REDIS_URL = settings.cache.redis_url init_sentry() @@ -454,28 +453,8 @@ def task_create_project_library(project_id: str, language: str) -> None: logger.error(f"Can retry. Failed to create project analysis run: {e}") raise e from e - default_view_name_list = ["default_view_recurring_themes"] - messages = [] - - for view_name in default_view_name_list: - message = render_json(view_name, language, {}, ["user_query", "user_query_context"]) - logger.info(f"Message: {message}") - messages.append( - task_create_view.message( - project_analysis_run_id=new_run_id, - user_query=message["user_query"], - user_query_context=message["user_query_context"], - language=language, - ) - ) - - group(messages).run() - - status_ctx.set_exit_message( - f"Successfully created {len(messages)} views for project: {project_id}" - ) logger.info( - f"Successfully created {len(messages)} views for project: {project_id} (language: {language})" + "Skipping default view generation for project %s; JSON templates have been removed.", + project_id, ) - return diff --git a/echo/server/dembrane/transcribe.py b/echo/server/dembrane/transcribe.py index d095b228..591a0914 100644 --- a/echo/server/dembrane/transcribe.py +++ b/echo/server/dembrane/transcribe.py @@ -20,7 +20,7 @@ from dembrane.s3 import get_signed_url, get_stream_from_s3 from dembrane.settings import get_settings -from dembrane.llms import MODELS, get_completion_kwargs, resolve_config +from dembrane.llms import MODELS, 
get_completion_kwargs from dembrane.prompts import render_prompt from dembrane.service import file_service, conversation_service from dembrane.directus import directus @@ -28,13 +28,15 @@ logger = logging.getLogger("transcribe") settings = get_settings() -GCP_SA_JSON = settings.gcp_sa_json -API_BASE_URL = settings.api_base_url -ASSEMBLYAI_API_KEY = settings.assemblyai_api_key -ASSEMBLYAI_BASE_URL = settings.assemblyai_base_url -TRANSCRIPTION_PROVIDER = settings.transcription_provider -ENABLE_ASSEMBLYAI_TRANSCRIPTION = settings.enable_assemblyai_transcription -ENABLE_LITELLM_WHISPER_TRANSCRIPTION = settings.enable_litellm_whisper_transcription +transcription_cfg = settings.transcription +GCP_SA_JSON = transcription_cfg.gcp_sa_json +ASSEMBLYAI_API_KEY = transcription_cfg.assemblyai_api_key +ASSEMBLYAI_BASE_URL = transcription_cfg.assemblyai_base_url +TRANSCRIPTION_PROVIDER = transcription_cfg.provider +LITELLM_TRANSCRIPTION_MODEL = transcription_cfg.litellm_model +LITELLM_TRANSCRIPTION_API_KEY = transcription_cfg.litellm_api_key +LITELLM_TRANSCRIPTION_API_BASE = transcription_cfg.litellm_api_base +LITELLM_TRANSCRIPTION_API_VERSION = transcription_cfg.litellm_api_version class TranscriptionError(Exception): @@ -58,18 +60,23 @@ def transcribe_audio_litellm( raise TranscriptionError(f"Failed to get audio stream from S3: {exc}") from exc try: - whisper_config = resolve_config(MODELS.MULTI_MODAL_FAST) - if not whisper_config.model or not whisper_config.api_key: - raise TranscriptionError("LiteLLM Whisper configuration is incomplete.") - response = litellm.transcription( - model=whisper_config.model, - file=file_upload, - api_key=whisper_config.api_key, - api_base=whisper_config.api_base, - api_version=whisper_config.api_version, - language=language, - prompt=whisper_prompt, - ) + if not LITELLM_TRANSCRIPTION_MODEL or not LITELLM_TRANSCRIPTION_API_KEY: + raise TranscriptionError("LiteLLM transcription configuration is incomplete.") + + request_kwargs: dict[str, Any] = { + 
"model": LITELLM_TRANSCRIPTION_MODEL, + "file": file_upload, + "language": language, + "prompt": whisper_prompt, + "api_key": LITELLM_TRANSCRIPTION_API_KEY, + } + + if LITELLM_TRANSCRIPTION_API_BASE: + request_kwargs["api_base"] = LITELLM_TRANSCRIPTION_API_BASE + if LITELLM_TRANSCRIPTION_API_VERSION: + request_kwargs["api_version"] = LITELLM_TRANSCRIPTION_API_VERSION + + response = litellm.transcription(**request_kwargs) return response["text"] except Exception as e: logger.error(f"LiteLLM transcription failed: {e}") @@ -356,12 +363,7 @@ def _build_hotwords(conversation: dict) -> Optional[List[str]]: def _get_transcript_provider() -> Literal["LiteLLM", "AssemblyAI", "Dembrane-25-09"]: if TRANSCRIPTION_PROVIDER: return TRANSCRIPTION_PROVIDER - elif ENABLE_ASSEMBLYAI_TRANSCRIPTION: - return "AssemblyAI" - elif ENABLE_LITELLM_WHISPER_TRANSCRIPTION: - return "LiteLLM" - else: - raise TranscriptionError("No valid transcription configuration found.") + raise TranscriptionError("No valid transcription configuration found.") def transcribe_conversation_chunk( @@ -434,6 +436,8 @@ def transcribe_conversation_chunk( ) _save_transcript(conversation_chunk_id, transcript, diarization=None) return conversation_chunk_id + case _: + raise TranscriptionError(f"Unsupported transcription provider: {transcript_provider}") except Exception as e: logger.error("Failed to process conversation chunk %s: %s", conversation_chunk_id, e) diff --git a/echo/server/tests/test_audio_utils.py b/echo/server/tests/test_audio_utils.py index fe5f04d0..697f7c6d 100644 --- a/echo/server/tests/test_audio_utils.py +++ b/echo/server/tests/test_audio_utils.py @@ -22,8 +22,8 @@ settings = get_settings() BASE_DIR = settings.base_dir -STORAGE_S3_BUCKET = settings.storage_s3_bucket -STORAGE_S3_ENDPOINT = settings.storage_s3_endpoint +STORAGE_S3_BUCKET = settings.storage.bucket +STORAGE_S3_ENDPOINT = settings.storage.endpoint AUDIO_FILES = [ diff --git a/echo/server/tests/test_transcribe_assembly.py 
b/echo/server/tests/test_transcribe_assembly.py index 8daa4f84..3e02006a 100644 --- a/echo/server/tests/test_transcribe_assembly.py +++ b/echo/server/tests/test_transcribe_assembly.py @@ -3,6 +3,8 @@ import pytest +os.environ.setdefault("TRANSCRIPTION_PROVIDER", "AssemblyAI") + TEST_AUDIO_URL = "https://storage.googleapis.com/aai-platform-public/samples/1765269382848385.wav" from dembrane.s3 import delete_from_s3, save_to_s3_from_url @@ -17,9 +19,7 @@ def _require_assemblyai(): """Ensure AssemblyAI is enabled and credentials are present or skip.""" if not os.environ.get("ASSEMBLYAI_API_KEY"): pytest.skip("ASSEMBLYAI_API_KEY not set; skipping AssemblyAI tests") - # Force provider selection to AssemblyAI in config by env flags - os.environ["ENABLE_ASSEMBLYAI_TRANSCRIPTION"] = "true" - os.environ["ENABLE_LITELLM_WHISPER_TRANSCRIPTION"] = "false" + os.environ["TRANSCRIPTION_PROVIDER"] = "AssemblyAI" @pytest.fixture From 5323d9928b56c182c70090429e18e82aef0b88af Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Tue, 11 Nov 2025 20:09:13 +0000 Subject: [PATCH 18/23] add env gitignore --- echo/.gitignore | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/echo/.gitignore b/echo/.gitignore index 731648fd..dc5fd8b1 100644 --- a/echo/.gitignore +++ b/echo/.gitignore @@ -23,4 +23,5 @@ __queuestorage__echo/server/dembrane/workspace_script.py .coverage .pytest_cache -.DS_Store \ No newline at end of file +.DS_Store +.env.backup* \ No newline at end of file From d9b7e49430d874aea785531d5e67b3867d9052ee Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Wed, 12 Nov 2025 17:04:56 +0000 Subject: [PATCH 19/23] wip --- echo/server/dembrane/api/chat.py | 873 ++++++++----------- echo/server/dembrane/api/conversation.py | 63 +- echo/server/dembrane/api/project.py | 252 +++--- echo/server/dembrane/chat_utils.py | 135 +-- echo/server/dembrane/database.py | 596 ------------- echo/server/dembrane/main.py | 19 +- echo/server/dembrane/prompts.py | 6 +- 
echo/server/dembrane/schemas.py | 201 ----- echo/server/dembrane/service/__init__.py | 13 + echo/server/dembrane/service/chat.py | 280 ++++++ echo/server/dembrane/service/conversation.py | 133 ++- echo/server/dembrane/service/project.py | 47 +- echo/server/dembrane/tasks.py | 2 +- echo/server/pyproject.toml | 6 - echo/server/tests/test_quote_utils.py | 277 ------ 15 files changed, 1070 insertions(+), 1833 deletions(-) delete mode 100644 echo/server/dembrane/database.py delete mode 100644 echo/server/dembrane/schemas.py create mode 100644 echo/server/dembrane/service/chat.py delete mode 100644 echo/server/tests/test_quote_utils.py diff --git a/echo/server/dembrane/api/chat.py b/echo/server/dembrane/api/chat.py index 50996bf1..0d7ae882 100644 --- a/echo/server/dembrane/api/chat.py +++ b/echo/server/dembrane/api/chat.py @@ -1,28 +1,18 @@ -# TODO: -# - Change db calls to directus calls import json import logging -from typing import Any, Dict, List, Literal, Optional, AsyncGenerator +from typing import Dict, List, Literal, Iterable, Optional, AsyncGenerator import litellm -from litellm.utils import token_counter from fastapi import Query, APIRouter, HTTPException from pydantic import BaseModel -from sqlalchemy.orm import selectinload +from litellm.utils import token_counter from fastapi.responses import StreamingResponse from dembrane.llms import MODELS, get_completion_kwargs -from dembrane.utils import generate_uuid, get_utc_timestamp -from dembrane.settings import get_settings +from dembrane.utils import generate_uuid from dembrane.prompts import render_prompt -from dembrane.database import ( - DatabaseSession, - ProjectChatModel, - ConversationModel, - ProjectChatMessageModel, - DependencyInjectDatabase, -) -from dembrane.directus import directus +from dembrane.service import chat_service, conversation_service +from dembrane.settings import get_settings from dembrane.chat_utils import ( MAX_CHAT_CONTEXT_LENGTH, generate_title, @@ -30,6 +20,8 @@ 
auto_select_conversations, create_system_messages_for_chat, ) +from dembrane.service.chat import ChatServiceException, ChatNotFoundException +from dembrane.async_helpers import run_in_thread_pool from dembrane.api.conversation import get_conversation_token_count from dembrane.api.dependency_auth import DirectusSession, DependencyDirectusSession @@ -112,85 +104,101 @@ class ChatContextSchema(BaseModel): auto_select_bool: bool -def raise_if_chat_not_found_or_not_authorized(chat_id: str, auth_session: DirectusSession) -> None: - chat_directus = directus.get_items( - "project_chat", - { - "query": { - "filter": {"id": {"_eq": chat_id}}, - "fields": ["project_id.directus_user_id"], - }, - }, - ) - - if chat_directus is None: - logger.debug("Chat directus not found") - raise HTTPException(status_code=404, detail="Chat not found") - - # access is denied only if the user is both not an admin AND not the project owner. - if (not auth_session.is_admin) and ( - not chat_directus[0]["project_id"]["directus_user_id"] == auth_session.user_id - ): +async def raise_if_chat_not_found_or_not_authorized( + chat_id: str, + auth_session: DirectusSession, + *, + include_used_conversations: bool = False, +) -> dict: + try: + chat = await run_in_thread_pool( + chat_service.get_by_id_or_raise, + chat_id, + include_used_conversations, + ) + except ChatNotFoundException as exc: + logger.debug("Chat %s not found when performing authorization", chat_id) + raise HTTPException(status_code=404, detail="Chat not found") from exc + except ChatServiceException as exc: + logger.error("Failed to fetch chat %s: %s", chat_id, exc) + raise HTTPException(status_code=500, detail="Failed to load chat") from exc + + project_owner: Optional[str] = None + project_info = chat.get("project_id") + if isinstance(project_info, dict): + project_owner = project_info.get("directus_user_id") + + if not auth_session.is_admin and project_owner != auth_session.user_id: logger.debug( - f"Chat not authorized. 
is_admin={auth_session.is_admin} and user_id={auth_session.user_id} and chat_directus_user_id = {chat_directus[0]['project_id']['directus_user_id']}" + "Chat %s not authorized for user %s (owner=%s)", + chat_id, + auth_session.user_id, + project_owner, ) raise HTTPException(status_code=403, detail="You are not authorized to access this chat") + return chat + @ChatRouter.get("/{chat_id}/context", response_model=ChatContextSchema) -async def get_chat_context( - chat_id: str, db: DependencyInjectDatabase, auth: DependencyDirectusSession -) -> ChatContextSchema: - raise_if_chat_not_found_or_not_authorized(chat_id, auth) - - chat = ( - db.query(ProjectChatModel) - .options(selectinload(ProjectChatModel.used_conversations)) - .filter(ProjectChatModel.id == chat_id) - .first() +async def get_chat_context(chat_id: str, auth: DependencyDirectusSession) -> ChatContextSchema: + chat = await raise_if_chat_not_found_or_not_authorized( + chat_id, + auth, + include_used_conversations=True, ) - if chat is None: - # i still have to check for this because: mypy - raise HTTPException(status_code=404, detail="Chat not found") - - messages = ( - db.query(ProjectChatMessageModel) - .options(selectinload(ProjectChatMessageModel.used_conversations)) - .filter(ProjectChatMessageModel.project_chat_id == chat_id) - .all() + messages = await run_in_thread_pool( + chat_service.list_messages, + chat_id, + include_relationships=True, + order="asc", ) - # conversation is locked when any chat message is using a conversation - locked_conversations = set() - for message in messages: - for conversation in message.used_conversations: - locked_conversations.add(conversation.id) # Add directus call here - + locked_conversations: set[str] = set() user_message_token_count = 0 assistant_message_token_count = 0 for message in messages: - if message.message_from in ["user", "assistant"]: - # if tokens_count is not set, set it - if message.tokens_count is None: - message.tokens_count = token_counter( - 
messages=[{"role": message.message_from, "content": message.text}], + for relation in message.get("used_conversations") or []: + conversation_ref = relation.get("conversation_id") or {} + conversation_id = conversation_ref.get("id") + if conversation_id: + locked_conversations.add(conversation_id) + + message_from = message.get("message_from") + if message_from in ["user", "assistant"]: + message_text = message.get("text", "") + tokens_count = message.get("tokens_count") + if tokens_count is None: + tokens_count = token_counter( + messages=[{"role": message_from, "content": message_text}], **get_completion_kwargs(MODELS.TEXT_FAST), ) - db.commit() - - if message.message_from == "user": - user_message_token_count += message.tokens_count - elif message.message_from == "assistant": - assistant_message_token_count += message.tokens_count + try: + await run_in_thread_pool( + chat_service.update_message, + message.get("id"), + {"tokens_count": tokens_count}, + ) + except ChatServiceException as exc: # pragma: no cover - informational only + logger.warning( + "Failed to persist token count for message %s: %s", + message.get("id"), + exc, + ) + if tokens_count is not None: + if message_from == "user": + user_message_token_count += tokens_count + else: + assistant_message_token_count += tokens_count - used_conversations = chat.used_conversations + used_conversation_links = chat.get("used_conversations") or [] - if chat.auto_select_bool is None: + auto_select_value = chat.get("auto_select") + if auto_select_value is None: raise HTTPException(status_code=400, detail="Auto select is not boolean") - # initialize response context = ChatContextSchema( conversations=[], conversation_id_list=[], @@ -205,25 +213,29 @@ async def get_chat_context( token_usage=assistant_message_token_count / MAX_CHAT_CONTEXT_LENGTH, ), ], - auto_select_bool=chat.auto_select_bool, + auto_select_bool=bool(auto_select_value), ) - for conversation in used_conversations: - is_conversation_locked = 
conversation.id in locked_conversations # Verify with directus + for link in used_conversation_links: + conversation_ref = link.get("conversation_id") or {} + conversation_id = conversation_ref.get("id") + if not conversation_id: + continue + + participant_name = conversation_ref.get("participant_name") + is_locked = conversation_id in locked_conversations + token_count = await get_conversation_token_count(conversation_id, auth) + chat_context_resource = ChatContextConversationSchema( - conversation_id=conversation.id, - conversation_participant_name=conversation.participant_name, - locked=is_conversation_locked, - # TODO: if quotes for this convo are present then just use RAG - token_usage=( - await get_conversation_token_count(conversation.id, db, auth) - / MAX_CHAT_CONTEXT_LENGTH - ), + conversation_id=conversation_id, + conversation_participant_name=participant_name, + locked=is_locked, + token_usage=token_count / MAX_CHAT_CONTEXT_LENGTH, ) context.conversations.append(chat_context_resource) - context.conversation_id_list.append(conversation.id) - if is_conversation_locked: - context.locked_conversation_id_list.append(conversation.id) + context.conversation_id_list.append(conversation_id) + if is_locked: + context.locked_conversation_id_list.append(conversation_id) return context @@ -237,10 +249,13 @@ class ChatAddContextSchema(BaseModel): async def add_chat_context( chat_id: str, body: ChatAddContextSchema, - db: DependencyInjectDatabase, auth: DependencyDirectusSession, ) -> None: - raise_if_chat_not_found_or_not_authorized(chat_id, auth) + chat = await raise_if_chat_not_found_or_not_authorized( + chat_id, + auth, + include_used_conversations=True, + ) if body.conversation_id is None and body.auto_select_bool is None: raise HTTPException( @@ -252,46 +267,54 @@ async def add_chat_context( status_code=400, detail="conversation_id and auto_select_bool cannot both be provided" ) - chat = db.get(ProjectChatModel, chat_id) - - if chat is None: - raise 
HTTPException(status_code=404, detail="Chat not found") - if body.conversation_id is not None: - conversation = db.get(ConversationModel, body.conversation_id) - - if conversation is None: - raise HTTPException(status_code=404, detail="Conversation not found") - - # check if the conversation is already in the chat - for i_conversation in chat.used_conversations: - if i_conversation.id == conversation.id: - raise HTTPException(status_code=400, detail="Conversation already in the chat") - - # check if the conversation is too long - if await get_conversation_token_count(conversation.id, db, auth) > MAX_CHAT_CONTEXT_LENGTH: + try: + await run_in_thread_pool( + conversation_service.get_by_id_or_raise, + body.conversation_id, + True, + False, + ) + except Exception as exc: + raise HTTPException(status_code=404, detail="Conversation not found") from exc + + existing_ids = { + (link.get("conversation_id") or {}).get("id") + for link in (chat.get("used_conversations") or []) + } + if body.conversation_id in existing_ids: + raise HTTPException(status_code=400, detail="Conversation already in the chat") + + token_count = await get_conversation_token_count(body.conversation_id, auth) + if token_count > MAX_CHAT_CONTEXT_LENGTH: raise HTTPException(status_code=400, detail="Conversation is too long") - # sum of all other conversations - chat_context = await get_chat_context(chat_id, db, auth) + chat_context = await get_chat_context(chat_id, auth) chat_context_token_usage = sum( - conversation.token_usage for conversation in chat_context.conversations + conversation_entry.token_usage for conversation_entry in chat_context.conversations ) - conversation_to_add_token_usage = ( - await get_conversation_token_count(conversation.id, db, auth) / MAX_CHAT_CONTEXT_LENGTH - ) - if chat_context_token_usage + conversation_to_add_token_usage > 1: + conversation_to_add_usage = token_count / MAX_CHAT_CONTEXT_LENGTH + if chat_context_token_usage + conversation_to_add_usage > 1: raise 
HTTPException( status_code=400, detail="Chat context is too long. Remove other conversations to proceed.", ) - chat.used_conversations.append(conversation) - db.commit() + + await run_in_thread_pool( + chat_service.attach_conversations, + chat_id, + [body.conversation_id], + ) + + chat = await raise_if_chat_not_found_or_not_authorized( + chat_id, + auth, + include_used_conversations=True, + ) if body.auto_select_bool is not None: - chat.auto_select_bool = body.auto_select_bool - db.commit() + await run_in_thread_pool(chat_service.set_auto_select, chat_id, body.auto_select_bool) class ChatDeleteContextSchema(BaseModel): @@ -303,10 +326,9 @@ class ChatDeleteContextSchema(BaseModel): async def delete_chat_context( chat_id: str, body: ChatDeleteContextSchema, - db: DependencyInjectDatabase, auth: DependencyDirectusSession, ) -> None: - raise_if_chat_not_found_or_not_authorized(chat_id, auth) + await raise_if_chat_not_found_or_not_authorized(chat_id, auth) if body.conversation_id is None and body.auto_select_bool is None: raise HTTPException( status_code=400, detail="conversation_id or auto_select_bool is required" @@ -320,89 +342,84 @@ async def delete_chat_context( if body.auto_select_bool is True: raise HTTPException(status_code=400, detail="auto_select_bool cannot be True") - chat = db.get(ProjectChatModel, chat_id) - - if chat is None: - raise HTTPException(status_code=404, detail="Chat not found") - if body.conversation_id is not None: - conversation = db.get(ConversationModel, body.conversation_id) + chat_context = await get_chat_context(chat_id, auth) - if conversation is None: - raise HTTPException(status_code=404, detail="Conversation not found") + conversation_entry = next( + ( + conversation_resource + for conversation_resource in chat_context.conversations + if conversation_resource.conversation_id == body.conversation_id + ), + None, + ) - chat_context = await get_chat_context(chat_id, db, auth) + if conversation_entry is None: + raise 
HTTPException(status_code=404, detail="Conversation not found in the chat") - # check if conversation exists in chat_context - for project_chat_conversation in chat_context.conversations: - if project_chat_conversation.conversation_id == conversation.id: - if project_chat_conversation.locked: - raise HTTPException(status_code=400, detail="Conversation is locked") - else: - chat.used_conversations.remove(conversation) - db.commit() - return + if conversation_entry.locked: + raise HTTPException(status_code=400, detail="Conversation is locked") - raise HTTPException(status_code=404, detail="Conversation not found in the chat") + await run_in_thread_pool( + chat_service.detach_conversation, + chat_id, + body.conversation_id, + ) if body.auto_select_bool is not None: - chat.auto_select_bool = body.auto_select_bool - db.commit() + await run_in_thread_pool(chat_service.set_auto_select, chat_id, body.auto_select_bool) @ChatRouter.post("/{chat_id}/lock-conversations", response_model=None) async def lock_conversations( chat_id: str, - db: DependencyInjectDatabase, auth: DependencyDirectusSession, -) -> List[ConversationModel]: - raise_if_chat_not_found_or_not_authorized(chat_id, auth) - - db_messages = ( - db.query(ProjectChatMessageModel) - .options(selectinload(ProjectChatMessageModel.used_conversations)) - .filter(ProjectChatMessageModel.project_chat_id == chat_id) - .order_by(ProjectChatMessageModel.date_created.desc()) - .all() +) -> List[dict]: + await raise_if_chat_not_found_or_not_authorized(chat_id, auth) + + messages = await run_in_thread_pool( + chat_service.list_messages, + chat_id, + include_relationships=True, + order="desc", ) - set_conversations_already_in_chat = set() - - for message in db_messages: - if message.used_conversations: - for conversation in message.used_conversations: - set_conversations_already_in_chat.add(conversation.id) + conversations_already_locked: set[str] = set() + for message in messages: + for relation in 
message.get("used_conversations") or []: + conversation_ref = relation.get("conversation_id") or {} + conv_id = conversation_ref.get("id") + if conv_id: + conversations_already_locked.add(conv_id) - current_context = await get_chat_context(chat_id, db, auth) + current_context = await get_chat_context(chat_id, auth) set_all_conversations = set(current_context.conversation_id_list) - set_conversations_to_add = set_all_conversations - set_conversations_already_in_chat - - if len(set_conversations_to_add) > 0: - # Fetch ConversationModel objects for added_conversations - added_conversations = ( - db.query(ConversationModel) - .filter(ConversationModel.id.in_(set_conversations_to_add)) - .all() + set_conversations_to_add = set_all_conversations - conversations_already_locked + + if set_conversations_to_add: + added_count = len(set_conversations_to_add) + message_text = ( + f"You added {added_count} conversations as context to the chat." + if added_count > 1 + else "You added 1 conversation as context to the chat." 
) - dembrane_search_complete_message = ProjectChatMessageModel( - id=generate_uuid(), - date_created=get_utc_timestamp(), - message_from="dembrane", - text=f"You added {len(set_conversations_to_add)} conversations as context to the chat.", - project_chat_id=chat_id, - used_conversations=added_conversations, - added_conversations=added_conversations, + await run_in_thread_pool( + chat_service.create_message, + chat_id, + "dembrane", + message_text, + message_id=generate_uuid(), + used_conversation_ids=set_conversations_to_add, + added_conversation_ids=set_conversations_to_add, ) - db.add(dembrane_search_complete_message) - db.commit() - - # Fetch ConversationModel objects for used_conversations - used_conversations = ( - db.query(ConversationModel) - .filter(ConversationModel.id.in_(current_context.conversation_id_list)) - .all() + + used_conversations = await run_in_thread_pool( + conversation_service.list_by_ids, + current_context.conversation_id_list, + with_chunks=False, + with_tags=True, ) return used_conversations @@ -422,402 +439,240 @@ class ChatBodySchema(BaseModel): async def post_chat( chat_id: str, body: ChatBodySchema, - db: DependencyInjectDatabase, auth: DependencyDirectusSession, protocol: str = Query("data"), language: str = Query("en"), -) -> StreamingResponse: # ignore: type - """ - Handle a chat interaction: persist the user's message, optionally generate a title, and stream an LLM-generated response. - This endpoint records the incoming user message into the chat, may asynchronously generate and persist a chat title if missing, and then produces a streaming response from the configured LLM. Two generation modes are supported: - - Auto-select (when enabled for the chat): builds a RAG prompt, retrieves conversation references and citations, and streams the model output. - - Manual-select: builds system messages from locked conversations and streams the model output. - Side effects: - - Persists a new ProjectChatMessageModel for the user message. 
- - May update the chat name and the message's template key. - - On generation failure the in-flight user message is deleted. - Parameters: - - chat_id: ID of the target chat (used to validate access and load context). - - body: ChatBodySchema containing the messages (the last user message is used as the prompt) and optional template_key. - - protocol: Response protocol; "data" (default) yields structured data frames, "text" yields raw text chunks. - - language: Language code used for title generation and system message creation. - Returns: - - StreamingResponse that yields streamed model content and, in auto-select mode, header payloads containing conversation references and citations. - Raises: - - HTTPException: 404 if the chat (or required conversation data) is not found; 400 when auto-select cannot satisfy context-length constraints or request validation fails. - """ - raise_if_chat_not_found_or_not_authorized(chat_id, auth) +) -> StreamingResponse: + chat = await raise_if_chat_not_found_or_not_authorized( + chat_id, + auth, + include_used_conversations=True, + ) - chat = db.get(ProjectChatModel, chat_id) + project_info = chat.get("project_id") + project_id: Optional[str] + if isinstance(project_info, dict): + project_id = project_info.get("id") + else: + project_id = project_info # directus may return an ID string - if chat is None: - raise HTTPException(status_code=404, detail="Chat not found") + if not project_id: + raise HTTPException(status_code=500, detail="Chat is missing a project reference") - """ - Put longform data at the top: - Place your long documents and inputs (~20K+ tokens) near the top of your prompt, above your query, instructions, and examples. - This can significantly improve performance across all models. 
- """ + user_message_content = body.messages[-1].content + user_message_id = generate_uuid() - user_message = ProjectChatMessageModel( - id=generate_uuid(), - date_created=get_utc_timestamp(), - message_from="user", - text=body.messages[-1].content, - project_chat_id=chat.id, + user_message = await run_in_thread_pool( + chat_service.create_message, + chat_id, + "user", + user_message_content, + message_id=user_message_id, ) - db.add(user_message) - db.commit() - try: - if chat.name is None: - chat.name = await generate_title(body.messages[-1].content, language) - db.commit() - except Exception as e: - logger.warning(f"Error generating title: {str(e)}") + if not chat.get("name"): + generated_title = await generate_title(user_message_content, language) + if generated_title: + await run_in_thread_pool(chat_service.set_chat_name, chat_id, generated_title) - try: - logger.debug("checking if user submitted template key") if body.template_key is not None: - logger.debug(f"updating template key to: {body.template_key}") - directus.update_item( - "project_chat_message", user_message.id, {"template_key": body.template_key} + await run_in_thread_pool( + chat_service.update_message, + user_message_id, + {"template_key": body.template_key}, ) - except Exception as e: - logger.error(f"Error updating template key: {str(e)}") - - project_id = directus.get_items( - "project_chat", - { - "query": { - "filter": {"id": {"_eq": chat.id}}, - "fields": ["project_id"], - }, - }, - )[0]["project_id"] - - messages = get_project_chat_history(chat_id, db) - if len(messages) == 0: - logger.debug("initializing chat") + messages = await get_project_chat_history(chat_id) + if len(messages) == 0: + logger.debug("initializing chat") - chat_context = await get_chat_context(chat_id, db, auth) + chat_context = await get_chat_context(chat_id, auth) + locked_conversation_id_list = chat_context.locked_conversation_id_list - locked_conversation_id_list = chat_context.locked_conversation_id_list # Verify 
with directus + conversation_history = [ + {"role": message["role"], "content": message["content"]} + for message in messages + if message["role"] in ["user", "assistant"] + ] - logger.debug(f"ENABLE_CHAT_AUTO_SELECT: {ENABLE_CHAT_AUTO_SELECT}") - logger.debug(f"chat_context.auto_select_bool: {chat_context.auto_select_bool}") - if ENABLE_CHAT_AUTO_SELECT and chat_context.auto_select_bool: - filtered_messages: List[Dict[str, Any]] = [] - for message in messages: - if message["role"] in ["user", "assistant"]: - filtered_messages.append(message) if ( - len(filtered_messages) >= 2 - and filtered_messages[-2]["role"] == "user" - and filtered_messages[-1]["role"] == "user" - and filtered_messages[-2]["content"] == filtered_messages[-1]["content"] + len(conversation_history) >= 2 + and conversation_history[-2]["role"] == "user" + and conversation_history[-1]["role"] == "user" + and conversation_history[-2]["content"] == conversation_history[-1]["content"] ): - filtered_messages = filtered_messages[:-1] + conversation_history = conversation_history[:-1] - query = filtered_messages[-1]["content"] - conversation_history = filtered_messages - - # Track newly added conversations for displaying in the frontend - conversations_added: list[ConversationModel] = [] - - # Check if this is a follow-up question (only if we have locked conversations) - should_reuse_locked = False - if locked_conversation_id_list: - is_followup = await is_followup_question(conversation_history, language) - if is_followup: - logger.info("Detected follow-up question - reusing locked conversations") - should_reuse_locked = True + async def build_formatted_messages(conversation_ids: Iterable[str]) -> List[Dict[str, str]]: + system_messages_result = await create_system_messages_for_chat( + list(conversation_ids), + language, + project_id, + ) + formatted: List[Dict[str, str]] = [] + if isinstance(system_messages_result, list): + formatted.extend( + {"role": "system", "content": message["text"]} + for message 
in system_messages_result + ) else: - logger.info("New independent question - running auto-select") + formatted.append({"role": "system", "content": system_messages_result}) - if should_reuse_locked: - # Reuse existing locked conversations for follow-up questions - updated_conversation_id_list = locked_conversation_id_list + formatted.extend(conversation_history) + return formatted - system_messages = await create_system_messages_for_chat( - updated_conversation_id_list, db, language, project_id - ) + conversations_added_ids: List[str] = [] + conversation_references: dict[str, List[Dict[str, str]]] = {"references": []} - formatted_messages = [] - if isinstance(system_messages, list): - for msg in system_messages: - formatted_messages.append({"role": "system", "content": msg["text"]}) - formatted_messages.extend(conversation_history) - else: - formatted_messages = [ - {"role": "system", "content": system_messages} - ] + conversation_history - - # Check context length - prompt_len = token_counter( - messages=formatted_messages, - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), - ) + formatted_messages: List[Dict[str, str]] + should_reuse_locked = False + if locked_conversation_id_list: + should_reuse_locked = await is_followup_question(conversation_history, language) - if prompt_len > MAX_CHAT_CONTEXT_LENGTH: - raise HTTPException( - status_code=400, - detail="The conversation context with the new message exceeds the maximum context length.", - ) - else: - # Run auto-select for first query or new independent questions - user_query_inputs = [query] + if ( + ENABLE_CHAT_AUTO_SELECT + and chat_context.auto_select_bool + and not should_reuse_locked + and conversation_history + ): + query = conversation_history[-1]["content"] logger.info(f"Calling auto_select_conversations with query: {query}") auto_select_result = await auto_select_conversations( - user_query_inputs=user_query_inputs, + user_query_inputs=[query], project_id_list=[project_id], - db=db, language=language, 
) - logger.info(f"Auto-select result: {auto_select_result}") + logger.info("Auto-select result: %s", auto_select_result) - # Extract selected conversation IDs - selected_conversation_ids = [] + selected_conversation_ids: List[str] = [] if "results" in auto_select_result: for proj_result in auto_select_result["results"].values(): - if "conversation_id_list" in proj_result: - selected_conversation_ids.extend(proj_result["conversation_id_list"]) + selected_conversation_ids.extend(proj_result.get("conversation_id_list", [])) - # Add selected conversations to chat context, but only up to 80% of max context length - conversations_added = [] - MAX_CONTEXT_THRESHOLD = int(MAX_CHAT_CONTEXT_LENGTH * 0.8) + existing_conversation_ids = set(chat_context.conversation_id_list) + max_context_threshold = int(MAX_CHAT_CONTEXT_LENGTH * 0.8) for conversation_id in selected_conversation_ids: - conversation = db.get(ConversationModel, conversation_id) - if conversation and conversation not in chat.used_conversations: - # Temporarily add to test token count - temp_conversation_list = [c.id for c in conversations_added] + [conversation_id] - - # Build system messages for current set of conversations - temp_system_messages = await create_system_messages_for_chat( - temp_conversation_list, db, language, project_id - ) + if ( + conversation_id in existing_conversation_ids + or conversation_id in conversations_added_ids + ): + continue - # Build formatted messages to check token count - temp_formatted_messages = [] - if isinstance(temp_system_messages, list): - for msg in temp_system_messages: - temp_formatted_messages.append( - {"role": "system", "content": msg["text"]} - ) - temp_formatted_messages.extend(conversation_history) - else: - temp_formatted_messages = [ - {"role": "system", "content": temp_system_messages} - ] + conversation_history - - # Check if adding this conversation would exceed 80% threshold - prompt_len = token_counter( - messages=temp_formatted_messages, - 
**get_completion_kwargs(MODELS.MULTI_MODAL_PRO), + temp_ids = ( + chat_context.conversation_id_list + conversations_added_ids + [conversation_id] + ) + candidate_messages = await build_formatted_messages(temp_ids) + prompt_len = token_counter( + messages=candidate_messages, + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), + ) + + if prompt_len > max_context_threshold: + logger.info( + "Reached 80%% context threshold (%s/%s tokens). Stopping conversation addition.", + prompt_len, + max_context_threshold, ) + break - if prompt_len > MAX_CONTEXT_THRESHOLD: - logger.info( - f"Reached 80% context threshold ({prompt_len}/{MAX_CONTEXT_THRESHOLD} tokens). Stopping conversation addition. Added {len(conversations_added)}/{len(selected_conversation_ids)} conversations." - ) - break - - # If we're still under threshold, add the conversation - chat.used_conversations.append(conversation) - conversations_added.append(conversation) - - # Create a message to lock the auto-selected conversations - if conversations_added: - auto_select_message = ProjectChatMessageModel( - id=generate_uuid(), - date_created=get_utc_timestamp(), - message_from="dembrane", - text=f"Auto-selected and added {len(conversations_added)} conversations as context to the chat.", - project_chat_id=chat_id, - used_conversations=conversations_added, + await run_in_thread_pool( + chat_service.attach_conversations, + chat_id, + [conversation_id], + ) + conversations_added_ids.append(conversation_id) + existing_conversation_ids.add(conversation_id) + + if conversations_added_ids: + await run_in_thread_pool( + chat_service.create_message, + chat_id, + "dembrane", + text=f"Auto-selected and added {len(conversations_added_ids)} conversations as context to the chat.", + message_id=generate_uuid(), + used_conversation_ids=conversations_added_ids, + added_conversation_ids=conversations_added_ids, ) - db.add(auto_select_message) - db.commit() - logger.info(f"Added {len(conversations_added)} conversations via 
auto-select") - - # Get updated chat context - updated_chat_context = await get_chat_context(chat_id, db, auth) - updated_conversation_id_list = updated_chat_context.conversation_id_list - # Build system messages from the selected conversations - system_messages = await create_system_messages_for_chat( - updated_conversation_id_list, db, language, project_id + updated_context = await get_chat_context(chat_id, auth) + formatted_messages = await build_formatted_messages( + updated_context.conversation_id_list ) - # Build messages to send - formatted_messages = [] - if isinstance(system_messages, list): - for msg in system_messages: - formatted_messages.append({"role": "system", "content": msg["text"]}) - formatted_messages.extend(conversation_history) - else: - formatted_messages = [ - {"role": "system", "content": system_messages} - ] + conversation_history - - # Check context length prompt_len = token_counter( messages=formatted_messages, **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) - if prompt_len > MAX_CHAT_CONTEXT_LENGTH: raise HTTPException( status_code=400, detail="Auto select returned too many conversations. 
The selected conversations exceed the maximum context length.", ) - # Build references list from ONLY newly added conversations (not all conversations) - conversation_references: dict[str, list[dict[str, str]]] = {"references": []} - # Only include conversations that were just added via auto-select - for conv in conversations_added: - conversation_references["references"].append( - { - "conversation": conv.id, - "conversation_title": conv.participant_name, - } - ) - - logger.info(f"Newly added conversations for frontend: {conversation_references}") + if conversations_added_ids: + added_details = await run_in_thread_pool( + conversation_service.list_by_ids, + conversations_added_ids, + with_chunks=False, + with_tags=False, + ) + conversation_references["references"] = [ + { + "conversation": item.get("id", ""), + "conversation_title": str(item.get("participant_name") or ""), + } + for item in added_details + ] + else: + formatted_messages = await build_formatted_messages(chat_context.conversation_id_list) - async def stream_response_async_autoselect() -> AsyncGenerator[str, None]: - # Send conversation references (selected conversations) - conversation_references_yeild = f"h:{json.dumps([conversation_references])}\n" - yield conversation_references_yeild + async def stream_response_async( + formatted: List[Dict[str, str]], + references: Optional[dict[str, List[Dict[str, str]]]] = None, + ) -> AsyncGenerator[str, None]: + if references is not None: + header_payload = f"h:{json.dumps([references])}\n" + yield header_payload - accumulated_response = "" try: response = await litellm.acompletion( - messages=formatted_messages, + messages=formatted, stream=True, - timeout=300, # 5 minute timeout for response - stream_timeout=180, # 3 minute timeout for streaming + timeout=300, + stream_timeout=180, **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), ) async for chunk in response: - if chunk.choices[0].delta.content: - content = chunk.choices[0].delta.content - 
accumulated_response += content + delta = chunk.choices[0].delta.content + if delta: if protocol == "text": - yield content - elif protocol == "data": - yield f"0:{json.dumps(content)}\n" - except Exception as e: - logger.error(f"Error in litellm stream response: {str(e)}") - # delete user message if stream fails - with DatabaseSession() as error_db: - error_db.delete(user_message) - error_db.commit() - - if protocol == "data": - yield '3:"An error occurred while processing the chat response."\n' - else: + yield delta + else: + yield f"0:{json.dumps(delta)}\n" + except Exception as exc: # pragma: no cover - runtime safeguard + logger.error("Error in litellm stream response: %s", exc) + await run_in_thread_pool(chat_service.delete_message, user_message_id) + if protocol == "text": yield "Error: An error occurred while processing the chat response." - return # Stop generation on error + else: + yield '3:"An error occurred while processing the chat response."\n' headers = {"Content-Type": "text/event-stream"} if protocol == "data": headers["x-vercel-ai-data-stream"] = "v1" - response = StreamingResponse(stream_response_async_autoselect(), headers=headers) - return response - else: - system_messages = await create_system_messages_for_chat( - locked_conversation_id_list, db, language, project_id - ) - - async def stream_response_async_manualselect() -> AsyncGenerator[str, None]: - """ - Asynchronously stream a model-generated assistant response for the manual-selection chat path. - Builds the outgoing message sequence by combining provided system messages (list or string) with recent user/assistant messages, removes a duplicated trailing user message if present, then calls the Litellm streaming completion API and yields text chunks as they arrive. - - Yields: - - If protocol == "text": successive raw text fragments from the model. - - If protocol == "data": framed data lines of the form `0:` for each fragment. 
- - On generation error: a single error payload matching the active protocol (`"Error: ..." ` for text, or `3:"..."` for data). - - Side effects: - - On an exception during generation, deletes the in-flight `user_message` from the database and commits the change. - - Notes: - - Expects surrounding scope variables: `messages`, `system_messages`, `litellm`, model/API constants, `protocol`, `user_message`, and `logger`. - - Returns when the stream completes. - """ - with DatabaseSession() as db: - filtered_messages: List[Dict[str, Any]] = [] - - for message in messages: - if message["role"] in ["user", "assistant"]: - filtered_messages.append(message) - - # Remove duplicate consecutive user messages but preserve conversation flow - if ( - len(filtered_messages) >= 2 - and filtered_messages[-2]["role"] == "user" - and filtered_messages[-1]["role"] == "user" - and filtered_messages[-2]["content"] == filtered_messages[-1]["content"] - ): - filtered_messages = filtered_messages[:-1] - - try: - accumulated_response = "" - - # Check message token count and add padding if needed - # Handle system_messages whether it's a list or string - if isinstance(system_messages, list): - messages_to_send = [] - for msg in system_messages: - messages_to_send.append({"role": "system", "content": msg["text"]}) - messages_to_send.extend(filtered_messages) - else: - messages_to_send = [ - {"role": "system", "content": system_messages} - ] + filtered_messages - - logger.debug(f"messages_to_send: {messages_to_send}") - response = await litellm.acompletion( - messages=messages_to_send, - stream=True, - timeout=300, # 5 minute timeout for response - stream_timeout=180, # 3 minute timeout for streaming - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), - ) - async for chunk in response: - if chunk.choices[0].delta.content: - content = chunk.choices[0].delta.content - accumulated_response += content - if protocol == "text": - yield content - elif protocol == "data": - yield 
f"0:{json.dumps(content)}\n" - except Exception as e: - logger.error(f"Error in litellm stream response: {str(e)}") - - # delete user message - db.delete(user_message) - db.commit() - - if protocol == "data": - yield '3:"An error occurred while processing the chat response."\n' - else: - yield "Error: An error occurred while processing the chat response." - - return - - headers = {"Content-Type": "text/event-stream"} - if protocol == "data": - headers["x-vercel-ai-data-stream"] = "v1" + if conversations_added_ids and conversation_references["references"]: + stream = stream_response_async(formatted_messages, conversation_references) + else: + stream = stream_response_async(formatted_messages) - response = StreamingResponse(stream_response_async_manualselect(), headers=headers) + return StreamingResponse(stream, headers=headers) - return response + except Exception: + # Ensure the user message does not linger on failure + await run_in_thread_pool(chat_service.delete_message, user_message_id) + raise diff --git a/echo/server/dembrane/api/conversation.py b/echo/server/dembrane/api/conversation.py index 4e2a995d..ba7d4f31 100644 --- a/echo/server/dembrane/api/conversation.py +++ b/echo/server/dembrane/api/conversation.py @@ -5,7 +5,6 @@ from fastapi import Request, APIRouter from pydantic import BaseModel -from sqlalchemy.orm import noload, selectinload from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.exceptions import HTTPException from litellm.utils import token_counter @@ -13,11 +12,6 @@ from dembrane.s3 import get_signed_url from dembrane.utils import CacheWithExpiration, generate_uuid, get_utc_timestamp -from dembrane.database import ( - ConversationModel, - ConversationChunkModel, - DependencyInjectDatabase, -) from dembrane.directus import directus from dembrane.audio_utils import ( get_duration_from_s3, @@ -33,58 +27,32 @@ ConversationNotFoundException, ) from dembrane.api.dependency_auth import DependencyDirectusSession +from 
dembrane.service import conversation_service logger = getLogger("api.conversation") ConversationRouter = APIRouter(tags=["conversation"]) async def get_conversation( - conversation_id: str, db: DependencyInjectDatabase, load_chunks: Optional[bool] = True -) -> ConversationModel: - if load_chunks: - conversation = ( - db.query(ConversationModel) - .options( - selectinload(ConversationModel.tags), - selectinload(ConversationModel.chunks), - ) - .filter( - ConversationModel.id == conversation_id, - ) - .first() - ) - else: - conversation = ( - db.query(ConversationModel) - .options( - noload(ConversationModel.chunks), - selectinload(ConversationModel.tags), - ) - .filter( - ConversationModel.id == conversation_id, - ) - .first() - ) - - if not conversation: - raise ConversationNotFoundException + conversation_id: str, + load_chunks: bool = True, + with_tags: bool = True, +) -> dict: + conversation = await run_in_thread_pool( + conversation_service.get_by_id_or_raise, + conversation_id, + with_tags, + load_chunks, + ) return conversation async def get_conversation_chunks( - conversation_id: str, db: DependencyInjectDatabase -) -> List[ConversationChunkModel]: - conversation = await get_conversation(conversation_id, db, load_chunks=False) - - chunks = ( - db.query(ConversationChunkModel) - .filter( - ConversationChunkModel.conversation_id == conversation.id, - ) - .order_by(ConversationChunkModel.timestamp) - .all() - ) + conversation_id: str, +) -> List[dict]: + await get_conversation(conversation_id, load_chunks=False) + chunks = await run_in_thread_pool(conversation_service.list_chunks, conversation_id) return chunks @@ -419,7 +387,6 @@ async def get_conversation_transcript( @ConversationRouter.get("/{conversation_id}/token-count") async def get_conversation_token_count( conversation_id: str, - _db: DependencyInjectDatabase, auth: DependencyDirectusSession, ) -> int: await raise_if_conversation_not_found_or_not_authorized(conversation_id, auth) diff --git 
a/echo/server/dembrane/api/project.py b/echo/server/dembrane/api/project.py index 47f83b37..5b63ba1a 100644 --- a/echo/server/dembrane/api/project.py +++ b/echo/server/dembrane/api/project.py @@ -1,34 +1,24 @@ import os import asyncio import zipfile +from datetime import datetime from http import HTTPStatus -from typing import List, Optional, Generator +from typing import Any, List, Optional, Generator from logging import getLogger from fastapi import APIRouter, HTTPException, BackgroundTasks from pydantic import BaseModel -from sqlalchemy.orm import Session from fastapi.responses import StreamingResponse from dembrane.tasks import task_create_view, task_create_project_library from dembrane.utils import generate_uuid, get_safe_filename from dembrane.settings import get_settings -from dembrane.schemas import ( - ProjectSchema, -) -from dembrane.service import project_service -from dembrane.database import ( - ProjectModel, - ConversationModel, - DependencyInjectDatabase, -) -from dembrane.directus import DirectusBadRequest, directus_client_context +from dembrane.service import project_service, conversation_service from dembrane.report_utils import ContextTooLongException, get_report_content_for_project from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ( ProjectLanguageNotSupportedException, ) -from dembrane.api.conversation import get_conversation, get_conversation_chunks from dembrane.api.dependency_auth import DependencyDirectusSession logger = getLogger("api.project") @@ -49,87 +39,123 @@ class CreateProjectRequestSchema(BaseModel): default_conversation_finish_text: Optional[str] = None -@ProjectRouter.post("", response_model=ProjectSchema) +@ProjectRouter.post("") async def create_project( body: CreateProjectRequestSchema, - db: DependencyInjectDatabase, auth: DependencyDirectusSession, -) -> ProjectModel: +) -> dict: if body.language is not None and body.language not in PROJECT_ALLOWED_LANGUAGES: raise 
ProjectLanguageNotSupportedException name = body.name or "New Project" context = body.context or None language = body.language or "en" - project = ProjectModel( - id=generate_uuid(), - directus_user_id=auth.user_id, + is_conversation_allowed = ( + body.is_conversation_allowed if body.is_conversation_allowed is not None else True + ) + + optional_fields: dict[str, Any] = { + "context": context, + "default_conversation_title": body.default_conversation_title, + "default_conversation_description": body.default_conversation_description, + "default_conversation_finish_text": body.default_conversation_finish_text, + } + + filtered_optional_fields = { + key: value for key, value in optional_fields.items() if value is not None + } + + project = await run_in_thread_pool( + project_service.create, name=name, - context=context, language=language, + is_conversation_allowed=is_conversation_allowed, + directus_user_id=auth.user_id, + id=generate_uuid(), + **filtered_optional_fields, ) - db.add(project) - db.commit() return project -async def generate_transcript_file(conversation_id: str, db: Session) -> Optional[str]: +def _parse_iso_datetime(value: Any) -> datetime: + if isinstance(value, datetime): + return value + + if isinstance(value, str): + normalized = value.replace("Z", "+00:00") + try: + return datetime.fromisoformat(normalized) + except ValueError: + logger.warning( + "Unable to parse datetime string '%s', falling back to current UTC", value + ) + + return datetime.utcnow() + + +def _sanitize_for_filename(text: str, max_length: int = 30) -> str: + if not text: + return "" + safe_text = "".join(c if c.isalnum() else "_" for c in text) + safe_text = "_".join(filter(None, safe_text.split("_"))) + return safe_text[:max_length] + + +def _generate_transcript_file_sync(conversation: dict) -> Optional[str]: + conversation_id = conversation.get("id") logger.info(f"generating transcript for conversation {conversation_id}") - chunks = await 
get_conversation_chunks(conversation_id, db) + chunks: List[dict] = conversation.get("chunks") or [] if not chunks: return None - conversation = await get_conversation(conversation_id, db, load_chunks=False) - email = conversation.participant_email - name = conversation.participant_name - # Add timestamp to make filename unique - timestamp = conversation.created_at.strftime("%Y%m%d_%H%M%S") + transcript_lines = [ + str(chunk.get("transcript")) + for chunk in chunks + if isinstance(chunk, dict) and chunk.get("transcript") + ] + + if not transcript_lines: + return None + + created_at = _parse_iso_datetime(conversation.get("created_at")) + timestamp = created_at.strftime("%Y%m%d_%H%M%S") name_for_file = f"{timestamp}" - def sanitize_for_filename(text: str, max_length: int = 30) -> str: - """Sanitize text to be used in filenames by replacing invalid chars with underscore.""" - if not text: - return "" - # Replace any non-alphanumeric chars with underscore - safe_text = "".join(c if c.isalnum() else "_" for c in text) - # Collapse multiple underscores - safe_text = "_".join(filter(None, safe_text.split("_"))) - return safe_text[:max_length] - - if name: - safe_name = sanitize_for_filename(name, max_length=50) - if safe_name: # Only add if we have valid chars left + name_value = conversation.get("participant_name") + if name_value: + safe_name = _sanitize_for_filename(name_value, max_length=50) + if safe_name: name_for_file += f"_{safe_name}" - if email: - # Extract username part and sanitize - email_part = email.split("@")[0] - safe_email = sanitize_for_filename(email_part, max_length=30) - if safe_email: # Only add if we have valid chars left + email_value = conversation.get("participant_email") + if email_value: + email_part = email_value.split("@")[0] + safe_email = _sanitize_for_filename(email_part, max_length=30) + if safe_email: name_for_file += f"_{safe_email}" - # Add conversation ID to ensure uniqueness - name_for_file += f"_{conversation_id[:8]}" + if 
conversation_id: + name_for_file += f"_{conversation_id[:8]}" - conversation_dir = os.path.join(BASE_DIR, "transcripts", conversation_id) + conversation_dir = os.path.join(BASE_DIR, "transcripts", conversation_id or "unknown") os.makedirs(conversation_dir, exist_ok=True) file_path = os.path.join(conversation_dir, f"{name_for_file}-transcript.md") with open(file_path, "w") as file: - for chunk in chunks: - try: - if chunk.transcript is not None: - file.write(str(chunk.transcript) + "\n") - except Exception as e: - logger.error(f"Failed to write transcript for chunk {chunk.id}: {e}") + for line in transcript_lines: + file.write(line + "\n") return file_path +async def generate_transcript_file(conversation: dict) -> Optional[str]: + return await run_in_thread_pool(_generate_transcript_file_sync, conversation) + + async def cleanup_files(zip_file_name: str, filenames: List[str]) -> None: os.remove(zip_file_name) for filename in filenames: @@ -139,42 +165,52 @@ async def cleanup_files(zip_file_name: str, filenames: List[str]) -> None: @ProjectRouter.get("/{project_id}/transcripts") async def get_project_transcripts( project_id: str, - db: DependencyInjectDatabase, auth: DependencyDirectusSession, background_tasks: BackgroundTasks, ) -> StreamingResponse: - project = db.get(ProjectModel, project_id) + from dembrane.service.project import ProjectNotFoundException - if not project: - raise HTTPException(status_code=404, detail="Project not found") + try: + project = await run_in_thread_pool(project_service.get_by_id_or_raise, project_id) + except ProjectNotFoundException as exc: + raise HTTPException(status_code=404, detail="Project not found") from exc - if not auth.is_admin and project.directus_user_id != auth.user_id: + if not auth.is_admin and project.get("directus_user_id", "") != auth.user_id: raise HTTPException(status_code=403, detail="User does not have access to this project") - conversations = ( - db.query(ConversationModel).filter(ConversationModel.project_id 
== project_id).all() + conversations = await run_in_thread_pool( + conversation_service.list_by_project, + project_id, + with_chunks=True, + with_tags=False, ) if not conversations: raise HTTPException(status_code=404, detail="No conversations found for this project") - conversations = [ - c for c in conversations if c.chunks and any(ch.transcript is not None for ch in c.chunks) + conversations_with_transcripts = [ + conversation + for conversation in conversations + if any( + isinstance(chunk, dict) and chunk.get("transcript") + for chunk in (conversation.get("chunks") or []) + ) ] - filename_futures = [ - generate_transcript_file(conversation.id, db) for conversation in conversations - ] + if not conversations_with_transcripts: + raise HTTPException(status_code=404, detail="No transcripts available for this project") - filenames_with_none: List[str | None] = await asyncio.gather(*filename_futures) + filenames_with_none: List[Optional[str]] = await asyncio.gather( + *[generate_transcript_file(conversation) for conversation in conversations_with_transcripts] + ) filenames: List[str] = [filename for filename in filenames_with_none if filename is not None] if not filenames: raise HTTPException(status_code=404, detail="No transcripts available for this project") - project_name_or_id = project.name if project.name is not None else project.id - safe_project_name = get_safe_filename(project_name_or_id) + project_name_or_id = project.get("name") if project.get("name") is not None else project_id + safe_project_name = get_safe_filename(str(project_name_or_id)) zip_file_name = f"{safe_project_name}_transcripts.zip" with zipfile.ZipFile(zip_file_name, "w", zipfile.ZIP_DEFLATED) as zipf: @@ -202,35 +238,13 @@ def iterfile() -> Generator[bytes, None, None]: async def get_latest_project_analysis_run(project_id: str) -> Optional[dict]: - try: - def _get_analysis_run() -> Optional[list[dict]]: - with directus_client_context() as client: - return client.get_items( - 
"project_analysis_run", - { - "query": { - "filter": { - "project_id": project_id, - }, - "sort": "-created_at", - }, - }, - ) - - analysis_run: Optional[list[dict]] = await run_in_thread_pool(_get_analysis_run) - - if analysis_run is None: - return None - - if len(analysis_run) == 0: - return None - - return analysis_run[0] - - except DirectusBadRequest as e: - logger.error(f"Failed to get latest project analysis run for project {project_id}: {e}") + analysis_run = await run_in_thread_pool(project_service.get_latest_analysis_run, project_id) + + if not analysis_run: return None + return analysis_run + class CreateLibraryRequestBodySchema(BaseModel): language: Optional[str] = "en" @@ -326,38 +340,24 @@ async def create_report(project_id: str, body: CreateReportRequestBodySchema) -> try: report_content_response = await get_report_content_for_project(project_id, language) except ContextTooLongException: - - def _create_error_report() -> dict: - with directus_client_context() as client: - return client.create_item( - "project_report", - item_data={ - "content": "", - "project_id": project_id, - "language": language, - "status": "error", - "error_code": "CONTEXT_TOO_LONG", - }, - )["data"] - - report = await run_in_thread_pool(_create_error_report) + report = await run_in_thread_pool( + project_service.create_report, + project_id, + language, + "", + "error", + "CONTEXT_TOO_LONG", + ) return report except Exception as e: raise e - def _create_report() -> dict: - with directus_client_context() as client: - return client.create_item( - "project_report", - item_data={ - "content": report_content_response, - "project_id": project_id, - "language": language, - "status": "archived", - }, - )["data"] - - report = await run_in_thread_pool(_create_report) + report = await run_in_thread_pool( + project_service.create_report, + project_id, + language, + report_content_response, + ) return report diff --git a/echo/server/dembrane/chat_utils.py b/echo/server/dembrane/chat_utils.py 
index 74901ddb..b9a32c25 100644 --- a/echo/server/dembrane/chat_utils.py +++ b/echo/server/dembrane/chat_utils.py @@ -6,9 +6,8 @@ import backoff from litellm import acompletion -from litellm.utils import token_counter from pydantic import BaseModel -from sqlalchemy.orm import Session, selectinload +from litellm.utils import token_counter from litellm.exceptions import ( Timeout, APIError, @@ -18,10 +17,11 @@ ) from dembrane.llms import MODELS, get_completion_kwargs -from dembrane.settings import get_settings from dembrane.prompts import render_prompt -from dembrane.database import ConversationModel, ProjectChatMessageModel +from dembrane.service import chat_service, conversation_service from dembrane.directus import directus +from dembrane.settings import get_settings +from dembrane.async_helpers import run_in_thread_pool from dembrane.api.conversation import get_conversation_transcript from dembrane.api.dependency_auth import DirectusSession @@ -66,20 +66,25 @@ def convert_to_openai_messages(messages: List[ClientMessage]) -> List[Dict[str, return openai_messages -def get_project_chat_history(chat_id: str, db: Session) -> List[Dict[str, Any]]: - db_messages = ( - db.query(ProjectChatMessageModel) - .filter(ProjectChatMessageModel.project_chat_id == chat_id) - .order_by(ProjectChatMessageModel.date_created.asc()) - .all() +async def get_project_chat_history(chat_id: str) -> List[Dict[str, Any]]: + messages_raw = await run_in_thread_pool( + chat_service.list_messages, + chat_id, + include_relationships=False, + order="asc", ) - messages = [] - for i in db_messages: + messages: List[Dict[str, Any]] = [] + for message in messages_raw: + message_from = message.get("message_from") + if message_from is None: + continue messages.append( { - "role": i.message_from, - "content": i.text, + "role": message_from, + "content": message.get("text", ""), + "id": message.get("id"), + "tokens_count": message.get("tokens_count"), } ) @@ -87,12 +92,13 @@ def 
get_project_chat_history(chat_id: str, db: Session) -> List[Dict[str, Any]]: async def create_system_messages_for_chat( - locked_conversation_id_list: List[str], db: Session, language: str, project_id: str + locked_conversation_id_list: List[str], language: str, project_id: str ) -> List[Dict[str, Any]]: - conversations = ( - db.query(ConversationModel) - .filter(ConversationModel.id.in_(locked_conversation_id_list)) - .all() + conversations = await run_in_thread_pool( + conversation_service.list_by_ids, + locked_conversation_id_list, + with_chunks=False, + with_tags=True, ) try: project_query = { @@ -122,16 +128,22 @@ async def create_system_messages_for_chat( conversation_data_list = [] for conversation in conversations: + tag_text_list: List[str] = [] + for tag_entry in conversation.get("tags", []) or []: + if isinstance(tag_entry, dict): + project_tag = tag_entry.get("project_tag_id") + if isinstance(project_tag, dict): + tag_text = project_tag.get("text") + if tag_text: + tag_text_list.append(str(tag_text)) conversation_data_list.append( { - "name": conversation.participant_name, - "tags": ", ".join([tag.text for tag in conversation.tags]), - "created_at": conversation.created_at.isoformat() - if conversation.created_at - else None, - "duration": conversation.duration, + "name": conversation.get("participant_name"), + "tags": ", ".join(tag_text_list), + "created_at": conversation.get("created_at"), + "duration": conversation.get("duration"), "transcript": await get_conversation_transcript( - conversation.id, + conversation.get("id", ""), # fake auth to get this fn call DirectusSession(user_id="none", is_admin=True), ), @@ -208,7 +220,6 @@ async def generate_title( async def auto_select_conversations( user_query_inputs: List[str], project_id_list: List[str], - db: Session, language: str = "en", batch_size: int = 20, ) -> Dict[str, Any]: @@ -247,11 +258,11 @@ async def auto_select_conversations( for project_id in project_id_list: # Get all conversations for this 
project - conversations = ( - db.query(ConversationModel) - .filter(ConversationModel.project_id == project_id) - .options(selectinload(ConversationModel.tags)) - .all() + conversations = await run_in_thread_pool( + conversation_service.list_by_project, + project_id, + with_chunks=False, + with_tags=True, ) if not conversations: @@ -345,7 +356,7 @@ async def _call_llm_with_backoff(prompt: str, batch_num: int) -> Any: async def _process_single_batch( - batch: List[ConversationModel], + batch: List[dict], batch_num: int, user_query_inputs: List[str], language: str, @@ -354,7 +365,7 @@ async def _process_single_batch( Process a single batch of conversations and return selected IDs. Args: - batch: List of ConversationModel instances to process + batch: List of conversation dictionaries to process batch_num: Batch number for logging user_query_inputs: User queries to match against language: Language code for the prompt template @@ -368,17 +379,21 @@ async def _process_single_batch( logger.info(f"Processing batch {batch_num} ({len(batch)} conversations, parallel execution)") # Prepare conversation data for the prompt - conversation_data = [] + conversation_data: List[Dict[str, Any]] = [] for conv in batch: - # Get summary or fallback to transcript excerpt - summary_text = None - if conv.summary and conv.summary.strip(): - summary_text = conv.summary + conv_id = conv.get("id") + if not conv_id: + continue + + summary_text: Optional[str] = None + conv_summary = conv.get("summary") + if isinstance(conv_summary, str) and conv_summary.strip(): + summary_text = conv_summary else: # Use transcript as fallback try: transcript = await get_conversation_transcript( - conv.id, + conv_id, DirectusSession(user_id="none", is_admin=True), ) # Limit transcript to first 500 characters for context @@ -387,23 +402,34 @@ async def _process_single_batch( elif transcript: summary_text = transcript except Exception as e: - logger.warning(f"Could not get transcript for conversation {conv.id}: 
{e}") + logger.warning(f"Could not get transcript for conversation {conv_id}: {e}") # Skip conversations with no content at all if not summary_text: - logger.debug(f"Skipping conversation {conv.id} - no summary or transcript") + logger.debug(f"Skipping conversation {conv_id} - no summary or transcript") continue - conv_data = { - "id": conv.id, - "participant_name": conv.participant_name or "Unknown", + tag_values: List[str] = [] + for tag_entry in conv.get("tags", []) or []: + if isinstance(tag_entry, dict): + project_tag = tag_entry.get("project_tag_id") + if isinstance(project_tag, dict): + tag_text = project_tag.get("text") + if tag_text: + tag_values.append(str(tag_text)) + + conversation_entry: Dict[str, Any] = { + "id": conv_id, + "participant_name": conv.get("participant_name") or "Unknown", "summary": summary_text, } - if conv.tags: - conv_data["tags"] = ", ".join([tag.text for tag in conv.tags]) - if conv.created_at: - conv_data["created_at"] = conv.created_at.isoformat() - conversation_data.append(conv_data) + if tag_values: + conversation_entry["tags"] = ", ".join(tag_values) + created_at_value = conv.get("created_at") + if created_at_value: + conversation_entry["created_at"] = created_at_value + + conversation_data.append(conversation_entry) # Skip batch if no valid conversations if not conversation_data: @@ -431,9 +457,10 @@ async def _process_single_batch( if prompt_tokens > MAX_BATCH_CONTEXT: # If batch has only 1 conversation, we can't split further if len(batch) == 1: + conversation_identifier = batch[0].get("id") logger.error( f"Batch {batch_num} single conversation exceeds context limit: " - f"{prompt_tokens} tokens. Skipping conversation {batch[0].id}." + f"{prompt_tokens} tokens. Skipping conversation {conversation_identifier}." 
) return { "selected_ids": [], @@ -479,9 +506,11 @@ async def _process_single_batch( raw_selected_ids = result.get("selected_conversation_ids", []) # Validate LLM response: ensure all returned IDs are from this batch - valid_ids = {conv.id for conv in batch} + valid_ids = {conv.get("id") for conv in batch if conv.get("id") is not None} batch_selected_ids = [ - id for id in raw_selected_ids if isinstance(id, (int, str)) and id in valid_ids + selected_id + for selected_id in raw_selected_ids + if isinstance(selected_id, (int, str)) and selected_id in valid_ids ] # Log warning if LLM returned invalid IDs diff --git a/echo/server/dembrane/database.py b/echo/server/dembrane/database.py deleted file mode 100644 index 87c55d9e..00000000 --- a/echo/server/dembrane/database.py +++ /dev/null @@ -1,596 +0,0 @@ -# this is not upto date. switched to directus for a better life -from enum import Enum -from typing import Any, List, Optional, Annotated, Generator -from logging import getLogger -from datetime import datetime, timezone - -from fastapi import Depends -from sqlalchemy import ( - Text, - Float, - Table, - Column, - String, - Boolean, - Integer, - DateTime as _DateTime, - ForeignKey, - TypeDecorator, - func, - create_engine, -) -from sqlalchemy.orm import ( - Mapped, - Session as _Session, - relationship, - sessionmaker, - mapped_column, - scoped_session, - declarative_base, -) -from pgvector.sqlalchemy import Vector -from sqlalchemy.dialects.postgresql import UUID - -from dembrane.settings import get_settings -from dembrane.embedding import EMBEDDING_DIM - -logger = getLogger("database") - -# Create the engine and connect to the SQLite database file -settings = get_settings() -DATABASE_URL = settings.database.database_url -logger.debug(f"Connecting to database: {DATABASE_URL}") -engine = create_engine(DATABASE_URL) - -# Create a session factory -session_factory = sessionmaker(bind=engine) -Session = scoped_session(session_factory) -# Alias -DatabaseSession = Session - 
-# Define your models as subclasses of the base class -Base: Any = declarative_base() - - -class DateTime(TypeDecorator[_DateTime]): - """Custom type to store UTC datetime in the database. Allows to only use - timezone aware datetime objects as parameters and return timezone aware - datetime objects as results.""" - - impl = _DateTime - cache_ok = True - - def process_bind_param(self, value, _dialect): # type: ignore - if isinstance(value, datetime) and value.tzinfo is None: - raise ValueError("Naive datetime is not supported") - - return value.astimezone(timezone.utc) if value else None - - def process_result_value(self, value, _dialect): # type: ignore - if isinstance(value, datetime) and value.tzinfo is None: - return value.replace(tzinfo=timezone.utc) - - return value.astimezone(timezone.utc) if value else None - - -class ProcessingStatusEnum(Enum): - PENDING = "PENDING" - PROCESSING = "PROCESSING" - DONE = "DONE" - ERROR = "ERROR" - - -# class SessionModel(Base): -# __tablename__ = "session" -# id: Mapped[int] = mapped_column(Integer, primary_key=True, autoincrement=True) -# uuid: Mapped[str] = mapped_column(UUID(as_uuid=False), unique=False, nullable=False) -# created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) -# updated_at: Mapped[datetime] = mapped_column( -# DateTime(timezone=True), server_default=func.now(), onupdate=func.now() -# ) -# projects: Mapped[List["ProjectModel"]] = relationship( -# "ProjectModel", back_populates="session" -# ) -# user_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("directus_user.id")) - - -class UserModel(Base): - __tablename__ = "directus_user" - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - - -class ProjectModel(Base): - __tablename__ = "project" - - # id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True, server_default=func.uuid_generate_v4()) - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - - 
created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - # session_id: Mapped[int] = mapped_column(Integer, ForeignKey("session.id")) - # session: Mapped["SessionModel"] = relationship("SessionModel", back_populates="projects") - - directus_user_id: Mapped[str] = mapped_column( - UUID(as_uuid=False), ForeignKey("directus_user.id") - ) - directus_user: Mapped["UserModel"] = relationship("UserModel") - - language: Mapped[str] = mapped_column(String, default="en") - - name: Mapped[Optional[str]] = mapped_column(String, nullable=True) - context: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - is_conversation_allowed: Mapped[bool] = mapped_column(Boolean, default=True) - image_generation_model: Mapped[str] = mapped_column(String, default="MODEST") - - default_conversation_ask_for_participant_name: Mapped[bool] = mapped_column(Boolean) - default_conversation_tutorial_slug: Mapped[Optional[str]] = mapped_column(String, nullable=True) - default_conversation_transcript_prompt: Mapped[Optional[str]] = mapped_column( - Text, nullable=True - ) - - default_conversation_title: Mapped[Optional[str]] = mapped_column(String, nullable=True) - default_conversation_description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - default_conversation_finish_text: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - resources: Mapped[List["ResourceModel"]] = relationship( - "ResourceModel", back_populates="project", cascade="all, delete-orphan" - ) - conversations: Mapped[List["ConversationModel"]] = relationship( - "ConversationModel", back_populates="project", cascade="all, delete-orphan" - ) - - tags: Mapped[List["ProjectTagModel"]] = relationship( - "ProjectTagModel", - back_populates="project", - cascade="all, delete-orphan", - ) - - project_analysis_runs: 
Mapped[List["ProjectAnalysisRunModel"]] = relationship( - "ProjectAnalysisRunModel", - back_populates="project", - cascade="all, delete-orphan", - ) - - # @staticmethod - # def belongs_to_session(project_id: str, session_id: int) -> bool: - # return ( - # db.query(ProjectModel) - # .filter(ProjectModel.id == project_id, ProjectModel.session_id == session_id) - # .first() - # is not None - # ) - - -class ProjectAnalysisRunModel(Base): - __tablename__ = "project_analysis_run" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - project_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("project.id")) - project: Mapped["ProjectModel"] = relationship( - "ProjectModel", back_populates="project_analysis_runs" - ) - - quotes: Mapped[List["QuoteModel"]] = relationship( - "QuoteModel", back_populates="project_analysis_run" - ) - insights: Mapped[List["InsightModel"]] = relationship( - "InsightModel", back_populates="project_analysis_run" - ) - views: Mapped[List["ViewModel"]] = relationship( - "ViewModel", back_populates="project_analysis_run" - ) - - processing_status: Mapped[ProcessingStatusEnum] = mapped_column(Text, default="PENDING") - processing_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - processing_error: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - processing_started_at: Mapped[Optional[datetime]] = mapped_column( - DateTime(timezone=True), nullable=True - ) - processing_completed_at: Mapped[Optional[datetime]] = mapped_column( - DateTime(timezone=True), nullable=True - ) - - -conversation_project_tag_association_table = Table( - "conversation_project_tag", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - 
Column("conversation_id", ForeignKey("conversation.id")), - Column("project_tag_id", ForeignKey("project_tag.id")), -) - -project_chat_message_conversation_association_table = Table( - "project_chat_message_conversation", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - Column("project_chat_message_id", ForeignKey("project_chat_message.id")), - Column("conversation_id", ForeignKey("conversation.id")), -) - -project_chat_message_conversation_association_1_table = Table( - "project_chat_message_conversation_1", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - Column("project_chat_message_id", ForeignKey("project_chat_message.id")), - Column("conversation_id", ForeignKey("conversation.id")), -) - -project_chat_conversation_association_table = Table( - "project_chat_conversation", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - Column("project_chat_id", ForeignKey("project_chat.id")), - Column("conversation_id", ForeignKey("conversation.id")), -) - - -class ProjectTagModel(Base): - __tablename__ = "project_tag" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - project_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("project.id")) - project: Mapped["ProjectModel"] = relationship("ProjectModel", back_populates="tags") - - conversations: Mapped[List["ConversationModel"]] = relationship( - "ConversationModel", - secondary=conversation_project_tag_association_table, - back_populates="tags", - ) - - text: Mapped[str] = mapped_column(String) - - -class ProjectChatMessageModel(Base): - __tablename__ = "project_chat_message" - - id: Mapped[str] = 
mapped_column(UUID(as_uuid=False), primary_key=True) - date_created: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now() - ) - date_updated: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - project_chat_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("project_chat.id")) - project_chat: Mapped["ProjectChatModel"] = relationship( - "ProjectChatModel", back_populates="project_chat_messages" - ) - text: Mapped[str] = mapped_column(String) - message_from: Mapped[str] = mapped_column(String) - used_conversations: Mapped[List["ConversationModel"]] = relationship( - "ConversationModel", - secondary=project_chat_message_conversation_association_table, - back_populates="project_chat_messages", - ) - added_conversations: Mapped[List["ConversationModel"]] = relationship( - "ConversationModel", - secondary=project_chat_message_conversation_association_1_table, - ) - tokens_count: Mapped[int] = mapped_column(Integer) - # conversation_references: Mapped[List[Dict[str, str]]] = mapped_column(JSONB, default=[]) - # citations: Mapped[List[Dict[str, str]]] = mapped_column(JSONB, default=[]) - - -class ProjectChatModel(Base): - __tablename__ = "project_chat" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - date_created: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now() - ) - date_updated: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - project_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("project.id")) - - project_chat_messages: Mapped[List["ProjectChatMessageModel"]] = relationship( - "ProjectChatMessageModel", back_populates="project_chat" - ) - - used_conversations: Mapped[List["ConversationModel"]] = relationship( - "ConversationModel", - secondary=project_chat_conversation_association_table, - 
back_populates="project_chats", - ) - - auto_select_bool: Mapped[bool] = mapped_column("auto_select", Boolean, default=False) - name: Mapped[Optional[str]] = mapped_column(String, nullable=True) - - -class ResourceTypeEnum(Enum): - PDF = "PDF" - - -class ResourceModel(Base): - __tablename__ = "document" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - project_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("project.id")) - project: Mapped["ProjectModel"] = relationship("ProjectModel", back_populates="resources") - - original_filename: Mapped[str] = mapped_column(String, default="") - type: Mapped[ResourceTypeEnum] = mapped_column(String, default=ResourceTypeEnum.PDF) - path: Mapped[str] = mapped_column(String) - - title: Mapped[str] = mapped_column(String) - description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - context: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - is_processed: Mapped[bool] = mapped_column(Boolean, default=False) - processing_error: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - -class ConversationModel(Base): - __tablename__ = "conversation" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - project_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("project.id")) - project: Mapped["ProjectModel"] = relationship("ProjectModel", back_populates="conversations") - - participant_name: Mapped[str] = mapped_column(String, nullable=False, default="") - participant_email: 
Mapped[Optional[str]] = mapped_column(String, nullable=True) - participant_user_agent: Mapped[Optional[str]] = mapped_column(String, nullable=True) - - summary: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - source: Mapped[Optional[str]] = mapped_column(String, nullable=True) - duration: Mapped[Optional[float]] = mapped_column(Float, nullable=True) - - chunks: Mapped[List["ConversationChunkModel"]] = relationship( - "ConversationChunkModel", - back_populates="conversation", - cascade="all, delete-orphan", - ) - - tags: Mapped[List["ProjectTagModel"]] = relationship( - "ProjectTagModel", - secondary=conversation_project_tag_association_table, - back_populates="conversations", - ) - - quotes: Mapped[List["QuoteModel"]] = relationship( - "QuoteModel", back_populates="conversation", cascade="all, delete-orphan" - ) - - project_chats: Mapped[List["ProjectChatModel"]] = relationship( - "ProjectChatModel", - back_populates="used_conversations", - secondary=project_chat_conversation_association_table, - ) - - project_chat_messages: Mapped[List["ProjectChatMessageModel"]] = relationship( - "ProjectChatMessageModel", - back_populates="used_conversations", - secondary=project_chat_message_conversation_association_table, - ) - - -conversation_chunk_quote_association_table = Table( - "quote_conversation_chunk", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - Column("conversation_chunk_id", ForeignKey("conversation_chunk.id")), - Column("quote_id", ForeignKey("quote.id")), -) - - -class ConversationChunkModel(Base): - __tablename__ = "conversation_chunk" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - conversation_id: Mapped[str] = mapped_column(UUID(as_uuid=False), 
ForeignKey("conversation.id")) - conversation: Mapped["ConversationModel"] = relationship( - "ConversationModel", back_populates="chunks" - ) - - path: Mapped[Optional[str]] = mapped_column(String, nullable=True) - - timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True)) - transcript: Mapped[str] = mapped_column(Text, nullable=True) - - quotes: Mapped[List["QuoteModel"]] = relationship( - "QuoteModel", - secondary=conversation_chunk_quote_association_table, - back_populates="conversation_chunks", - ) - - -quote_aspect_association_table = Table( - "quote_aspect", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - Column("quote_id", ForeignKey("quote.id"), primary_key=True), - Column("aspect_id", ForeignKey("aspect.id"), primary_key=True), -) - -representative_quote_aspect_association_table = Table( - "quote_aspect_1", - Base.metadata, - Column("id", Integer, autoincrement=True, primary_key=True, unique=True), - Column("quote_id", ForeignKey("quote.id"), primary_key=True), - Column("aspect_id", ForeignKey("aspect.id"), primary_key=True), -) - - -class QuoteModel(Base): - __tablename__ = "quote" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - order: Mapped[int] = mapped_column(Integer, nullable=True) - timestamp: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=True) - - text: Mapped[str] = mapped_column(Text) - embedding: Mapped[List[float]] = mapped_column(Vector(EMBEDDING_DIM)) - - conversation_id: Mapped[str] = mapped_column(UUID(as_uuid=False), ForeignKey("conversation.id")) - conversation: Mapped["ConversationModel"] = relationship("ConversationModel") - - conversation_chunks: Mapped[List["ConversationChunkModel"]] = relationship( - 
"ConversationChunkModel", - secondary=conversation_chunk_quote_association_table, - back_populates="quotes", - ) - - insight_id: Mapped[Optional[str]] = mapped_column(UUID(as_uuid=False), ForeignKey("insight.id")) - insight: Mapped[Optional["InsightModel"]] = relationship( - "InsightModel", back_populates="quotes" - ) - - aspects: Mapped[List["AspectModel"]] = relationship( - "AspectModel", back_populates="quotes", secondary=quote_aspect_association_table - ) - representative_aspects: Mapped[List["AspectModel"]] = relationship( - "AspectModel", - back_populates="representative_quotes", - secondary=representative_quote_aspect_association_table, - ) - - project_analysis_run_id: Mapped[Optional[str]] = mapped_column( - UUID(as_uuid=False), ForeignKey("project_analysis_run.id") - ) - project_analysis_run: Mapped[Optional["ProjectAnalysisRunModel"]] = relationship( - ProjectAnalysisRunModel, back_populates="quotes" - ) - - -class ViewModel(Base): - __tablename__ = "view" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - name: Mapped[str] = mapped_column(String) - summary: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - aspects: Mapped[List["AspectModel"]] = relationship("AspectModel", back_populates="view") - - project_analysis_run_id: Mapped[Optional[str]] = mapped_column( - UUID(as_uuid=False), ForeignKey("project_analysis_run.id") - ) - project_analysis_run: Mapped[Optional["ProjectAnalysisRunModel"]] = relationship( - ProjectAnalysisRunModel, back_populates="views" - ) - - processing_status: Mapped[ProcessingStatusEnum] = mapped_column(Text, default="PENDING") - processing_message: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - processing_error: Mapped[Optional[str]] = mapped_column(Text, 
nullable=True) - processing_started_at: Mapped[Optional[datetime]] = mapped_column( - DateTime(timezone=True), nullable=True - ) - processing_completed_at: Mapped[Optional[datetime]] = mapped_column( - DateTime(timezone=True), nullable=True - ) - - -class AspectModel(Base): - __tablename__ = "aspect" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - name: Mapped[str] = mapped_column(String) - description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - image_url: Mapped[Optional[str]] = mapped_column(String, nullable=True) - short_summary: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - long_summary: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - view_id: Mapped[Optional[str]] = mapped_column(UUID(as_uuid=False), ForeignKey("view.id")) - view: Mapped[Optional["ViewModel"]] = relationship("ViewModel", back_populates="aspects") - - quotes: Mapped[List["QuoteModel"]] = relationship( - "QuoteModel", back_populates="aspects", secondary=quote_aspect_association_table - ) - - representative_quotes: Mapped[List["QuoteModel"]] = relationship( - "QuoteModel", - back_populates="representative_aspects", - secondary=representative_quote_aspect_association_table, - ) - - centroid_embedding: Mapped[List[float]] = mapped_column(Vector(EMBEDDING_DIM), nullable=True) - - -## Depracated -class InsightModel(Base): - __tablename__ = "insight" - - id: Mapped[str] = mapped_column(UUID(as_uuid=False), primary_key=True) - created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now()) - updated_at: Mapped[datetime] = mapped_column( - DateTime(timezone=True), server_default=func.now(), onupdate=func.now() - ) - - title: Mapped[Optional[str]] = mapped_column(Text, 
nullable=False) - summary: Mapped[Optional[str]] = mapped_column(Text, nullable=True) - - quotes: Mapped[List["QuoteModel"]] = relationship("QuoteModel", back_populates="insight") - - project_analysis_run_id: Mapped[Optional[str]] = mapped_column( - UUID(as_uuid=False), ForeignKey("project_analysis_run.id") - ) - project_analysis_run: Mapped[Optional["ProjectAnalysisRunModel"]] = relationship( - ProjectAnalysisRunModel, back_populates="insights" - ) - - -### DO NOT USE -db = Session() -""" -use this instead: -``` -with Session() as db: - ... -``` -# this will automatically close the session after the block -""" - - -def get_db() -> Generator[_Session, None, None]: - logger.debug("Opening database connection") - db = Session() - try: - yield db - finally: - logger.debug("Closing database connection") - db.close() - - -DependencyInjectDatabase = Annotated[_Session, Depends(get_db, use_cache=False)] diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index 098467a6..ac4a8114 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -1,19 +1,17 @@ import time -from typing import Any, AsyncGenerator +from typing import Any, AsyncGenerator, Awaitable, Callable, cast from logging import getLogger from contextlib import asynccontextmanager import nest_asyncio -from fastapi import ( - FastAPI, - Request, - HTTPException, -) +from fastapi import FastAPI, Request, HTTPException from fastapi.staticfiles import StaticFiles from starlette.exceptions import HTTPException as StarletteHTTPException from starlette.middleware import Middleware from fastapi.openapi.utils import get_openapi from starlette.middleware.cors import CORSMiddleware +from starlette.responses import Response +from starlette.types import Scope from dembrane.settings import get_settings from dembrane.sentry import init_sentry @@ -80,7 +78,10 @@ async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]: @app.middleware("http") -async def 
add_process_time_header(request: Request, call_next): # type: ignore +async def add_process_time_header( + request: Request, + call_next: Callable[[Request], Awaitable[Response]], +) -> Response: start_time = time.time() response = await call_next(request) process_time = time.time() - start_time @@ -93,7 +94,7 @@ async def add_process_time_header(request: Request, call_next): # type: ignore class SPAStaticFiles(StaticFiles): - async def get_response(self, path: str, scope): # type: ignore + async def get_response(self, path: str, scope: Scope) -> Response: try: return await super().get_response(path, scope) except (HTTPException, StarletteHTTPException) as ex: @@ -116,7 +117,7 @@ def custom_openapi() -> Any: return app.openapi_schema -app.openapi = custom_openapi # type: ignore +app.openapi = cast(Callable[[], dict[str, Any]], custom_openapi) if __name__ == "__main__": diff --git a/echo/server/dembrane/prompts.py b/echo/server/dembrane/prompts.py index 146c7bf8..ba9d8991 100644 --- a/echo/server/dembrane/prompts.py +++ b/echo/server/dembrane/prompts.py @@ -15,10 +15,10 @@ import logging from typing import Any -from collections import defaultdict from pathlib import Path +from collections import defaultdict -from jinja2 import Environment, FileSystemLoader, DictLoader, select_autoescape +from jinja2 import BaseLoader, DictLoader, Environment, FileSystemLoader, select_autoescape from dembrane.settings import get_settings @@ -28,6 +28,8 @@ logger = logging.getLogger("prompts") prompt_templates_path = Path(PROMPT_TEMPLATES_DIR) +prompt_loader: BaseLoader + if prompt_templates_path.exists() and prompt_templates_path.is_dir(): prompt_loader = FileSystemLoader(prompt_templates_path) prompt_template_names = [ diff --git a/echo/server/dembrane/schemas.py b/echo/server/dembrane/schemas.py deleted file mode 100644 index 2582ca12..00000000 --- a/echo/server/dembrane/schemas.py +++ /dev/null @@ -1,201 +0,0 @@ -# this is not upto date. 
switched to directus for a better life -from enum import Enum -from typing import Any, List, Union, Optional -from datetime import datetime - -from pydantic import BaseModel - -from dembrane.database import ProcessingStatusEnum - - -class SessionSchema(BaseModel): - id: int - created_at: datetime - updated_at: datetime - - -class ProjectTagSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - project_id: str - - text: str - - -class ProjectSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - language: str - name: Optional[str] = None - context: Optional[str] = None - - tags: Optional[List[ProjectTagSchema]] = [] - - is_conversation_allowed: bool - default_conversation_title: Optional[str] = None - default_conversation_description: Optional[str] = None - default_conversation_finish_text: Optional[str] = None - - -class ResourceSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - project_id: str - original_filename: str - type: str # ResourceTypeEnum - - title: str - description: Optional[str] = None - context: Optional[str] = None - - is_processed: bool - processing_error: Optional[str] = None - - -class ConversationChunkSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - conversation_id: str - - transcript: Optional[str] = None - timestamp: datetime - - -class ConversationSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - project_id: str - - title: Optional[str] = None - description: Optional[str] = None - context: Optional[str] = None - - participant_email: Optional[str] = None - participant_name: Optional[str] = None - duration: Optional[float] = None - - tags: Optional[List[ProjectTagSchema]] = None - chunks: Optional[List[ConversationChunkSchema]] = None - - -class ChatMessageSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - chat_id: str - - text: str - role: str # ChatMessageRoleEnum - - -class 
ChatSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - project_id: Optional[str] = None - - resources: Optional[list[ResourceSchema]] = [] - conversations: Optional[list[ConversationSchema]] = [] - messages: Optional[list[ChatMessageSchema]] = [] - - -class QuoteSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - - project_analysis_run_id: str - conversation_id: str - - conversation_chunks: Optional[List[ConversationChunkSchema]] = [] - - text: str - - -class InsightSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - - project_analysis_run_id: str - - title: str - summary: Optional[str] = None - - quotes: Optional[List[QuoteSchema]] = [] - - -class AspectSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - - project_analysis_run_id: str - - name: str - description: Optional[str] = None - short_summary: Optional[str] = None - long_summary: Optional[str] = None - - image_url: Optional[str] = None - - view_id: Optional[str] = None - - quotes: Optional[List[QuoteSchema]] = [] - - -class ViewSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - - project_analysis_run_id: str - - name: str - summary: Optional[str] = None - - aspects: Optional[List["AspectSchema"]] = [] - - -class ProjectAnalysisRunSchema(BaseModel): - id: str - created_at: datetime - updated_at: datetime - - project_id: str - - quotes: Optional[List[QuoteSchema]] = [] - - processing_status: ProcessingStatusEnum - processing_error: Optional[str] = None - processing_started_at: Optional[datetime] = None - processing_completed_at: Optional[datetime] = None - - -class TaskStateEnum(str, Enum): - PENDING = "PENDING" - STARTED = "STARTED" - PROGRESS = "PROGRESS" - SUCCESS = "SUCCESS" - FAILURE = "FAILURE" - RETRY = "RETRY" - REVOKED = "REVOKED" - IGNORED = "IGNORED" - - -class TaskProgressMetaSchema(BaseModel): - current: int - total: int - percent: int - message: Optional[str] = None - - 
-class TaskSchema(BaseModel): - id: str - state: TaskStateEnum - meta: Optional[Union[TaskProgressMetaSchema, Any]] = None diff --git a/echo/server/dembrane/service/__init__.py b/echo/server/dembrane/service/__init__.py index 626bca6d..c4be0004 100644 --- a/echo/server/dembrane/service/__init__.py +++ b/echo/server/dembrane/service/__init__.py @@ -24,6 +24,12 @@ ConversationChunkNotFoundException, ConversationNotOpenForParticipationException, ) +from .chat import ( + ChatService, + ChatServiceException, + ChatNotFoundException, + ChatMessageNotFoundException, +) file_service = get_file_service() project_service = ProjectService() @@ -32,6 +38,8 @@ project_service=project_service, ) +chat_service = ChatService() + exceptions = { "file": { "FileServiceException": FileServiceException, @@ -42,6 +50,11 @@ "ConversationNotOpenForParticipationException": ConversationNotOpenForParticipationException, "ConversationServiceException": ConversationServiceException, }, + "chat": { + "ChatServiceException": ChatServiceException, + "ChatNotFoundException": ChatNotFoundException, + "ChatMessageNotFoundException": ChatMessageNotFoundException, + }, "project": { "ProjectNotFoundException": ProjectNotFoundException, "ProjectServiceException": ProjectServiceException, diff --git a/echo/server/dembrane/service/chat.py b/echo/server/dembrane/service/chat.py new file mode 100644 index 00000000..39df9956 --- /dev/null +++ b/echo/server/dembrane/service/chat.py @@ -0,0 +1,280 @@ +from typing import Any, Iterable, List, Optional +from logging import getLogger + +from dembrane.directus import DirectusBadRequest, directus_client_context + +logger = getLogger("dembrane.service.chat") + + +class ChatServiceException(Exception): + pass + + +class ChatNotFoundException(ChatServiceException): + pass + + +class ChatMessageNotFoundException(ChatServiceException): + pass + + +class ChatService: + def get_by_id_or_raise( + self, + chat_id: str, + with_used_conversations: bool = False, + ) -> dict: + 
fields = [ + "id", + "name", + "auto_select", + "project_id.id", + "project_id.directus_user_id", + ] + + deep: dict[str, Any] = {} + + if with_used_conversations: + fields.extend( + [ + "used_conversations.id", + "used_conversations.conversation_id.id", + "used_conversations.conversation_id.participant_name", + ] + ) + deep["used_conversations"] = {"_sort": "id"} + + try: + with directus_client_context() as client: + chat_list: Optional[List[dict]] = client.get_items( + "project_chat", + { + "query": { + "filter": {"id": {"_eq": chat_id}}, + "fields": fields, + "deep": deep, + "limit": 1, + } + }, + ) + except DirectusBadRequest as e: + logger.error("Failed to fetch chat %s from Directus: %s", chat_id, e) + raise ChatServiceException() from e + + if not chat_list: + raise ChatNotFoundException(f"Chat {chat_id} not found") + + return chat_list[0] + + def list_messages( + self, + chat_id: str, + *, + include_relationships: bool = True, + order: str = "asc", + ) -> List[dict]: + fields = [ + "id", + "project_chat_id", + "message_from", + "text", + "tokens_count", + "template_key", + "date_created", + ] + + deep: dict[str, Any] = {} + + if include_relationships: + fields.extend( + [ + "used_conversations.id", + "used_conversations.conversation_id.id", + "used_conversations.conversation_id.participant_name", + "used_conversations.conversation_id.summary", + "used_conversations.conversation_id.duration", + "added_conversations.id", + "added_conversations.conversation_id.id", + "added_conversations.conversation_id.participant_name", + ] + ) + deep = { + "used_conversations": {"_sort": "id"}, + "added_conversations": {"_sort": "id"}, + } + + sort_value = "date_created" if order.lower() != "desc" else "-date_created" + + try: + with directus_client_context() as client: + messages: Optional[List[dict]] = client.get_items( + "project_chat_message", + { + "query": { + "filter": {"project_chat_id": {"_eq": chat_id}}, + "fields": fields, + "deep": deep, + "limit": 1000, + 
"sort": sort_value, + } + }, + ) + except DirectusBadRequest as e: + logger.error("Failed to list messages for chat %s: %s", chat_id, e) + raise ChatServiceException() from e + + return messages or [] + + def set_auto_select(self, chat_id: str, value: bool) -> dict: + try: + with directus_client_context() as client: + return client.update_item( + "project_chat", + chat_id, + {"auto_select": bool(value)}, + )["data"] + except DirectusBadRequest as e: + logger.error("Failed to update auto_select for chat %s: %s", chat_id, e) + raise ChatServiceException() from e + + def set_chat_name(self, chat_id: str, name: Optional[str]) -> dict: + try: + with directus_client_context() as client: + return client.update_item( + "project_chat", + chat_id, + {"name": name}, + )["data"] + except DirectusBadRequest as e: + logger.error("Failed to update chat name for %s: %s", chat_id, e) + raise ChatServiceException() from e + + def attach_conversations(self, chat_id: str, conversation_ids: Iterable[str]) -> None: + payload_list = [ + {"conversation_id": conversation_id} + for conversation_id in conversation_ids + ] + + if not payload_list: + return + + try: + with directus_client_context() as client: + client.update_item( + "project_chat", + chat_id, + {"used_conversations": {"create": payload_list}}, + ) + except DirectusBadRequest as e: + logger.error( + "Failed to attach conversations %s to chat %s: %s", + list(conversation_ids), + chat_id, + e, + ) + raise ChatServiceException() from e + + def detach_conversation(self, chat_id: str, conversation_id: str) -> None: + try: + with directus_client_context() as client: + links: Optional[List[dict]] = client.get_items( + "project_chat_conversation", + { + "query": { + "filter": { + "project_chat_id": {"_eq": chat_id}, + "conversation_id": {"_eq": conversation_id}, + }, + "fields": ["id"], + "limit": 20, + } + }, + ) + + for link in links or []: + link_id = link.get("id") + if link_id: + client.delete_item("project_chat_conversation", 
link_id) + except DirectusBadRequest as e: + logger.error( + "Failed to detach conversation %s from chat %s: %s", + conversation_id, + chat_id, + e, + ) + raise ChatServiceException() from e + + def create_message( + self, + chat_id: str, + message_from: str, + text: str, + *, + message_id: Optional[str] = None, + template_key: Optional[str] = None, + used_conversation_ids: Optional[Iterable[str]] = None, + added_conversation_ids: Optional[Iterable[str]] = None, + extra_fields: Optional[dict[str, Any]] = None, + ) -> dict: + payload: dict[str, Any] = { + "project_chat_id": chat_id, + "message_from": message_from, + "text": text, + } + + if message_id is not None: + payload["id"] = message_id + + if template_key is not None: + payload["template_key"] = template_key + + if extra_fields: + payload.update(extra_fields) + + used_ids = list(used_conversation_ids or []) + if used_ids: + payload.setdefault("used_conversations", {})["create"] = [ + {"conversation_id": conversation_id} + for conversation_id in used_ids + ] + + added_ids = list(added_conversation_ids or []) + if added_ids: + payload.setdefault("added_conversations", {})["create"] = [ + {"conversation_id": conversation_id} + for conversation_id in added_ids + ] + + try: + with directus_client_context() as client: + message = client.create_item( + "project_chat_message", + item_data=payload, + )["data"] + except DirectusBadRequest as e: + logger.error("Failed to create message in chat %s: %s", chat_id, e) + raise ChatServiceException() from e + + return message + + def update_message(self, message_id: str, update_data: dict[str, Any]) -> dict: + try: + with directus_client_context() as client: + message = client.update_item( + "project_chat_message", + message_id, + update_data, + )["data"] + except DirectusBadRequest as e: + logger.error("Failed to update message %s: %s", message_id, e) + raise ChatServiceException() from e + + return message + + def delete_message(self, message_id: str) -> None: + try: + with 
directus_client_context() as client: + client.delete_item("project_chat_message", message_id) + except DirectusBadRequest as e: + logger.error("Failed to delete message %s: %s", message_id, e) + raise ChatServiceException() from e diff --git a/echo/server/dembrane/service/conversation.py b/echo/server/dembrane/service/conversation.py index 07fd89bc..0c710234 100644 --- a/echo/server/dembrane/service/conversation.py +++ b/echo/server/dembrane/service/conversation.py @@ -1,5 +1,5 @@ # conversation.py -from typing import TYPE_CHECKING, Any, List, Optional +from typing import TYPE_CHECKING, Any, List, Iterable, Optional from logging import getLogger from datetime import datetime from urllib.parse import urlparse @@ -89,6 +89,66 @@ def get_by_id_or_raise( except (KeyError, IndexError) as e: raise ConversationNotFoundException() from e + def list_by_project( + self, + project_id: str, + with_chunks: bool = False, + with_tags: bool = False, + ) -> List[dict]: + return self._list_conversations( + filter_query={"project_id": {"_eq": project_id}}, + with_chunks=with_chunks, + with_tags=with_tags, + ) + + def list_by_ids( + self, + conversation_id_list: Iterable[str], + with_chunks: bool = False, + with_tags: bool = False, + ) -> List[dict]: + ids = [conversation_id for conversation_id in conversation_id_list] + if not ids: + return [] + + return self._list_conversations( + filter_query={"id": {"_in": ids}}, + with_chunks=with_chunks, + with_tags=with_tags, + ) + + def list_chunks(self, conversation_id: str) -> List[dict]: + try: + with directus_client_context() as client: + chunks: Optional[List[dict]] = client.get_items( + "conversation_chunk", + { + "query": { + "filter": {"conversation_id": {"_eq": conversation_id}}, + "fields": [ + "id", + "conversation_id", + "timestamp", + "transcript", + "path", + "created_at", + "updated_at", + ], + "sort": "timestamp", + "limit": 2000, + } + }, + ) + except DirectusBadRequest as e: + logger.error( + "Failed to list chunks for 
conversation %s via Directus: %s", + conversation_id, + e, + ) + raise ConversationServiceException() from e + + return chunks or [] + def create( self, project_id: str, @@ -261,12 +321,14 @@ def create_chunk( file_url = self.file_service.save(file=file_obj, key=file_name, public=False) logger.info(f"File uploaded to S3 via API: {sanitize_url_for_logging(file_url)}") elif file_url: - logger.info(f"Using pre-uploaded file from presigned URL: {sanitize_url_for_logging(file_url)}") + logger.info( + f"Using pre-uploaded file from presigned URL: {sanitize_url_for_logging(file_url)}" + ) # Validate that we have either a file or a transcript has_file = file_url and len(file_url.strip()) > 0 has_transcript = transcript and len(transcript.strip()) > 0 - + if not has_file and not has_transcript: logger.error( f"Cannot create chunk without content. " @@ -285,7 +347,7 @@ def create_chunk( "id": chunk_id, "conversation_id": conversation["id"], "timestamp": timestamp.isoformat(), - "path": file_url, + "path": file_url, "source": source, "transcript": transcript, }, @@ -436,3 +498,66 @@ def get_chunk_counts( "pending": pending, "ok": ok, } + + def _list_conversations( + self, + filter_query: dict[str, Any], + with_chunks: bool = False, + with_tags: bool = False, + ) -> List[dict]: + fields: List[str] = [ + "id", + "project_id", + "participant_name", + "participant_email", + "participant_user_agent", + "created_at", + "updated_at", + "duration", + "summary", + "source", + "is_finished", + "is_all_chunks_transcribed", + ] + + deep: dict[str, Any] = {} + + if with_tags: + fields.extend( + [ + "tags.id", + "tags.project_tag_id.id", + "tags.project_tag_id.text", + ] + ) + deep.setdefault("tags", {}) + + if with_chunks: + fields.extend( + [ + "chunks.id", + "chunks.timestamp", + "chunks.transcript", + "chunks.path", + ] + ) + deep["chunks"] = {"_sort": "timestamp"} + + try: + with directus_client_context() as client: + conversations: Optional[List[dict]] = client.get_items( + 
"conversation", + { + "query": { + "filter": filter_query, + "fields": fields, + "deep": deep, + "limit": 1000, + } + }, + ) + except DirectusBadRequest as e: + logger.error("Failed to list conversations via Directus: %s", e) + raise ConversationServiceException() from e + + return conversations or [] diff --git a/echo/server/dembrane/service/project.py b/echo/server/dembrane/service/project.py index 4e21e38f..700260c3 100644 --- a/echo/server/dembrane/service/project.py +++ b/echo/server/dembrane/service/project.py @@ -1,5 +1,5 @@ # project.py -from typing import Any, List +from typing import Any, List, Optional from logging import getLogger from dembrane.directus import DirectusBadRequest, directus_client_context @@ -75,6 +75,51 @@ def create( return project + def get_latest_analysis_run(self, project_id: str) -> Optional[dict]: + try: + with directus_client_context() as client: + runs: Optional[List[dict]] = client.get_items( + "project_analysis_run", + { + "query": { + "filter": {"project_id": project_id}, + "sort": "-created_at", + "limit": 1, + } + }, + ) + except DirectusBadRequest as e: + logger.error("Failed to fetch analysis run for %s: %s", project_id, e) + return None + + if not runs: + return None + + return runs[0] + + def create_report( + self, + project_id: str, + language: str, + content: str, + status: str = "archived", + error_code: Optional[str] = None, + ) -> dict: + payload = { + "project_id": project_id, + "language": language, + "content": content, + "status": status, + } + + if error_code is not None: + payload["error_code"] = error_code + + with directus_client_context() as client: + report = client.create_item("project_report", item_data=payload)["data"] + + return report + def delete( self, project_id: str, diff --git a/echo/server/dembrane/tasks.py b/echo/server/dembrane/tasks.py index 1e302fe9..9fb35a28 100644 --- a/echo/server/dembrane/tasks.py +++ b/echo/server/dembrane/tasks.py @@ -207,7 +207,7 @@ def 
task_merge_conversation_chunks(conversation_id: str) -> None: return_url=True, ) ) - except NoContentFoundException: # type: ignore + except NoContentFoundException: logger.info( f"No valid content found for conversation {conversation_id}; skipping merge task." ) diff --git a/echo/server/pyproject.toml b/echo/server/pyproject.toml index 4dc93eb1..84613f27 100644 --- a/echo/server/pyproject.toml +++ b/echo/server/pyproject.toml @@ -23,11 +23,6 @@ dependencies = [ "jinja2==3.1.*", "ffmpeg-python>=0.2.0", "flower>=2.0.1", - # Database - "SQLAlchemy==2.0.*", - "alembic==1.13.*", - "psycopg[binary,pool]==3.1.*", - "pgvector==0.2.*", "directus-py-sdk==1.1.1", # Config "python-dotenv==1.0.*", @@ -63,7 +58,6 @@ dependencies = [ "setuptools==75.8.0", # LLM Tools "tiktoken==0.9.0", - "asyncpg==0.30.0", "boto3==1.37.*", # Uncategorized "mypy>=1.16.0", diff --git a/echo/server/tests/test_quote_utils.py b/echo/server/tests/test_quote_utils.py deleted file mode 100644 index 09d13fa4..00000000 --- a/echo/server/tests/test_quote_utils.py +++ /dev/null @@ -1,277 +0,0 @@ -# ruff: noqa: F821 -import random -import logging -import datetime -from typing import List - -import numpy as np -from sqlalchemy.orm import Session - -from dembrane.utils import generate_uuid -from dembrane.database import ( - QuoteModel, - ProcessingStatusEnum, - ProjectAnalysisRunModel, - get_db, -) -from dembrane.embedding import EMBEDDING_DIM -from dembrane.quote_utils import get_random_sample_quotes - -from .common import ( - create_project, - delete_project, - create_conversation, - delete_conversation, -) - -logger = logging.getLogger("test_quote_utils") - - -def test_create_test_quotes( - db: Session, project_analysis_run_id: str, conversation_id: str, count: int = 10 -) -> List[QuoteModel]: - """ - Helper function to create test quotes for testing. 
- """ - quotes = [] - sample_texts = [ - "This is a test quote about feature A", - "I really like the user interface", - "The performance could be better", - "Navigation is very intuitive", - "I found a bug in the search function", - "The app crashed when I tried to upload a file", - "Documentation needs improvement", - "Response time is excellent", - "Customer support was helpful", - "I would recommend this product to others", - "The mobile experience is not as good as desktop", - "It's hard to find the settings menu", - "The update fixed most of my issues", - "I'm confused by the workflow", - "Security features are robust", - ] - - # Get current time as UTC datetime object - current_time = datetime.datetime.now(datetime.timezone.utc) - - for i in range(count): - # Use sample texts in rotation, or generate random text for variety - if i < len(sample_texts): - text = sample_texts[i] - else: - text = f"Test quote {i} with some random content {random.randint(1000, 9999)}" - - # Use random embedding vector instead of embedding the text - # This avoids dependency on the embedding service during tests - embedding = np.random.randn(EMBEDDING_DIM).tolist() - - # Create a quote with embedded vector - quote = QuoteModel( - id=generate_uuid(), - created_at=current_time, - project_analysis_run_id=project_analysis_run_id, - conversation_id=conversation_id, - text=text, - embedding=embedding, - timestamp=current_time, - order=i, - ) - - quotes.append(quote) - - # Bulk insert quotes - db.add_all(quotes) - db.commit() - - return quotes - - -def test_get_random_sample_quotes(): - """ - Test the get_random_sample_quotes function to ensure it returns quotes - within the context limit and with proper distribution. 
- """ - # Create a test project - project = create_project( - "test_quote_utils_project", - "en", - ) - - # Create a test conversation - conversation = create_conversation( - project["id"], - "test_conversation", - ) - - db = next(get_db()) - try: - # Create a project analysis run - project_analysis_run = ProjectAnalysisRunModel( - id=generate_uuid(), - project_id=project["id"], - processing_status=ProcessingStatusEnum.PROCESSING, - ) - db.add(project_analysis_run) - db.commit() - - # Create test quotes with embeddings - quotes = create_test_quotes(db, project_analysis_run.id, conversation["id"], count=20) - - # Test with small context limit to ensure it's respected - small_context_limit = 100 - small_sample = get_random_sample_quotes( - db, project_analysis_run.id, context_limit=small_context_limit - ) - - total_tokens_small = sum(len(quote.text.split()) for quote in small_sample) - logger.info( - f"Small context limit: {small_context_limit}, tokens used: {total_tokens_small}" - ) - assert len(small_sample) > 0, "Should return at least some quotes" - - # Test with larger context limit - large_context_limit = 10000 - large_sample = get_random_sample_quotes( - db, project_analysis_run.id, context_limit=large_context_limit - ) - - total_tokens_large = sum(len(quote.text.split()) for quote in large_sample) - logger.info( - f"Large context limit: {large_context_limit}, tokens used: {total_tokens_large}" - ) - assert len(large_sample) >= len(small_sample), "Larger context should allow more quotes" - - # Clean up - for quote in quotes: - db.delete(quote) - db.delete(project_analysis_run) - db.commit() - finally: - db.close() - - delete_conversation(conversation["id"]) - delete_project(project["id"]) - - -def test_random_vectors_selection(): - """ - Test that the random vectors selection part of get_random_sample_quotes - is working correctly. 
- """ - # Create a test project - project = create_project( - "test_quote_utils_vectors", - "en", - ) - - # Create multiple test conversations to test per-conversation selection - conversation1 = create_conversation( - project["id"], - "test_conversation1", - ) - - conversation2 = create_conversation( - project["id"], - "test_conversation2", - ) - - db = next(get_db()) - try: - # Create a project analysis run - project_analysis_run = ProjectAnalysisRunModel( - id=generate_uuid(), - project_id=project["id"], - processing_status=ProcessingStatusEnum.PROCESSING, - ) - db.add(project_analysis_run) - db.commit() - - # Create test quotes for each conversation - quotes1 = create_test_quotes(db, project_analysis_run.id, conversation1["id"], count=15) - - quotes2 = create_test_quotes(db, project_analysis_run.id, conversation2["id"], count=15) - - # Test the function multiple times to check distribution - all_quotes = quotes1 + quotes2 - quote_selection_counts = {quote.id: 0 for quote in all_quotes} - - # Run multiple samples and track which quotes are selected - num_samples = 10 - context_limit = 5000 # Large enough to get a good sample - - for _ in range(num_samples): - sample = get_random_sample_quotes( - db, project_analysis_run.id, context_limit=context_limit - ) - - # Count how many times each quote is selected - for quote in sample: - if quote.id in quote_selection_counts: - quote_selection_counts[quote.id] += 1 - - # Verify at least some quotes from each conversation were selected - quotes1_ids = {q.id for q in quotes1} - quotes2_ids = {q.id for q in quotes2} - - selected_from_conv1 = any(quote_selection_counts[qid] > 0 for qid in quotes1_ids) - selected_from_conv2 = any(quote_selection_counts[qid] > 0 for qid in quotes2_ids) - - assert selected_from_conv1, "Should select at least some quotes from conversation 1" - assert selected_from_conv2, "Should select at least some quotes from conversation 2" - - # Check that l2_distance() in vector comparison is working - # 
This tests the vector similarity search part of the function - sample = get_random_sample_quotes(db, project_analysis_run.id, context_limit=context_limit) - - # There should be quotes in the sample, which verifies the vector similarity search works - assert len(sample) > 0, "Vector similarity search should return quotes" - - # Clean up - for quote in all_quotes: - db.delete(quote) - db.delete(project_analysis_run) - db.commit() - finally: - db.close() - - delete_conversation(conversation1["id"]) - delete_conversation(conversation2["id"]) - delete_project(project["id"]) - - -def test_empty_project(): - """ - Test behavior when project has no quotes. - """ - # Create a test project - project = create_project( - "test_empty_project", - "en", - ) - - db = next(get_db()) - try: - # Create a project analysis run - project_analysis_run = ProjectAnalysisRunModel( - id=generate_uuid(), - project_id=project["id"], - processing_status=ProcessingStatusEnum.PROCESSING, - ) - db.add(project_analysis_run) - db.commit() - - # Test with no quotes - sample = get_random_sample_quotes(db, project_analysis_run.id, context_limit=1000) - - # Should return empty list with no errors - assert isinstance(sample, list), "Should return a list" - assert len(sample) == 0, "Should return an empty list for a project with no quotes" - - # Clean up - db.delete(project_analysis_run) - db.commit() - finally: - db.close() - - delete_project(project["id"]) From 6245eb3f8a58b918949721f395bb8bf9386ed060 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Wed, 12 Nov 2025 17:05:00 +0000 Subject: [PATCH 20/23] fix --- echo/frontend/src/routes/auth/Login.tsx | 17 ++- echo/server/.env.sample | 9 ++ echo/server/dembrane/api/chat.py | 6 +- echo/server/dembrane/api/conversation.py | 2 +- echo/server/dembrane/api/verify.py | 22 ++-- echo/server/dembrane/chat_utils.py | 2 +- echo/server/dembrane/llms.py | 28 +++-- echo/server/dembrane/prompts.py | 41 +++++-- echo/server/dembrane/reply_utils.py | 6 +- 
echo/server/dembrane/report_utils.py | 7 +- echo/server/dembrane/scheduler.py | 9 +- echo/server/dembrane/service/__init__.py | 3 + echo/server/dembrane/service/conversation.py | 57 +++++++--- echo/server/dembrane/service/events.py | 28 +++++ echo/server/dembrane/settings.py | 89 +++++++++++---- echo/server/dembrane/transcribe.py | 30 +++-- echo/server/run-worker-cpu.sh | 2 +- echo/server/run-worker.sh | 2 +- echo/server/uv.lock | 114 ------------------- 19 files changed, 253 insertions(+), 221 deletions(-) create mode 100644 echo/server/dembrane/service/events.py diff --git a/echo/frontend/src/routes/auth/Login.tsx b/echo/frontend/src/routes/auth/Login.tsx index 983a847a..62088c2d 100644 --- a/echo/frontend/src/routes/auth/Login.tsx +++ b/echo/frontend/src/routes/auth/Login.tsx @@ -84,9 +84,10 @@ export const LoginRoute = () => { }) => { if (loginMutation.isPending) return; + const trimmedOtp = data.otp?.trim(); + try { setError(""); - const trimmedOtp = data.otp?.trim(); if (otpRequired && (!trimmedOtp || trimmedOtp.length < 6)) { setError(t`Enter the 6-digit code from your authenticator app.`); @@ -133,11 +134,15 @@ export const LoginRoute = () => { if (code === "INVALID_OTP") { setOtpRequired(true); - setError( - t`That code didn't work. Try again with a fresh code from your authenticator app.`, - ); - setValue("otp", ""); - setOtpValue(""); + if (trimmedOtp && trimmedOtp.length > 0) { + setError( + t`That code didn't work. 
Try again with a fresh code from your authenticator app.`, + ); + setValue("otp", ""); + setOtpValue(""); + } else { + setError(""); + } return; } diff --git a/echo/server/.env.sample b/echo/server/.env.sample index 085d16fe..d51a787e 100644 --- a/echo/server/.env.sample +++ b/echo/server/.env.sample @@ -55,16 +55,25 @@ GCP_SA_JSON= LLM__MULTI_MODAL_PRO__MODEL=vertex_ai/gemini-2.5-pro LLM__MULTI_MODAL_PRO__API_BASE=https://europe-west1-aiplatform.googleapis.com LLM__MULTI_MODAL_PRO__API_VERSION= +LLM__MULTI_MODAL_PRO__GCP_SA_JSON=${GCP_SA_JSON} +LLM__MULTI_MODAL_PRO__VERTEX_PROJECT= +LLM__MULTI_MODAL_PRO__VERTEX_LOCATION=europe-west1 # Multi-modal Fast – Gemini Flash (Vertex) LLM__MULTI_MODAL_FAST__MODEL=vertex_ai/gemini-2.5-flash LLM__MULTI_MODAL_FAST__API_BASE=https://europe-west1-aiplatform.googleapis.com LLM__MULTI_MODAL_FAST__API_VERSION= +LLM__MULTI_MODAL_FAST__GCP_SA_JSON=${GCP_SA_JSON} +LLM__MULTI_MODAL_FAST__VERTEX_PROJECT= +LLM__MULTI_MODAL_FAST__VERTEX_LOCATION=europe-west1 # Text Fast – Claude Sonnet on Vertex LLM__TEXT_FAST__MODEL=vertex_ai/claude-3-5-sonnet-20241022 LLM__TEXT_FAST__API_BASE=https://europe-west1-aiplatform.googleapis.com LLM__TEXT_FAST__API_VERSION= +LLM__TEXT_FAST__GCP_SA_JSON=${GCP_SA_JSON} +LLM__TEXT_FAST__VERTEX_PROJECT= +LLM__TEXT_FAST__VERTEX_LOCATION=europe-west1 ############################################################ # Embedding configuration diff --git a/echo/server/dembrane/api/chat.py b/echo/server/dembrane/api/chat.py index 0d7ae882..eacff24e 100644 --- a/echo/server/dembrane/api/chat.py +++ b/echo/server/dembrane/api/chat.py @@ -173,7 +173,7 @@ async def get_chat_context(chat_id: str, auth: DependencyDirectusSession) -> Cha if tokens_count is None: tokens_count = token_counter( messages=[{"role": message_from, "content": message_text}], - **get_completion_kwargs(MODELS.TEXT_FAST), + **get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) try: await run_in_thread_pool( @@ -568,7 +568,7 @@ async def 
build_formatted_messages(conversation_ids: Iterable[str]) -> List[Dict candidate_messages = await build_formatted_messages(temp_ids) prompt_len = token_counter( messages=candidate_messages, - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], ) if prompt_len > max_context_threshold: @@ -605,7 +605,7 @@ async def build_formatted_messages(conversation_ids: Iterable[str]) -> List[Dict prompt_len = token_counter( messages=formatted_messages, - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], ) if prompt_len > MAX_CHAT_CONTEXT_LENGTH: raise HTTPException( diff --git a/echo/server/dembrane/api/conversation.py b/echo/server/dembrane/api/conversation.py index ba7d4f31..41076790 100644 --- a/echo/server/dembrane/api/conversation.py +++ b/echo/server/dembrane/api/conversation.py @@ -401,7 +401,7 @@ async def get_conversation_token_count( token_count = token_counter( messages=[{"role": "user", "content": transcript}], - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO), + **get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], ) # Store the result in the cache diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index a65247fd..3a1b900e 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -581,7 +581,6 @@ async def generate_verification_artifacts( "content": message_content, }, ], - vertex_credentials=GCP_SA_JSON, **completion_kwargs, ) except Exception as exc: @@ -696,12 +695,12 @@ async def update_verification_artifact( revision_completion_kwargs = get_completion_kwargs(MODELS.MULTI_MODAL_PRO) try: - response = litellm.completion( - messages=[ - { - "role": "system", - "content": [ - { + response = litellm.completion( + messages=[ + { + "role": "system", + "content": [ + { "type": "text", "text": system_prompt, } @@ -710,11 +709,10 @@ async def update_verification_artifact( { "role": 
"user", "content": message_content, - }, - ], - vertex_credentials=GCP_SA_JSON, - **revision_completion_kwargs, - ) + }, + ], + **revision_completion_kwargs, + ) except Exception as exc: # pragma: no cover - external failure logger.error("Gemini revision failed: %s", exc, exc_info=True) raise HTTPException( diff --git a/echo/server/dembrane/chat_utils.py b/echo/server/dembrane/chat_utils.py index b9a32c25..24009029 100644 --- a/echo/server/dembrane/chat_utils.py +++ b/echo/server/dembrane/chat_utils.py @@ -450,7 +450,7 @@ async def _process_single_batch( try: prompt_tokens = token_counter( messages=[{"role": "user", "content": prompt}], - **get_completion_kwargs(MODELS.TEXT_FAST), + **get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) MAX_BATCH_CONTEXT = 100000 # Leave headroom for response diff --git a/echo/server/dembrane/llms.py b/echo/server/dembrane/llms.py index 56a1152d..3dd60dbf 100644 --- a/echo/server/dembrane/llms.py +++ b/echo/server/dembrane/llms.py @@ -1,5 +1,6 @@ from __future__ import annotations +import json import logging from enum import Enum from typing import Any, Dict @@ -31,17 +32,24 @@ def get_completion_kwargs(model: MODELS, **overrides: Any) -> Dict[str, Any]: provider = getattr(settings.llms, attr, None) if provider is None: raise ValueError(f"No configuration found for model group {model.value}.") - if not provider.model: - raise ValueError(f"Model name is not configured for {model.value}") - kwargs: Dict[str, Any] = {"model": provider.model} - - if provider.api_key: - kwargs["api_key"] = provider.api_key - if provider.api_base: - kwargs["api_base"] = provider.api_base - if provider.api_version: - kwargs["api_version"] = provider.api_version + resolved = provider.resolve() + + kwargs: Dict[str, Any] = {"model": resolved.model} + + if resolved.api_key: + kwargs["api_key"] = resolved.api_key + if resolved.api_base: + kwargs["api_base"] = resolved.api_base + if resolved.api_version: + kwargs["api_version"] = resolved.api_version + 
vertex_credentials = resolved.vertex_credentials or settings.transcription.gcp_sa_json + if vertex_credentials: + kwargs["vertex_credentials"] = json.dumps(vertex_credentials) + if resolved.vertex_project: + kwargs["vertex_project"] = resolved.vertex_project + if resolved.vertex_location: + kwargs["vertex_location"] = resolved.vertex_location # Allow callers to override any field (e.g., temperature, max_tokens) kwargs.update(overrides) diff --git a/echo/server/dembrane/prompts.py b/echo/server/dembrane/prompts.py index ba9d8991..ad9d50d2 100644 --- a/echo/server/dembrane/prompts.py +++ b/echo/server/dembrane/prompts.py @@ -56,19 +56,38 @@ name, lang, _ = template_name.rsplit(".", 2) template_support[name].add(lang) -# Log the template support matrix -header = "Name | de | en | es | fr | nl" -separator = "-" * len(header) -rows = [] -for name, languages in template_support.items(): - # Pad the name to 19 characters to align with header - padded_name = f"{name[:30]}{' ' * (30 - len(name[:30]))}" - row = f"{padded_name}| " + " | ".join( - " y " if lang in languages else " n " for lang in ["de", "en", "es", "fr", "nl"] +logger.info("Loaded %d prompt templates", len(PROMPT_TEMPLATE_LIST)) + + +def log_template_support(level: int = logging.DEBUG) -> None: + """Emit the language availability matrix for prompt templates.""" + if not template_support: + logger.log(level, "No prompt templates available to display language support") + return + + header = "Name | de | en | es | fr | nl" + separator = "-" * len(header) + rows = [] + templates_payload = [] + for name, languages in sorted(template_support.items()): + padded_name = f"{name[:30]}{' ' * (30 - len(name[:30]))}" + row = f"{padded_name}| " + " | ".join( + " y " if lang in languages else " n " for lang in ["de", "en", "es", "fr", "nl"] + ) + rows.append(row) + templates_payload.append({"name": name, "languages": sorted(languages)}) + + message = f"{header}\n{separator}\n" + "\n".join(rows) + logger.log( + level, + 
"Prompt template language support matrix:\n%s", + message, + extra={"prompt_template_languages": templates_payload}, ) - rows.append(row) -logger.info(f"Loaded {len(rows)} prompt templates:\n{header}\n{separator}\n" + "\n".join(rows)) + +if settings.feature_flags.debug_mode: + log_template_support(level=logging.INFO) def render_prompt(prompt_name: str, language: str, kwargs: dict[str, Any]) -> str: diff --git a/echo/server/dembrane/reply_utils.py b/echo/server/dembrane/reply_utils.py index 994d1e26..7faac8d6 100644 --- a/echo/server/dembrane/reply_utils.py +++ b/echo/server/dembrane/reply_utils.py @@ -237,7 +237,7 @@ async def generate_reply_for_conversation( formatted_conv = format_conversation(c) tokens = token_counter( messages=[{"role": "user", "content": formatted_conv}], - **get_completion_kwargs(MODELS.TEXT_FAST), + **get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) candidate_conversations.append((formatted_conv, tokens)) @@ -260,7 +260,7 @@ async def generate_reply_for_conversation( formatted_conv = format_conversation(c) tokens = token_counter( messages=[{"role": "user", "content": formatted_conv}], - **get_completion_kwargs(MODELS.TEXT_FAST), + **get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) # If conversation is too large, truncate it @@ -272,7 +272,7 @@ async def generate_reply_for_conversation( formatted_conv = format_conversation(c) tokens = token_counter( messages=[{"role": "user", "content": formatted_conv}], - **get_completion_kwargs(MODELS.TEXT_FAST), + **get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) candidate_conversations.append((formatted_conv, tokens)) diff --git a/echo/server/dembrane/report_utils.py b/echo/server/dembrane/report_utils.py index 6906b506..7af2dd80 100644 --- a/echo/server/dembrane/report_utils.py +++ b/echo/server/dembrane/report_utils.py @@ -2,7 +2,7 @@ import logging from litellm import completion -from litellm.utils import get_max_tokens, token_counter +from litellm.utils import token_counter, get_max_tokens 
from dembrane.llms import MODELS, get_completion_kwargs from dembrane.prompts import render_prompt @@ -14,7 +14,6 @@ TEXT_PROVIDER_KWARGS = get_completion_kwargs(MODELS.TEXT_FAST) TEXT_PROVIDER_MODEL = TEXT_PROVIDER_KWARGS["model"] -TOKEN_COUNT_KWARGS = TEXT_PROVIDER_KWARGS.copy() _max_tokens = get_max_tokens(TEXT_PROVIDER_MODEL) @@ -74,7 +73,7 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: # Count tokens before adding summary_tokens = token_counter( messages=[{"role": "user", "content": conversation["summary"]}], - **TOKEN_COUNT_KWARGS, + **get_completion_kwargs(MODELS.TEXT_FAST)["model_kwargs"], ) # Check if adding this conversation would exceed the limit @@ -132,7 +131,7 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: # Calculate token count for the transcript transcript_tokens = token_counter( messages=[{"role": "user", "content": transcript}], - **TOKEN_COUNT_KWARGS, + **get_completion_kwargs(MODELS.TEXT_FAST)["model_kwargs"], ) if token_count + transcript_tokens < MAX_REPORT_CONTEXT_LENGTH: diff --git a/echo/server/dembrane/scheduler.py b/echo/server/dembrane/scheduler.py index 6d7ea29b..a3497679 100644 --- a/echo/server/dembrane/scheduler.py +++ b/echo/server/dembrane/scheduler.py @@ -1,13 +1,13 @@ +from logging import getLogger + from pytz import utc from apscheduler.triggers.cron import CronTrigger from apscheduler.jobstores.memory import MemoryJobStore from apscheduler.schedulers.blocking import BlockingScheduler -# from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore from dembrane.settings import get_settings jobstores = { - # "default": SQLAlchemyJobStore(url=DATABASE_URL), "default": MemoryJobStore(), } @@ -20,12 +20,15 @@ # Add periodic tasks scheduler.add_job( func="dembrane.tasks:task_collect_and_finish_unfinished_conversations.send", - trigger=CronTrigger(minute="*/1"), + trigger=CronTrigger(minute="*/2"), id="task_collect_and_finish_unfinished_conversations", 
name="Collect and finish unfinished conversations", replace_existing=True, ) +logger = getLogger("dembrane.scheduler") + # Start the scheduler when this module is run directly if __name__ == "__main__": + logger.info("Starting scheduler") scheduler.start() diff --git a/echo/server/dembrane/service/__init__.py b/echo/server/dembrane/service/__init__.py index c4be0004..76c0cdee 100644 --- a/echo/server/dembrane/service/__init__.py +++ b/echo/server/dembrane/service/__init__.py @@ -30,12 +30,15 @@ ChatNotFoundException, ChatMessageNotFoundException, ) +from .events import EventService file_service = get_file_service() project_service = ProjectService() +event_service = EventService() conversation_service = ConversationService( file_service=file_service, project_service=project_service, + event_service=event_service, ) chat_service = ChatService() diff --git a/echo/server/dembrane/service/conversation.py b/echo/server/dembrane/service/conversation.py index 0c710234..cb2695c4 100644 --- a/echo/server/dembrane/service/conversation.py +++ b/echo/server/dembrane/service/conversation.py @@ -7,13 +7,15 @@ from fastapi import UploadFile from dembrane.utils import generate_uuid -from dembrane.directus import DirectusBadRequest, directus_client_context +from dembrane.directus import DirectusBadRequest, DirectusGenericException, directus_client_context +from dembrane.service.events import ChunkCreatedEvent logger = getLogger("dembrane.service.conversation") if TYPE_CHECKING: from dembrane.service.file import FileService from dembrane.service.project import ProjectService + from dembrane.service.events import EventService # allows for None to be a sentinel value _UNSET = object() @@ -44,11 +46,37 @@ class ConversationChunkNotFoundException(ConversationServiceException): class ConversationService: def __init__( self, - file_service: "FileService", - project_service: "ProjectService", + file_service: Optional["FileService"] = None, + project_service: Optional["ProjectService"] = 
None, + event_service: Optional["EventService"] = None, ): - self.file_service = file_service - self.project_service = project_service + self._file_service = file_service + self._project_service = project_service + self._event_service = event_service + + @property + def file_service(self) -> "FileService": + if self._file_service is None: + from dembrane.service.file import get_file_service + + self._file_service = get_file_service() + return self._file_service + + @property + def project_service(self) -> "ProjectService": + if self._project_service is None: + from dembrane.service.project import ProjectService + + self._project_service = ProjectService() + return self._project_service + + @property + def event_service(self) -> "EventService": + if self._event_service is None: + from dembrane.service.events import EventService + + self._event_service = EventService() + return self._event_service def get_by_id_or_raise( self, @@ -81,7 +109,7 @@ def get_by_id_or_raise( }, ) - except DirectusBadRequest as e: + except (DirectusBadRequest, DirectusGenericException) as e: raise ConversationNotFoundException() from e try: @@ -226,7 +254,7 @@ def update( )["data"] return updated_conversation - except DirectusBadRequest as e: + except (DirectusBadRequest, DirectusGenericException) as e: raise ConversationNotFoundException() from e def delete( @@ -309,8 +337,8 @@ def create_chunk( if project.get("is_conversation_allowed", False) is False: raise ConversationNotOpenForParticipationException() - # if conversation.get("is_finished", False) is True: - # raise ConversationNotOpenForParticipationException() + if conversation.get("is_finished") is True: + raise ConversationNotOpenForParticipationException() chunk_id = generate_uuid() @@ -353,12 +381,11 @@ def create_chunk( }, )["data"] - # self.event_service.publish( - # ChunkCreatedEvent( - # chunk_id=chunk_id, - # conversation_id=conversation["id"], - # ) - # ) + event = ChunkCreatedEvent(chunk_id=chunk_id, 
conversation_id=conversation["id"]) + try: + self.event_service.publish(event) + except Exception: + logger.exception("Failed to publish ChunkCreatedEvent for %s", chunk_id) # Only trigger background audio processing if there's a file to process if has_file: diff --git a/echo/server/dembrane/service/events.py b/echo/server/dembrane/service/events.py new file mode 100644 index 00000000..940221d0 --- /dev/null +++ b/echo/server/dembrane/service/events.py @@ -0,0 +1,28 @@ +from __future__ import annotations + +from dataclasses import dataclass +from logging import getLogger +from typing import Any + +logger = getLogger("dembrane.service.events") + + +@dataclass(slots=True) +class ChunkCreatedEvent: + """Domain event emitted whenever a new conversation chunk is created.""" + + chunk_id: str + conversation_id: str + + +class EventService: + """Minimal event dispatcher used by services during tests and local runs.""" + + def publish(self, event: Any) -> None: + """ + Publish an event downstream. + + The default implementation simply logs the event so tests can assert the call. + Production deployments are expected to provide a richer implementation. 
+ """ + logger.info("Event published: %s", event) diff --git a/echo/server/dembrane/settings.py b/echo/server/dembrane/settings.py index 19a0c0df..3a4a1fed 100644 --- a/echo/server/dembrane/settings.py +++ b/echo/server/dembrane/settings.py @@ -20,17 +20,61 @@ from pathlib import Path from functools import lru_cache +from dotenv import load_dotenv from pydantic import Field, BaseModel, AliasChoices, field_validator from pydantic_settings import BaseSettings, SettingsConfigDict TranscriptionProvider = Literal["LiteLLM", "AssemblyAI", "Dembrane-25-09"] +_MODULE_BASE_DIR = Path(__file__).resolve().parent.parent +_DEFAULT_ENV_PATH = _MODULE_BASE_DIR / ".env" + +if _DEFAULT_ENV_PATH.exists(): + logging.info(f"Loading environment variables from {_DEFAULT_ENV_PATH}") + load_dotenv(_DEFAULT_ENV_PATH, override=True) +else: + logging.info(f"Environment variables file not found at {_DEFAULT_ENV_PATH}. Skipping.") + + +def _coerce_service_account(value: Optional[Any]) -> Optional[Dict[str, Any]]: + if value is None: + return None + if isinstance(value, dict): + return value + if isinstance(value, str): + trimmed = value.strip() + if trimmed in {"", "null", "None"}: + return None + raw_value: str | bytes = trimmed + elif isinstance(value, (bytes, bytearray)): + if not value: + return None + raw_value = value + else: + raise ValueError( + "Service account JSON must be a mapping, JSON string, or base64-encoded JSON" + ) + + try: + return json.loads(raw_value) + except (TypeError, json.JSONDecodeError): + try: + decoded = base64.b64decode(raw_value) + return json.loads(decoded) + except (ValueError, json.JSONDecodeError, TypeError) as exc: + raise ValueError( + "Service account JSON must be valid JSON or base64-encoded JSON" + ) from exc + class ResolvedLLMConfig(BaseModel): model: str api_key: Optional[str] = None api_base: Optional[str] = None api_version: Optional[str] = None + vertex_credentials: Optional[Dict[str, Any]] = None + vertex_project: Optional[str] = None + 
vertex_location: Optional[str] = None class LLMProviderConfig(BaseModel): @@ -38,6 +82,22 @@ class LLMProviderConfig(BaseModel): api_key: Optional[str] = None api_base: Optional[str] = None api_version: Optional[str] = None + vertex_credentials: Optional[Dict[str, Any]] = None + gcp_sa_json: Optional[Dict[str, Any]] = None + vertex_project: Optional[str] = None + vertex_location: Optional[str] = None + + @field_validator("vertex_credentials", mode="before") + @classmethod + def parse_vertex_credentials( + cls, value: Optional[Any] + ) -> Optional[Dict[str, Any]]: + return _coerce_service_account(value) + + @field_validator("gcp_sa_json", mode="before") + @classmethod + def parse_gcp_sa_json(cls, value: Optional[Any]) -> Optional[Dict[str, Any]]: + return _coerce_service_account(value) def resolve(self) -> ResolvedLLMConfig: if not self.model: @@ -48,6 +108,9 @@ def resolve(self) -> ResolvedLLMConfig: api_key=self.api_key, api_base=self.api_base, api_version=self.api_version, + vertex_credentials=self.vertex_credentials or self.gcp_sa_json, + vertex_project=self.vertex_project, + vertex_location=self.vertex_location, ) @@ -309,30 +372,7 @@ class TranscriptionSettings(BaseSettings): @field_validator("gcp_sa_json", mode="before") @classmethod def parse_gcp_sa_json(cls, value: Optional[Any]) -> Optional[Dict[str, Any]]: - if value is None: - return None - if isinstance(value, dict): - return value - if isinstance(value, str): - trimmed = value.strip() - if trimmed in {"", "null", "None"}: - return None - raw_value: str | bytes = trimmed - elif isinstance(value, (bytes, bytearray)): - if not value: - return None - raw_value = value - else: - raise ValueError("GCP_SA_JSON must be a mapping, JSON string, or base64-encoded JSON") - - try: - return json.loads(raw_value) - except (TypeError, json.JSONDecodeError): - try: - decoded = base64.b64decode(raw_value) - return json.loads(decoded) - except (ValueError, json.JSONDecodeError, TypeError) as exc: - raise 
ValueError("GCP_SA_JSON must be valid JSON or base64-encoded JSON") from exc + return _coerce_service_account(value) def ensure_valid(self) -> None: if self.provider == "AssemblyAI": @@ -389,6 +429,7 @@ def environment(self) -> str: def prompt_templates_dir(self) -> Path: return self.base_dir / "prompt_templates" + @lru_cache def get_settings() -> AppSettings: settings = AppSettings() diff --git a/echo/server/dembrane/transcribe.py b/echo/server/dembrane/transcribe.py index 591a0914..fea972d3 100644 --- a/echo/server/dembrane/transcribe.py +++ b/echo/server/dembrane/transcribe.py @@ -122,10 +122,9 @@ def transcribe_audio_assemblyai( if hotwords: data["keyterms_prompt"] = hotwords - try: - response = requests.post(f"{ASSEMBLYAI_BASE_URL}/v2/transcript", headers=headers, json=data) - response.raise_for_status() + response = requests.post(f"{ASSEMBLYAI_BASE_URL}/v2/transcript", headers=headers, json=data) + if response.status_code == 200: transcript_id = response.json()["id"] polling_endpoint = f"{ASSEMBLYAI_BASE_URL}/v2/transcript/{transcript_id}" @@ -137,13 +136,13 @@ def transcribe_audio_assemblyai( # return both to add the diarization response later... 
return transcript["text"], transcript elif transcript["status"] == "error": - raise RuntimeError(f"Transcription failed: {transcript['error']}") + raise TranscriptionError(f"Transcription failed: {transcript['error']}") else: time.sleep(3) - - except Exception as e: - logger.error(f"AssemblyAI transcription failed: {e}") - raise TranscriptionError(f"AssemblyAI transcription failed: {e}") from e + elif response.status_code == 400: + raise TranscriptionError(f"Transcription failed: {response.json()['error']}") + else: + raise Exception(f"Transcription failed: {response.json()['error']}") def _get_audio_file_object(audio_file_uri: str) -> Any: @@ -237,7 +236,6 @@ def _transcript_correction_workflow( "type": "json_object", "response_schema": response_schema, }, - vertex_credentials=GCP_SA_JSON, **completion_kwargs, ) @@ -270,8 +268,14 @@ def transcribe_audio_dembrane_25_09( """ logger = logging.getLogger("transcribe.transcribe_audio_dembrane_25_09") - transcript, response = transcribe_audio_assemblyai(audio_file_uri, language, hotwords) - logger.debug(f"transcript from assemblyai: {transcript}") + try: + transcript, response = transcribe_audio_assemblyai(audio_file_uri, language, hotwords) + logger.debug(f"transcript from assemblyai: {transcript}") + except TranscriptionError as e: + logger.info( + f"Transcription failed with AssemblyAI. 
So we will continue with the correction workflow with empty transcript: {e}" + ) + transcript, response = "[Nothing to transcribe]", {} # use correction workflow to correct keyterms and fix missing segments corrected_transcript, note = _transcript_correction_workflow( @@ -437,7 +441,9 @@ def transcribe_conversation_chunk( _save_transcript(conversation_chunk_id, transcript, diarization=None) return conversation_chunk_id case _: - raise TranscriptionError(f"Unsupported transcription provider: {transcript_provider}") + raise TranscriptionError( + f"Unsupported transcription provider: {transcript_provider}" + ) except Exception as e: logger.error("Failed to process conversation chunk %s: %s", conversation_chunk_id, e) diff --git a/echo/server/run-worker-cpu.sh b/echo/server/run-worker-cpu.sh index d0f6c3c6..54b0659f 100755 --- a/echo/server/run-worker-cpu.sh +++ b/echo/server/run-worker-cpu.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -uv run dramatiq --watch ./dembrane --queues cpu --processes 1 --threads 2 dembrane.tasks +uv run dramatiq --queues cpu --processes 1 --threads 2 dembrane.tasks diff --git a/echo/server/run-worker.sh b/echo/server/run-worker.sh index 82981b99..dd103d07 100755 --- a/echo/server/run-worker.sh +++ b/echo/server/run-worker.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -uv run dramatiq-gevent --watch ./dembrane --queues network --processes 2 --threads 1 dembrane.tasks +uv run dramatiq-gevent --queues network --processes 1 --threads 10 dembrane.tasks diff --git a/echo/server/uv.lock b/echo/server/uv.lock index 66acffce..2e6e48e2 100644 --- a/echo/server/uv.lock +++ b/echo/server/uv.lock @@ -66,20 +66,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, ] -[[package]] -name = "alembic" -version = "1.13.3" 
-source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mako" }, - { name = "sqlalchemy" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/94/a2/840c3b84382dce8624bc2f0ee67567fc74c32478d0c5a5aea981518c91c3/alembic-1.13.3.tar.gz", hash = "sha256:203503117415561e203aa14541740643a611f641517f0209fcae63e9fa09f1a2", size = 1921223, upload-time = "2024-09-23T14:52:14.593Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/12/58f4f11385fddafef5d6f7bfaaf2f42899c8da6b4f95c04b7c3b744851a8/alembic-1.13.3-py3-none-any.whl", hash = "sha256:908e905976d15235fae59c9ac42c4c5b75cfcefe3d27c0fbf7ae15a37715d80e", size = 233217, upload-time = "2024-09-23T14:52:18.183Z" }, -] - [[package]] name = "amqp" version = "5.3.1" @@ -148,22 +134,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, ] -[[package]] -name = "asyncpg" -version = "0.30.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, - { url = 
"https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, - { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, - { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, - { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, - { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, - { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, -] - [[package]] name = "attrs" version = "25.4.0" @@ -478,9 +448,7 @@ source = { virtual = "." } dependencies = [ { name = "aiofiles" }, { name = "aiohttp" }, - { name = "alembic" }, { name = "apscheduler" }, - { name = "asyncpg" }, { name = "backoff" }, { name = "boto3" }, { name = "colorlog" }, @@ -508,9 +476,7 @@ dependencies = [ { name = "numpy" }, { name = "pandas" }, { name = "pandas-stubs" }, - { name = "pgvector" }, { name = "pipmaster" }, - { name = "psycopg", extra = ["binary", "pool"] }, { name = "pydantic" }, { name = "pydantic-settings" }, { name = "pydub" }, @@ -530,7 +496,6 @@ dependencies = [ { name = "sentry-dramatiq" }, { name = "sentry-sdk" }, { name = "setuptools" }, - { name = "sqlalchemy" }, { name = "tenacity" }, { name = "tiktoken" }, { name = "types-aiofiles" }, @@ -544,9 +509,7 @@ dependencies = [ requires-dist = [ { name = "aiofiles", specifier = "==23.2.*" }, { name = "aiohttp", specifier = "==3.11.14" }, - { name = "alembic", specifier = "==1.13.*" }, { name = "apscheduler", specifier = "==3.11.*" }, - { name = "asyncpg", specifier = "==0.30.0" }, { name = "backoff", specifier = "==2.2.*" }, { name = "boto3", specifier = "==1.37.*" }, { name = "colorlog", specifier = ">=6.9.0" }, @@ -574,9 +537,7 @@ requires-dist = [ { name = "numpy", specifier = "==1.26.*" }, { name = "pandas", specifier = "==2.2.*" }, { name = "pandas-stubs", specifier = ">=2.2.2.240514" }, - { name = "pgvector", specifier = "==0.2.*" }, { name = "pipmaster", specifier = "==0.5.1" }, - { name = "psycopg", extras = ["binary", "pool"], specifier = "==3.1.*" }, { name = "pydantic", specifier = "==2.10.6" }, { name = "pydantic-settings", specifier = "==2.6.1" }, { name = "pydub", 
specifier = "==0.25.1" }, @@ -597,7 +558,6 @@ requires-dist = [ { name = "sentry-dramatiq", specifier = "==0.3.*" }, { name = "sentry-sdk", specifier = "==2.2.1" }, { name = "setuptools", specifier = "==75.8.0" }, - { name = "sqlalchemy", specifier = "==2.0.*" }, { name = "tenacity", specifier = "==8.3.*" }, { name = "tiktoken", specifier = "==0.9.0" }, { name = "types-aiofiles", specifier = "==23.2.*" }, @@ -1498,18 +1458,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/87/c8/1675527549ee174b9e1db089f7ddfbb962a97314657269b1e0344a5eaf56/lz4-4.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:6ea715bb3357ea1665f77874cf8f55385ff112553db06f3742d3cdcec08633f7", size = 89741, upload-time = "2025-04-01T22:55:31.184Z" }, ] -[[package]] -name = "mako" -version = "1.3.10" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "markupsafe" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, -] - [[package]] name = "markupsafe" version = "3.0.3" @@ -1725,17 +1673,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, ] -[[package]] -name = "pgvector" -version = "0.2.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "numpy" }, -] 
-wheels = [ - { url = "https://files.pythonhosted.org/packages/29/bb/4686b1090a7c68fa367e981130a074dc6c1236571d914ffa6e05c882b59d/pgvector-0.2.5-py2.py3-none-any.whl", hash = "sha256:5e5e93ec4d3c45ab1fa388729d56c602f6966296e19deee8878928c6d567e41b", size = 9638, upload-time = "2024-02-07T19:35:03.8Z" }, -] - [[package]] name = "pipmaster" version = "0.5.1" @@ -1830,57 +1767,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/07/d1/0a28c21707807c6aacd5dc9c3704b2aa1effbf37adebd8caeaf68b17a636/protobuf-6.33.0-py3-none-any.whl", hash = "sha256:25c9e1963c6734448ea2d308cfa610e692b801304ba0908d7bfa564ac5132995", size = 170477, upload-time = "2025-10-15T20:39:51.311Z" }, ] -[[package]] -name = "psycopg" -version = "3.1.20" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, - { name = "tzdata", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/5c/6d/0939210f3ba089b360cf0d3741494719152567bc81303cca2c0f1e67c78a/psycopg-3.1.20.tar.gz", hash = "sha256:32f5862ab79f238496236f97fe374a7ab55b4b4bb839a74802026544735f9a07", size = 147567, upload-time = "2024-06-30T17:03:55.421Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/e9/126bbfd5dded758bb109526c5f5f2c2538fe293b15b6fa208db7078c72c4/psycopg-3.1.20-py3-none-any.whl", hash = "sha256:898a29f49ac9c903d554f5a6cdc44a8fc564325557c18f82e51f39c1f4fc2aeb", size = 179473, upload-time = "2024-06-30T16:57:04.093Z" }, -] - -[package.optional-dependencies] -binary = [ - { name = "psycopg-binary", marker = "implementation_name != 'pypy'" }, -] -pool = [ - { name = "psycopg-pool" }, -] - -[[package]] -name = "psycopg-binary" -version = "3.1.20" -source = { registry = "https://pypi.org/simple" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/1c/45e5f240765e80076b08c3ed02c5dfeb5e97d549769b81f8382485d70a15/psycopg_binary-3.1.20-cp311-cp311-macosx_12_0_x86_64.whl", hash = 
"sha256:802989350fcbc783732bfef660afb34439a62727642a05e8bb9acf7d68993627", size = 3350503, upload-time = "2024-06-30T16:58:27.18Z" }, - { url = "https://files.pythonhosted.org/packages/52/b8/acf96d388692d0bbf2346286f8b175778bc24046aca9181f50d9df9f4714/psycopg_binary-3.1.20-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:01b0e39128715fc37fed6cdc50ab58278eacb75709af503eb607654030975f09", size = 3480091, upload-time = "2024-06-30T16:58:33.872Z" }, - { url = "https://files.pythonhosted.org/packages/41/d4/20604282ff08823d0e90cf092738ea21b339f56a172d8583565b272fc4be/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77af1086bedfa0729465565c636de3519079ba523d7b7ee6e8b9486beb1ee905", size = 4434555, upload-time = "2024-06-30T16:58:40.795Z" }, - { url = "https://files.pythonhosted.org/packages/73/e0/3917b766508bb749e08225492d45ba7463b559de1c8a41d3f8f3cf0927cb/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9b9562395d441e225f354e8c6303ee6993a93aaeb0dbb5b94368f3249ab2388", size = 4231402, upload-time = "2024-06-30T16:58:48.586Z" }, - { url = "https://files.pythonhosted.org/packages/b4/9b/251435896f7459beda355ef3e3919b6b20d067582cd6838ba248d3cff188/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e814d69e5447a93e7b98117ec95a8ce606d3742092fd120960551ed67c376fea", size = 4484218, upload-time = "2024-06-30T16:58:56.911Z" }, - { url = "https://files.pythonhosted.org/packages/a1/12/b2057f9bb8b5f408139266a5b48bfd7578340296d7314d964b9f09e5b18f/psycopg_binary-3.1.20-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adf1c2061600235ae9b11d7ad357cab89ac583a76bdb0199f7a29ac947939c20", size = 4176668, upload-time = "2024-06-30T16:59:02.496Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/9c/a62fe4167427a06e69882d274ba90903507afc89caf6bcc3671790a20875/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:50f1d807b4167f973a6f67bca39bf656b737f7426be158a1dc9cb0000d020744", size = 3102502, upload-time = "2024-06-30T16:59:07.216Z" }, - { url = "https://files.pythonhosted.org/packages/98/83/bceca23dd830d4069949e70dec9feb03c114cc551b104f0e2b48b1e598c6/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4cf6ec1490232a5b208dae94a8269dc739e6762684c8658a0f3570402db934ae", size = 3080005, upload-time = "2024-06-30T16:59:14.927Z" }, - { url = "https://files.pythonhosted.org/packages/fc/83/bab7c8495e0eb11bf710663afb2849c2d3c91a2bf61b2bd597941f57f80b/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:309c09ec50a9c5c8492c2922ee666df1e30a08b08a9b63083d0daa414eccd09c", size = 3182315, upload-time = "2024-06-30T16:59:21.18Z" }, - { url = "https://files.pythonhosted.org/packages/ca/9b/bd4970faed24ae4a850ee8c6ebd621e98fd86e2962e13038603a726e2504/psycopg_binary-3.1.20-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e2c33a01799f93ef8c11a023df66280e39ca3c3249a2581adb2a0e5e80801088", size = 3222552, upload-time = "2024-06-30T16:59:27.663Z" }, - { url = "https://files.pythonhosted.org/packages/5d/0b/7ab0744f282df53968f5066d5fd8bf3f994f90bf2a8003ab40278818d0f2/psycopg_binary-3.1.20-cp311-cp311-win_amd64.whl", hash = "sha256:2c67532057fda72579b02d9d61e9cc8975982844bd5c3c9dc7f84ce8bcac859c", size = 2899115, upload-time = "2024-06-30T16:59:35.512Z" }, -] - -[[package]] -name = "psycopg-pool" -version = "3.2.7" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9d/8f/3ec52b17087c2ed5fa32b64fd4814dde964c9aa4bd49d0d30fc24725ca6d/psycopg_pool-3.2.7.tar.gz", hash = "sha256:a77d531bfca238e49e5fb5832d65b98e69f2c62bfda3d2d4d833696bdc9ca54b", size = 29765, 
upload-time = "2025-10-26T00:46:10.379Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/59/74e752f605c6f0e351d4cf1c54fb9a1616dc800db4572b95bbfbb1a6225f/psycopg_pool-3.2.7-py3-none-any.whl", hash = "sha256:4b47bb59d887ef5da522eb63746b9f70e2faf967d34aac4f56ffc65e9606728f", size = 38232, upload-time = "2025-10-26T00:46:00.496Z" }, -] - [[package]] name = "pyarrow" version = "22.0.0" From 13e6e1e8bb69acc130a88fe78b5cc2a9cffe0a57 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Wed, 12 Nov 2025 17:50:12 +0000 Subject: [PATCH 21/23] updates --- echo/server/dembrane/api/chat.py | 6 +++--- echo/server/dembrane/api/conversation.py | 2 +- echo/server/dembrane/chat_utils.py | 2 +- echo/server/dembrane/reply_utils.py | 6 +++--- echo/server/dembrane/report_utils.py | 4 ++-- echo/server/pyproject.toml | 2 +- echo/server/uv.lock | 8 ++++---- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/echo/server/dembrane/api/chat.py b/echo/server/dembrane/api/chat.py index eacff24e..4ddb4a3f 100644 --- a/echo/server/dembrane/api/chat.py +++ b/echo/server/dembrane/api/chat.py @@ -173,7 +173,7 @@ async def get_chat_context(chat_id: str, auth: DependencyDirectusSession) -> Cha if tokens_count is None: tokens_count = token_counter( messages=[{"role": message_from, "content": message_text}], - **get_completion_kwargs(MODELS.TEXT_FAST)["model"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) try: await run_in_thread_pool( @@ -568,7 +568,7 @@ async def build_formatted_messages(conversation_ids: Iterable[str]) -> List[Dict candidate_messages = await build_formatted_messages(temp_ids) prompt_len = token_counter( messages=candidate_messages, - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], + model=get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], ) if prompt_len > max_context_threshold: @@ -605,7 +605,7 @@ async def build_formatted_messages(conversation_ids: Iterable[str]) -> List[Dict prompt_len = token_counter( 
messages=formatted_messages, - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], + model=get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], ) if prompt_len > MAX_CHAT_CONTEXT_LENGTH: raise HTTPException( diff --git a/echo/server/dembrane/api/conversation.py b/echo/server/dembrane/api/conversation.py index 41076790..1c724f81 100644 --- a/echo/server/dembrane/api/conversation.py +++ b/echo/server/dembrane/api/conversation.py @@ -401,7 +401,7 @@ async def get_conversation_token_count( token_count = token_counter( messages=[{"role": "user", "content": transcript}], - **get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], + model=get_completion_kwargs(MODELS.MULTI_MODAL_PRO)["model"], ) # Store the result in the cache diff --git a/echo/server/dembrane/chat_utils.py b/echo/server/dembrane/chat_utils.py index 24009029..ec115346 100644 --- a/echo/server/dembrane/chat_utils.py +++ b/echo/server/dembrane/chat_utils.py @@ -450,7 +450,7 @@ async def _process_single_batch( try: prompt_tokens = token_counter( messages=[{"role": "user", "content": prompt}], - **get_completion_kwargs(MODELS.TEXT_FAST)["model"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) MAX_BATCH_CONTEXT = 100000 # Leave headroom for response diff --git a/echo/server/dembrane/reply_utils.py b/echo/server/dembrane/reply_utils.py index 7faac8d6..3bb99595 100644 --- a/echo/server/dembrane/reply_utils.py +++ b/echo/server/dembrane/reply_utils.py @@ -237,7 +237,7 @@ async def generate_reply_for_conversation( formatted_conv = format_conversation(c) tokens = token_counter( messages=[{"role": "user", "content": formatted_conv}], - **get_completion_kwargs(MODELS.TEXT_FAST)["model"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) candidate_conversations.append((formatted_conv, tokens)) @@ -260,7 +260,7 @@ async def generate_reply_for_conversation( formatted_conv = format_conversation(c) tokens = token_counter( messages=[{"role": "user", "content": formatted_conv}], - 
**get_completion_kwargs(MODELS.TEXT_FAST)["model"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) # If conversation is too large, truncate it @@ -272,7 +272,7 @@ async def generate_reply_for_conversation( formatted_conv = format_conversation(c) tokens = token_counter( messages=[{"role": "user", "content": formatted_conv}], - **get_completion_kwargs(MODELS.TEXT_FAST)["model"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) candidate_conversations.append((formatted_conv, tokens)) diff --git a/echo/server/dembrane/report_utils.py b/echo/server/dembrane/report_utils.py index 7af2dd80..957b92fb 100644 --- a/echo/server/dembrane/report_utils.py +++ b/echo/server/dembrane/report_utils.py @@ -73,7 +73,7 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: # Count tokens before adding summary_tokens = token_counter( messages=[{"role": "user", "content": conversation["summary"]}], - **get_completion_kwargs(MODELS.TEXT_FAST)["model_kwargs"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) # Check if adding this conversation would exceed the limit @@ -131,7 +131,7 @@ async def get_report_content_for_project(project_id: str, language: str) -> str: # Calculate token count for the transcript transcript_tokens = token_counter( messages=[{"role": "user", "content": transcript}], - **get_completion_kwargs(MODELS.TEXT_FAST)["model_kwargs"], + model=get_completion_kwargs(MODELS.TEXT_FAST)["model"], ) if token_count + transcript_tokens < MAX_REPORT_CONTEXT_LENGTH: diff --git a/echo/server/pyproject.toml b/echo/server/pyproject.toml index 84613f27..9d1663c8 100644 --- a/echo/server/pyproject.toml +++ b/echo/server/pyproject.toml @@ -39,7 +39,7 @@ dependencies = [ "pandas-stubs>=2.2.2.240514", "types-requests>=2.32.0.20240602", "types-python-jose>=3.3.4.20240106", - "litellm==1.76.*", + "litellm==1.79.*", # Additional Dependencies "lightrag-dembrane==1.2.7.8", "nest-asyncio==1.6.0", diff --git a/echo/server/uv.lock 
b/echo/server/uv.lock index 2e6e48e2..ca063c80 100644 --- a/echo/server/uv.lock +++ b/echo/server/uv.lock @@ -529,7 +529,7 @@ requires-dist = [ { name = "langchain-community", specifier = "==0.0.*" }, { name = "langchain-experimental", specifier = "==0.0.*" }, { name = "lightrag-dembrane", specifier = "==1.2.7.8" }, - { name = "litellm", specifier = "==1.76.*" }, + { name = "litellm", specifier = "==1.79.*" }, { name = "lz4", specifier = "==4.4.*" }, { name = "mypy", specifier = ">=1.16.0" }, { name = "nest-asyncio", specifier = "==1.6.0" }, @@ -1421,7 +1421,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.76.3" +version = "1.79.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -1437,9 +1437,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/46/57b6539365616452bb6f4401487448ce62e62755738fce55d8222d7a557e/litellm-1.76.3.tar.gz", hash = "sha256:fc81219c59b17b26cc81276ce32582f3715612877ab11c1ea2c26e4853ac67e8", size = 10210403, upload-time = "2025-09-07T01:59:19.55Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/0a/587c3f895f5d6c842d6cd630204c8bf7de677fc69ce2bd26e812c02b6e0b/litellm-1.79.3.tar.gz", hash = "sha256:4da4716f8da3e1b77838262c36d3016146860933e0489171658a9d4a3fd59b1b", size = 11319885, upload-time = "2025-11-09T02:33:17.684Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/d9/5f8ed27241b487f51f04573b8ba06d4460ebed9f792ff5cc148649fbf862/litellm-1.76.3-py3-none-any.whl", hash = "sha256:d62e3ff2a80ec5e551c6d7a0fe199ffe718ecb6cbaa43fc9250dd8d7c0944352", size = 9000797, upload-time = "2025-09-07T01:59:16.261Z" }, + { url = "https://files.pythonhosted.org/packages/41/ad/3e030c925c99b9a2f1573bf376259338b502ed1aa25ae768bf1f79d8b1bf/litellm-1.79.3-py3-none-any.whl", hash = "sha256:16314049d109e5cadb2abdccaf2e07ea03d2caa3a9b3f54f34b5b825092b4eeb", size = 10412553, upload-time = 
"2025-11-09T02:33:14.021Z" }, ] [[package]] From 37b5f2739b9b746ba0d70400fc8afedc7d3017a3 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Thu, 13 Nov 2025 10:36:29 +0000 Subject: [PATCH 22/23] docs --- echo/readme.md | 1 + echo/server/dembrane/api/chat.py | 4 ++-- echo/server/dembrane/embedding.py | 4 ++-- echo/server/dembrane/main.py | 16 ++++++++-------- 4 files changed, 13 insertions(+), 12 deletions(-) diff --git a/echo/readme.md b/echo/readme.md index 3c14c6cf..912aec62 100644 --- a/echo/readme.md +++ b/echo/readme.md @@ -43,6 +43,7 @@ The following guide is to run the whole application locally. it is HIGHLY recomm - Press Ctrl+Shift+P to open the command palette. - Type **"Dev Containers: Open Folder in Container"** (or "Reopen in Container"). + - Create .envs `touch server/.env && touch directus/.env` - Choose the `/echo/echo` folder (this is the folder containing the `.devcontainer/` folder) - Wait for the containers to build. This will take a few minutes. diff --git a/echo/server/dembrane/api/chat.py b/echo/server/dembrane/api/chat.py index 4ddb4a3f..1ecef933 100644 --- a/echo/server/dembrane/api/chat.py +++ b/echo/server/dembrane/api/chat.py @@ -222,7 +222,7 @@ async def get_chat_context(chat_id: str, auth: DependencyDirectusSession) -> Cha if not conversation_id: continue - participant_name = conversation_ref.get("participant_name") + participant_name = str(conversation_ref.get("participant_name") or "") is_locked = conversation_id in locked_conversations token_count = await get_conversation_token_count(conversation_id, auth) @@ -462,7 +462,7 @@ async def post_chat( user_message_content = body.messages[-1].content user_message_id = generate_uuid() - user_message = await run_in_thread_pool( + await run_in_thread_pool( chat_service.create_message, chat_id, "user", diff --git a/echo/server/dembrane/embedding.py b/echo/server/dembrane/embedding.py index e02024a9..0bd96547 100644 --- a/echo/server/dembrane/embedding.py +++ 
b/echo/server/dembrane/embedding.py @@ -1,5 +1,5 @@ import logging -from typing import List +from typing import Any, Dict, List import backoff import litellm @@ -22,7 +22,7 @@ def embed_text(text: str) -> List[float]: if not embedding_settings.model: raise ValueError("Embedding model is not configured.") - embedding_kwargs = { + embedding_kwargs: Dict[str, Any] = { "model": embedding_settings.model, } if embedding_settings.api_key: diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index ac4a8114..d1aa70b6 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -1,22 +1,22 @@ import time -from typing import Any, AsyncGenerator, Awaitable, Callable, cast +from typing import Any, Callable, Awaitable, AsyncGenerator from logging import getLogger from contextlib import asynccontextmanager import nest_asyncio from fastapi import FastAPI, Request, HTTPException +from starlette.types import Scope from fastapi.staticfiles import StaticFiles +from starlette.responses import Response from starlette.exceptions import HTTPException as StarletteHTTPException from starlette.middleware import Middleware from fastapi.openapi.utils import get_openapi from starlette.middleware.cors import CORSMiddleware -from starlette.responses import Response -from starlette.types import Scope -from dembrane.settings import get_settings +from dembrane.seed import seed_default_languages, seed_default_verification_topics from dembrane.sentry import init_sentry from dembrane.api.api import api -from dembrane.seed import seed_default_languages, seed_default_verification_topics +from dembrane.settings import get_settings # LightRAG requires nest_asyncio for nested event loops nest_asyncio.apply() @@ -104,7 +104,7 @@ async def get_response(self, path: str, scope: Scope) -> Response: raise ex -def custom_openapi() -> Any: +def custom_openapi() -> dict[str, Any]: if app.openapi_schema: return app.openapi_schema openapi_schema = get_openapi( @@ -114,10 +114,10 
@@ def custom_openapi() -> Any: ) openapi_schema["info"]["x-logo"] = {"url": "/dembrane-logo.png"} app.openapi_schema = openapi_schema - return app.openapi_schema + return openapi_schema -app.openapi = cast(Callable[[], dict[str, Any]], custom_openapi) +setattr(app, "openapi", custom_openapi) if __name__ == "__main__": From 864ce1f77ae4e70b0c7bd7bdd1eb77edcd1fe792 Mon Sep 17 00:00:00 2001 From: Sameer Pashikanti Date: Thu, 13 Nov 2025 12:14:37 +0100 Subject: [PATCH 23/23] fix build --- echo/server/dembrane/api/conversation.py | 6 ++-- echo/server/dembrane/api/participant.py | 2 +- echo/server/dembrane/api/project.py | 4 +-- echo/server/dembrane/api/verify.py | 4 +-- echo/server/dembrane/async_helpers.py | 2 +- echo/server/dembrane/audio_utils.py | 2 +- echo/server/dembrane/main.py | 2 +- echo/server/dembrane/reply_utils.py | 2 +- echo/server/dembrane/seed.py | 4 +-- echo/server/dembrane/service/__init__.py | 14 ++++---- echo/server/dembrane/service/chat.py | 2 +- echo/server/dembrane/service/conversation.py | 2 +- echo/server/dembrane/service/events.py | 4 +-- echo/server/dembrane/tasks.py | 14 +++++--- echo/server/dembrane/transcribe.py | 3 +- echo/server/tests/test_audio_utils.py | 2 +- echo/server/tests/test_transcribe_assembly.py | 34 ++++++++++++++++--- 17 files changed, 67 insertions(+), 36 deletions(-) diff --git a/echo/server/dembrane/api/conversation.py b/echo/server/dembrane/api/conversation.py index 1c724f81..1bb5d998 100644 --- a/echo/server/dembrane/api/conversation.py +++ b/echo/server/dembrane/api/conversation.py @@ -5,13 +5,15 @@ from fastapi import Request, APIRouter from pydantic import BaseModel +from litellm.utils import token_counter from fastapi.responses import RedirectResponse, StreamingResponse from fastapi.exceptions import HTTPException -from litellm.utils import token_counter from litellm.exceptions import ContentPolicyViolationError from dembrane.s3 import get_signed_url +from dembrane.llms import MODELS, get_completion_kwargs from 
dembrane.utils import CacheWithExpiration, generate_uuid, get_utc_timestamp +from dembrane.service import conversation_service from dembrane.directus import directus from dembrane.audio_utils import ( get_duration_from_s3, @@ -19,7 +21,6 @@ merge_multiple_audio_files_and_save_to_s3, ) from dembrane.reply_utils import generate_reply_for_conversation -from dembrane.llms import MODELS, get_completion_kwargs from dembrane.api.stateless import generate_summary from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ( @@ -27,7 +28,6 @@ ConversationNotFoundException, ) from dembrane.api.dependency_auth import DependencyDirectusSession -from dembrane.service import conversation_service logger = getLogger("api.conversation") ConversationRouter = APIRouter(tags=["conversation"]) diff --git a/echo/server/dembrane/api/participant.py b/echo/server/dembrane/api/participant.py index 35903bc8..cb06d36e 100644 --- a/echo/server/dembrane/api/participant.py +++ b/echo/server/dembrane/api/participant.py @@ -9,9 +9,9 @@ from dembrane.s3 import get_sanitized_s3_key, get_file_size_bytes_from_s3 from dembrane.utils import generate_uuid -from dembrane.settings import get_settings from dembrane.service import project_service, conversation_service from dembrane.directus import directus +from dembrane.settings import get_settings from dembrane.async_helpers import run_in_thread_pool from dembrane.service.project import ProjectNotFoundException from dembrane.service.conversation import ( diff --git a/echo/server/dembrane/api/project.py b/echo/server/dembrane/api/project.py index 5b63ba1a..36fd987d 100644 --- a/echo/server/dembrane/api/project.py +++ b/echo/server/dembrane/api/project.py @@ -1,10 +1,10 @@ import os import asyncio import zipfile -from datetime import datetime from http import HTTPStatus from typing import Any, List, Optional, Generator from logging import getLogger +from datetime import datetime from fastapi import APIRouter, HTTPException, 
BackgroundTasks from pydantic import BaseModel @@ -12,8 +12,8 @@ from dembrane.tasks import task_create_view, task_create_project_library from dembrane.utils import generate_uuid, get_safe_filename -from dembrane.settings import get_settings from dembrane.service import project_service, conversation_service +from dembrane.settings import get_settings from dembrane.report_utils import ContextTooLongException, get_report_content_for_project from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ( diff --git a/echo/server/dembrane/api/verify.py b/echo/server/dembrane/api/verify.py index 3a1b900e..2a19ddd7 100644 --- a/echo/server/dembrane/api/verify.py +++ b/echo/server/dembrane/api/verify.py @@ -8,15 +8,15 @@ from fastapi import APIRouter, HTTPException from pydantic import Field, BaseModel +from dembrane.llms import MODELS, get_completion_kwargs from dembrane.utils import generate_uuid -from dembrane.settings import get_settings from dembrane.prompts import render_prompt from dembrane.directus import directus +from dembrane.settings import get_settings from dembrane.transcribe import _get_audio_file_object from dembrane.async_helpers import run_in_thread_pool from dembrane.api.exceptions import ProjectNotFoundException, ConversationNotFoundException from dembrane.api.dependency_auth import DependencyDirectusSession -from dembrane.llms import MODELS, get_completion_kwargs logger = logging.getLogger("api.verify") diff --git a/echo/server/dembrane/async_helpers.py b/echo/server/dembrane/async_helpers.py index 59460432..deddc32a 100644 --- a/echo/server/dembrane/async_helpers.py +++ b/echo/server/dembrane/async_helpers.py @@ -23,7 +23,7 @@ import atexit import asyncio import threading -from typing import Any, TypeVar, Callable, Optional, Awaitable, Coroutine +from typing import Any, TypeVar, Callable, Optional, Coroutine from logging import getLogger from functools import partial from concurrent.futures import ThreadPoolExecutor diff 
--git a/echo/server/dembrane/audio_utils.py b/echo/server/dembrane/audio_utils.py index 548ae46d..698546ab 100644 --- a/echo/server/dembrane/audio_utils.py +++ b/echo/server/dembrane/audio_utils.py @@ -13,9 +13,9 @@ from dembrane.s3 import s3_client, delete_from_s3, get_stream_from_s3, get_sanitized_s3_key from dembrane.utils import generate_uuid -from dembrane.settings import get_settings from dembrane.service import conversation_service from dembrane.directus import directus +from dembrane.settings import get_settings logger = logging.getLogger("audio_utils") diff --git a/echo/server/dembrane/main.py b/echo/server/dembrane/main.py index d1aa70b6..d14174d1 100644 --- a/echo/server/dembrane/main.py +++ b/echo/server/dembrane/main.py @@ -117,7 +117,7 @@ def custom_openapi() -> dict[str, Any]: return openapi_schema -setattr(app, "openapi", custom_openapi) +app.openapi = custom_openapi if __name__ == "__main__": diff --git a/echo/server/dembrane/reply_utils.py b/echo/server/dembrane/reply_utils.py index 3bb99595..4557a6c4 100644 --- a/echo/server/dembrane/reply_utils.py +++ b/echo/server/dembrane/reply_utils.py @@ -4,8 +4,8 @@ import sentry_sdk from litellm import acompletion -from litellm.utils import token_counter from pydantic import BaseModel +from litellm.utils import token_counter from litellm.exceptions import ContentPolicyViolationError from dembrane.llms import MODELS, get_completion_kwargs diff --git a/echo/server/dembrane/seed.py b/echo/server/dembrane/seed.py index 137a31b7..e0b6aab8 100644 --- a/echo/server/dembrane/seed.py +++ b/echo/server/dembrane/seed.py @@ -2,11 +2,11 @@ Seeding helpers for bootstrap tasks that need to run during application startup. 
""" +from typing import Any, Dict, List, Mapping, Iterable from logging import getLogger -from typing import Any, Dict, Iterable, Mapping, List -from dembrane.async_helpers import run_in_thread_pool from dembrane.directus import directus +from dembrane.async_helpers import run_in_thread_pool logger = getLogger("dembrane.seed") diff --git a/echo/server/dembrane/service/__init__.py b/echo/server/dembrane/service/__init__.py index 76c0cdee..10601049 100644 --- a/echo/server/dembrane/service/__init__.py +++ b/echo/server/dembrane/service/__init__.py @@ -15,7 +15,14 @@ project = project_service.get_by_id_or_raise(project_id) """ +from .chat import ( + ChatService, + ChatServiceException, + ChatNotFoundException, + ChatMessageNotFoundException, +) from .file import FileServiceException, get_file_service +from .events import EventService from .project import ProjectService, ProjectServiceException, ProjectNotFoundException from .conversation import ( ConversationService, @@ -24,13 +31,6 @@ ConversationChunkNotFoundException, ConversationNotOpenForParticipationException, ) -from .chat import ( - ChatService, - ChatServiceException, - ChatNotFoundException, - ChatMessageNotFoundException, -) -from .events import EventService file_service = get_file_service() project_service = ProjectService() diff --git a/echo/server/dembrane/service/chat.py b/echo/server/dembrane/service/chat.py index 39df9956..c93c7417 100644 --- a/echo/server/dembrane/service/chat.py +++ b/echo/server/dembrane/service/chat.py @@ -1,4 +1,4 @@ -from typing import Any, Iterable, List, Optional +from typing import Any, List, Iterable, Optional from logging import getLogger from dembrane.directus import DirectusBadRequest, directus_client_context diff --git a/echo/server/dembrane/service/conversation.py b/echo/server/dembrane/service/conversation.py index cb2695c4..c76e3bc9 100644 --- a/echo/server/dembrane/service/conversation.py +++ b/echo/server/dembrane/service/conversation.py @@ -14,8 +14,8 @@ if 
TYPE_CHECKING: from dembrane.service.file import FileService - from dembrane.service.project import ProjectService from dembrane.service.events import EventService + from dembrane.service.project import ProjectService # allows for None to be a sentinel value _UNSET = object() diff --git a/echo/server/dembrane/service/events.py b/echo/server/dembrane/service/events.py index 940221d0..1249fd19 100644 --- a/echo/server/dembrane/service/events.py +++ b/echo/server/dembrane/service/events.py @@ -1,8 +1,8 @@ from __future__ import annotations -from dataclasses import dataclass -from logging import getLogger from typing import Any +from logging import getLogger +from dataclasses import dataclass logger = getLogger("dembrane.service.events") diff --git a/echo/server/dembrane/tasks.py b/echo/server/dembrane/tasks.py index 9fb35a28..4215b419 100644 --- a/echo/server/dembrane/tasks.py +++ b/echo/server/dembrane/tasks.py @@ -13,21 +13,19 @@ from dramatiq.results.backends.redis import RedisBackend as ResultsRedisBackend from dembrane.utils import generate_uuid, get_utc_timestamp -from dembrane.settings import get_settings from dembrane.sentry import init_sentry from dembrane.directus import ( DirectusBadRequest, DirectusServerError, - directus, directus_client_context, ) +from dembrane.settings import get_settings from dembrane.transcribe import transcribe_conversation_chunk -from dembrane.async_helpers import run_in_thread_pool, run_async_in_new_loop +from dembrane.async_helpers import run_async_in_new_loop from dembrane.conversation_utils import collect_unfinished_conversations from dembrane.api.dependency_auth import DependencyDirectusSession from dembrane.processing_status_utils import ( ProcessingStatusContext, - set_error_status, ) settings = get_settings() @@ -367,6 +365,13 @@ def task_create_view( return logger.info(f"User query: {user_query}") + if user_query_context: + logger.info( + "User query context provided (%d characters).", len(user_query_context) + ) + else: + 
logger.info("No additional user query context provided.") + logger.info("Requested language for view generation: %s", language or "unspecified") project_id: Optional[str] = None @@ -414,6 +419,7 @@ def task_create_view( @dramatiq.actor(queue_name="network", priority=50) def task_create_project_library(project_id: str, language: str) -> None: logger = getLogger("dembrane.tasks.task_create_project_library") + logger.info("Requested language for project library creation: %s", language or "unspecified") with ProcessingStatusContext( project_id=project_id, diff --git a/echo/server/dembrane/transcribe.py b/echo/server/dembrane/transcribe.py index fea972d3..e53afb55 100644 --- a/echo/server/dembrane/transcribe.py +++ b/echo/server/dembrane/transcribe.py @@ -19,11 +19,11 @@ import requests from dembrane.s3 import get_signed_url, get_stream_from_s3 -from dembrane.settings import get_settings from dembrane.llms import MODELS, get_completion_kwargs from dembrane.prompts import render_prompt from dembrane.service import file_service, conversation_service from dembrane.directus import directus +from dembrane.settings import get_settings logger = logging.getLogger("transcribe") @@ -90,6 +90,7 @@ def transcribe_audio_assemblyai( ) -> tuple[str, dict[str, Any]]: """Transcribe audio through AssemblyAI""" logger = logging.getLogger("transcribe.transcribe_audio_assemblyai") + logger.info("Submitting AssemblyAI transcription request for %s", audio_file_uri) headers = { "Content-Type": "application/json", diff --git a/echo/server/tests/test_audio_utils.py b/echo/server/tests/test_audio_utils.py index 697f7c6d..167533c4 100644 --- a/echo/server/tests/test_audio_utils.py +++ b/echo/server/tests/test_audio_utils.py @@ -6,8 +6,8 @@ from dembrane.s3 import s3_client, get_sanitized_s3_key from dembrane.utils import generate_uuid -from dembrane.settings import get_settings from dembrane.directus import directus +from dembrane.settings import get_settings from dembrane.audio_utils import ( 
probe_from_s3, probe_from_bytes, diff --git a/echo/server/tests/test_transcribe_assembly.py b/echo/server/tests/test_transcribe_assembly.py index 3e02006a..8107faa5 100644 --- a/echo/server/tests/test_transcribe_assembly.py +++ b/echo/server/tests/test_transcribe_assembly.py @@ -1,25 +1,46 @@ import os import logging +import importlib +from typing import Any, Callable, Optional import pytest -os.environ.setdefault("TRANSCRIPTION_PROVIDER", "AssemblyAI") - -TEST_AUDIO_URL = "https://storage.googleapis.com/aai-platform-public/samples/1765269382848385.wav" - from dembrane.s3 import delete_from_s3, save_to_s3_from_url from dembrane.utils import get_utc_timestamp from dembrane.directus import directus -from dembrane.transcribe import transcribe_audio_assemblyai, transcribe_conversation_chunk + +TEST_AUDIO_URL = "https://storage.googleapis.com/aai-platform-public/samples/1765269382848385.wav" + +transcribe_audio_assemblyai: Optional[Callable[..., tuple[str, dict[str, Any]]]] = None +transcribe_conversation_chunk: Optional[Callable[[str], str]] = None logger = logging.getLogger("test_transcribe_assembly") +@pytest.fixture(scope="module", autouse=True) +def configure_transcription_provider() -> None: + """Ensure AssemblyAI is the active transcription provider before tests run.""" + global transcribe_audio_assemblyai + global transcribe_conversation_chunk + + os.environ.setdefault("TRANSCRIPTION_PROVIDER", "AssemblyAI") + + import dembrane.transcribe as transcribe_module + + importlib.reload(transcribe_module) + + transcribe_audio_assemblyai = transcribe_module.transcribe_audio_assemblyai + transcribe_conversation_chunk = transcribe_module.transcribe_conversation_chunk + yield + + def _require_assemblyai(): """Ensure AssemblyAI is enabled and credentials are present or skip.""" if not os.environ.get("ASSEMBLYAI_API_KEY"): pytest.skip("ASSEMBLYAI_API_KEY not set; skipping AssemblyAI tests") os.environ["TRANSCRIPTION_PROVIDER"] = "AssemblyAI" + if transcribe_audio_assemblyai 
is None or transcribe_conversation_chunk is None: + pytest.skip("AssemblyAI transcription helpers not initialized") @pytest.fixture @@ -113,6 +134,7 @@ def fixture_chunk_nl(): class TestTranscribeAssemblyAI: def test_transcribe_conversation_chunk_en(self, fixture_chunk_en): chunk_id = fixture_chunk_en["chunk_id"] + assert transcribe_conversation_chunk is not None result_id = transcribe_conversation_chunk(chunk_id) assert result_id == chunk_id @@ -124,6 +146,7 @@ def test_transcribe_conversation_chunk_en(self, fixture_chunk_en): def test_transcribe_conversation_chunk_nl(self, fixture_chunk_nl): chunk_id = fixture_chunk_nl["chunk_id"] + assert transcribe_conversation_chunk is not None result_id = transcribe_conversation_chunk(chunk_id) assert result_id == chunk_id @@ -134,6 +157,7 @@ def test_transcribe_conversation_chunk_nl(self, fixture_chunk_nl): def test_transcribe_audio_assemblyai(): + assert transcribe_audio_assemblyai is not None transcript, response = transcribe_audio_assemblyai( audio_file_uri=TEST_AUDIO_URL, language="en",