diff --git a/.github/workflows/build-wavefront-call_processing-develop.yaml b/.github/workflows/build-wavefront-call_processing-develop.yaml new file mode 100644 index 00000000..2cd35967 --- /dev/null +++ b/.github/workflows/build-wavefront-call_processing-develop.yaml @@ -0,0 +1,66 @@ +name: (Develop) Build and Push Wavefront Call Processing app to AWS and GCP + +on: + workflow_dispatch: + +env: + PROJECT_ID: aesy-330511 + GCP_REGION: asia-south1 + GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub + IMAGE_NAME: wavefront-call_processing + +jobs: + build-push-artifact: + runs-on: ubuntu-latest + + steps: + - name: "Checkout" + uses: "actions/checkout@v3" + + - name: Get commit hash + id: get-commit-hash + run: echo "commit-hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Get timestamp + id: get-timestamp + run: echo "timestamp=$(date +'%Y-%m-%d-%H-%M')" >> $GITHUB_OUTPUT + + - name: Cache Docker layers + id: cache-docker-layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-docker-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-docker- + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker Image + id: build-image + run: | + docker build -f wavefront/server/docker/call_processing.Dockerfile -t rootflo:${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }} . 
+ echo "IMAGE_TAG=${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }}" >> $GITHUB_ENV + + - id: "Auth-to-GCP" + uses: "google-github-actions/auth@v1" + with: + credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" + + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v1" + + - name: "Docker auth for GCP" + run: |- + gcloud auth configure-docker ${{ env.GCP_REGION }}-docker.pkg.dev --quiet + + - name: Tag and push image to GCP Artifact Registry + run: | + docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + + - name: Cleanup Docker images + run: | + docker rmi rootflo:${{ env.IMAGE_TAG }} || true + docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true diff --git a/.github/workflows/build-wavefront-floconsole-develop.yaml b/.github/workflows/build-wavefront-floconsole-develop.yaml new file mode 100644 index 00000000..15bef39f --- /dev/null +++ b/.github/workflows/build-wavefront-floconsole-develop.yaml @@ -0,0 +1,66 @@ +name: (Develop) Build and Push Wavefront Floconsole app to AWS and GCP + +on: + workflow_dispatch: + +env: + PROJECT_ID: aesy-330511 + GCP_REGION: asia-south1 + GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub + IMAGE_NAME: wavefront-floconsole + +jobs: + build-push-artifact: + runs-on: ubuntu-latest + + steps: + - name: "Checkout" + uses: "actions/checkout@v3" + + - name: Get commit hash + id: get-commit-hash + run: echo "commit-hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Get timestamp + id: get-timestamp + run: echo "timestamp=$(date +'%Y-%m-%d-%H-%M')" >> $GITHUB_OUTPUT + + - name: Cache Docker layers + id: cache-docker-layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-docker-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-docker- + 
+ - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker Image + id: build-image + run: | + docker build -f wavefront/server/docker/floconsole.Dockerfile -t rootflo:${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }} . + echo "IMAGE_TAG=${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }}" >> $GITHUB_ENV + + - id: "Auth-to-GCP" + uses: "google-github-actions/auth@v1" + with: + credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" + + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v1" + + - name: "Docker auth for GCP" + run: |- + gcloud auth configure-docker ${{ env.GCP_REGION }}-docker.pkg.dev --quiet + + - name: Tag and push image to GCP Artifact Registry + run: | + docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + + - name: Cleanup Docker images + run: | + docker rmi rootflo:${{ env.IMAGE_TAG }} || true + docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true diff --git a/.github/workflows/build-wavefront-floware-develop.yaml b/.github/workflows/build-wavefront-floware-develop.yaml new file mode 100644 index 00000000..7527e75e --- /dev/null +++ b/.github/workflows/build-wavefront-floware-develop.yaml @@ -0,0 +1,66 @@ +name: (Develop) Build and Push Wavefront Floware app to AWS and GCP + +on: + workflow_dispatch: + +env: + PROJECT_ID: aesy-330511 + GCP_REGION: asia-south1 + GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub + IMAGE_NAME: wavefront-floware + +jobs: + build-push-artifact: + runs-on: ubuntu-latest + + steps: + - name: "Checkout" + uses: "actions/checkout@v3" + + - name: Get commit hash + id: get-commit-hash + run: echo "commit-hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Get timestamp + id: get-timestamp 
+ run: echo "timestamp=$(date +'%Y-%m-%d-%H-%M')" >> $GITHUB_OUTPUT + + - name: Cache Docker layers + id: cache-docker-layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-docker-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-docker- + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker Image + id: build-image + run: | + docker build -f wavefront/server/docker/floware.Dockerfile -t rootflo:${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }} . + echo "IMAGE_TAG=${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }}" >> $GITHUB_ENV + + - id: "Auth-to-GCP" + uses: "google-github-actions/auth@v1" + with: + credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" + + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v1" + + - name: "Docker auth for GCP" + run: |- + gcloud auth configure-docker ${{ env.GCP_REGION }}-docker.pkg.dev --quiet + + - name: Tag and push image to GCP Artifact Registry + run: | + docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + + - name: Cleanup Docker images + run: | + docker rmi rootflo:${{ env.IMAGE_TAG }} || true + docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true diff --git a/.github/workflows/build-wavefront-inference_app-develop.yaml b/.github/workflows/build-wavefront-inference_app-develop.yaml new file mode 100644 index 00000000..09825946 --- /dev/null +++ b/.github/workflows/build-wavefront-inference_app-develop.yaml @@ -0,0 +1,66 @@ +name: (Develop) Build and Push Wavefront Inference app to AWS and GCP + +on: + workflow_dispatch: + +env: + PROJECT_ID: aesy-330511 + GCP_REGION: asia-south1 + GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub + IMAGE_NAME: 
wavefront-inference_app + +jobs: + build-push-artifact: + runs-on: ubuntu-latest + + steps: + - name: "Checkout" + uses: "actions/checkout@v3" + + - name: Get commit hash + id: get-commit-hash + run: echo "commit-hash=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT + + - name: Get timestamp + id: get-timestamp + run: echo "timestamp=$(date +'%Y-%m-%d-%H-%M')" >> $GITHUB_OUTPUT + + - name: Cache Docker layers + id: cache-docker-layers + uses: actions/cache@v3 + with: + path: /tmp/.buildx-cache + key: ${{ runner.os }}-docker-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-docker- + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build Docker Image + id: build-image + run: | + docker build -f wavefront/server/docker/inference_app.Dockerfile -t rootflo:${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }} . + echo "IMAGE_TAG=${{ steps.get-commit-hash.outputs.commit-hash }}-${{ steps.get-timestamp.outputs.timestamp }}" >> $GITHUB_ENV + + - id: "Auth-to-GCP" + uses: "google-github-actions/auth@v1" + with: + credentials_json: "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" + + - name: "Set up Cloud SDK" + uses: "google-github-actions/setup-gcloud@v1" + + - name: "Docker auth for GCP" + run: |- + gcloud auth configure-docker ${{ env.GCP_REGION }}-docker.pkg.dev --quiet + + - name: Tag and push image to GCP Artifact Registry + run: | + docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} + + - name: Cleanup Docker images + run: | + docker rmi rootflo:${{ env.IMAGE_TAG }} || true + docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true diff --git a/.github/workflows/build-wavefront-web-develop.yaml b/.github/workflows/build-wavefront-web-develop.yaml index 270758e3..29e0c56f 100644 --- a/.github/workflows/build-wavefront-web-develop.yaml 
+++ b/.github/workflows/build-wavefront-web-develop.yaml @@ -2,9 +2,6 @@ name: (Develop) Build and Push Wavefront Web app to AWS and GCP on: workflow_dispatch: - push: - branches: - - wavefront env: PROJECT_ID: aesy-330511 diff --git a/.vscode/launch.json b/.vscode/launch.json new file mode 100644 index 00000000..9e26dfee --- /dev/null +++ b/.vscode/launch.json @@ -0,0 +1 @@ +{} \ No newline at end of file diff --git a/docker-compose.sample.yml b/docker-compose.sample.yml new file mode 100644 index 00000000..98beea6c --- /dev/null +++ b/docker-compose.sample.yml @@ -0,0 +1,437 @@ +version: '3.8' + +services: + # PostgreSQL for Floware with pgvector extension + postgres-floware: + image: ankane/pgvector + container_name: postgres-floware + restart: unless-stopped + environment: + - POSTGRES_USER= + - POSTGRES_PASSWORD= + - POSTGRES_DB= + ports: + - '5432:5432' + volumes: + - postgres-floware-data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U -d "] + interval: 10s + timeout: 5s + retries: 5 + networks: + - floware-network + + # PostgreSQL for Console + postgres-console: + image: postgres:16 + container_name: postgres-console + restart: unless-stopped + environment: + - POSTGRES_USER= + - POSTGRES_PASSWORD= + - POSTGRES_DB= + ports: + - '5433:5432' + volumes: + - postgres-console-data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U -d "] + interval: 10s + timeout: 5s + retries: 5 + networks: + - floware-network + + # Redis for Floware + redis-floware: + image: redis:7.4 + container_name: redis-floware + restart: unless-stopped + ports: + - '6379:6379' + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + timeout: 3s + retries: 5 + networks: + - floware-network + + # Redis for Call Processing + redis-call-processing: + image: redis:7.4 + container_name: redis-call-processing + restart: unless-stopped + ports: + - '6380:6379' + healthcheck: + test: ["CMD", "redis-cli", "ping"] + interval: 5s + 
timeout: 3s + retries: 5 + networks: + - floware-network + + # Wavefront - AI middleware web + wavefront: + build: + context: . + dockerfile: ./wavefront/client/.Dockerfile + container_name: wavefront + restart: unless-stopped + ports: + - '3000:3000' + environment: + - APP_ENV=local + - BASE_URL=http://floconsole:8002/floconsole + - FEATURE_API_SERVICES=true + networks: + - floware-network + + # Floware - Core AI middleware + floware: + build: + context: . + dockerfile: ./wavefront/server/docker/floware.Dockerfile + container_name: floware + restart: unless-stopped + ports: + - '8001:8001' + + # ============================================ + # Volume Mounts for Credential Files + # Uncomment the volumes you need and update the host path (left side of :) + # ============================================ + volumes: + # GCP Service Account JSON (for cloud storage, KMS, Pub/Sub) + # Required if CLOUD_PROVIDER=gcp + - ./credentials/gcp-service-account.json:/app/credentials/gcp-service-account.json:ro + + # Google OAuth Client Secret (for Google OAuth integration) + # Required if using Google OAuth for Gmail/Drive + # - ./credentials/google-oauth-client-secret.json:/app/credentials/google-oauth-client-secret.json:ro + + # Gmail Service Account JSON (for Gmail email sending) + # Required if EMAIL_PROVIDER=gmail with service account + # - ./credentials/gmail-service-account.json:/app/credentials/gmail-service-account.json:ro + + environment: + # ============================================ + # Database Configuration + # ============================================ + - DB_USERNAME= + - DB_PASSWORD= + - DB_HOST=postgres-floware + - DB_PORT=5432 + - DB_NAME= + + # ============================================ + # Redis Configuration + # ============================================ + - REDIS_PROTOCOL=redis + - REDIS_HOST=redis-floware + - REDIS_PORT=6379 + + # ============================================ + # Application Settings + # ============================================ 
+ - APP_ENV= + - APP_NAME=floware + - ALLOWED_ORIGINS= + - PASSTHROUGH_SECRET= + + # ============================================ + # JWT Configuration + # Generate keys using: ./scripts/generate-keys.sh + # ============================================ + - TOKEN_EXPIRY= + - TEMPORARY_TOKEN_EXPIRY= + - ENABLE_CLOUD_KMS= + - CONSOLE_TOKEN_PREFIX= + - FLOWARE_JWT_ISSUER= + - FLOWARE_JWT_AUDIENCE= + - FLOWARE_JWT_VALIDATION_ISSUER= + - PRIVATE_KEY= + - PUBLIC_KEY= + + # ============================================ + # Initial User configuration + # ============================================ + - EMAIL= + - PASSWORD= + - FIRST_NAME= + - LAST_NAME= + + # ============================================ + # Cloud Provider Configuration + # Choose: aws or gcp + # ============================================ + - CLOUD_PROVIDER= + + # AWS Configuration (if CLOUD_PROVIDER=aws) + - AWS_ACCESS_KEY_ID= + - AWS_SECRET_ACCESS_KEY= + - AWS_REGION= + - AWS_KMS_ARN= + - AWS_QUEUE_URL= + - TRANSCRIPT_BUCKET_NAME= + - AUDIO_BUCKET_NAME= + - AWS_GOLD_ASSET_BUCKET_NAME= + - MODEL_STORAGE_BUCKET= + - AGENT_YAML_BUCKET= + - VOICE_AGENT_BUCKET= + - IMAGE_SEARCH_REFERENCE_IMAGES_BUCKET= + - APPLICATION_BUCKET= + + # GCP Configuration (if CLOUD_PROVIDER=gcp) + - GCP_PROJECT_ID= + - GCP_LOCATION= + - GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/gcp-service-account.json + - GCP_ASSET_STORAGE_BUCKET= + - GCP_KMS_KEY_RING= + - GCP_KMS_CRYPTO_KEY= + - GCP_KMS_CRYPTO_KEY_VERSION= + - GCP_GOLD_TOPIC_ID= + - GCP_EMAIL_TOPIC_ID= + - WORKFLOW_WORKER_TOPIC= + + # ============================================ + # LLM/AI Configuration + # ============================================ + - OPENAI_API_KEY= + - OPENAI_MODEL_NAME= + + # Other AI APIs + - GOOGLE_API_KEY= + + # ============================================ + # External Services + # ============================================ + - INFERENCE_SERVICE_URL= + - EMBEDDING_SERVICE_URL= + - CALL_PROCESSING_BASE_URL= + - HERMES_URL= + + # 
============================================ + # OAuth Configuration (Optional) + # ============================================ + # Azure OAuth + - AZURE_CLIENT_ID= + - AZURE_TENANT_ID= + - AZURE_CLIENT_SECRET= + - AZURE_SCOPES= + - AZURE_REDIRECT_URI= + + # ============================================ + # Email Configuration (Optional) + # ============================================ + - EMAIL_PROVIDER= + + # Gmail + - GMAIL_SERVICE_ACCOUNT_FILE=/app/credentials/gmail-service-account.json + - GMAIL_SENDER_EMAILID= + - GMAIL_DELEGATE_USER= + + # Outlook + - OUTLOOK_CLIENT_ID= + - OUTLOOK_CLIENT_SECRET= + - OUTLOOK_TENANT_ID= + - OUTLOOK_SENDER_EMAILID= + + # ============================================ + # Analytics (Optional) + # ============================================ + - SUPERSET_URL= + - SUPERSET_USERNAME= + - SUPERSET_PASSWORD= + - SUPERSET_FLAG= + - BQ_PROJECT_ID= + - BQ_DATASET_ID= + + # Redshift + - REDSHIFT_DB= + - REDSHIFT_USERNAME= + - REDSHIFT_PASSWORD= + - REDSHIFT_HOST= + - REDSHIFT_PORT= + + # ============================================ + # Security Settings + # ============================================ + - MAX_FAILED_ATTEMPTS= + - LOCKOUT_DURATION_HOURS= + - INACTIVE_DAYS_THRESHOLD= + + # ============================================ + # Feature Flags (true/false) + # ============================================ + - AZURE_FLAG= + - AZURE_OPENAI_FLAG= + - CELERY_FLAG= + - EMAIL_SYNC_FLAG= + - GOOGLE_FLAG= + - INACTIVE_ACCOUNT_DISABLE_FLAG= + - SAML_FLAG= + - SLACK_FLAG= + - SUPERSET_FLAG= + - VECTOR_DB_FLAG= + + depends_on: + postgres-floware: + condition: service_healthy + redis-floware: + condition: service_healthy + networks: + - floware-network + + # FloConsole - Management console + floconsole: + build: + context: . 
+ dockerfile: ./wavefront/server/docker/floconsole.Dockerfile + container_name: floconsole + restart: unless-stopped + ports: + - '8002:8002' + environment: + - CONSOLE_DB_HOST=postgres-console + - CONSOLE_DB_PORT=5432 + - CONSOLE_DB_USERNAME= + - CONSOLE_DB_PASSWORD= + - CONSOLE_DB_NAME= + # ============================================ + # Application Configuration + # ============================================ + - ALLOWED_ORIGINS=http://wavefront:3000 + - CONSOLE_EMAIL= + - CONSOLE_PASSWORD= + - CONSOLE_FIRST_NAME= + - CONSOLE_LAST_NAME= + - CONSOLE_JWT_ISSUER= + - CONSOLE_JWT_AUDIENCE= + - CONSOLE_TOKEN_PREFIX= + - SUPER_ADMIN_EMAIL= + - TOKEN_EXPIRY= + - TEMPORARY_TOKEN_EXPIRY= + - PRIVATE_KEY= + - PUBLIC_KEY= + - APP_ENV= + - ENABLE_CLOUD_KMS= + - PASSTHROUGH_SECRET= + # =============================================== + # Default App + # =============================================== + - DEFAULT_APP_NAME=floware-dev + - DEFAULT_APP_PUBLIC_URL=http://floware:8001 + - DEFAULT_APP_PRIVATE_URL=http://floware:8001 + depends_on: + postgres-console: + condition: service_healthy + floware: + condition: service_started + networks: + - floware-network + + # Call Processing - Voice call processing with Pipecat + call_processing: + build: + context: . 
+ dockerfile: ./wavefront/server/docker/call_processing.Dockerfile + container_name: call_processing + restart: unless-stopped + ports: + - '8004:8004' + environment: + # ============================================ + # Redis Configuration + # ============================================ + - REDIS_HOST=redis-call-processing + - REDIS_PORT=6379 + - REDIS_DB=0 + + # ============================================ + # Application Settings + # ============================================ + - APP_ENV= + - APP_NAME=call_processing + - APP_NAME_FLOWARE=floware + + # ============================================ + # Floware Integration + # ============================================ + - FLOWARE_BASE_URL= + - PASSTHROUGH_SECRET= + + # ============================================ + # JWT Configuration + # ============================================ + - TOKEN_EXPIRY= + - TEMPORARY_TOKEN_EXPIRY= + - CALL_PROCESSING_TOKEN_PREFIX= + - CALL_PROCESSING_JWT_ISSUER= + + depends_on: + redis-call-processing: + condition: service_healthy + floware: + condition: service_started + networks: + - floware-network + + # Inference App - ML inference service + # Uncomment to enable inference service + inference_app: + build: + context: . 
+ dockerfile: ./wavefront/server/docker/inference_app.Dockerfile + container_name: inference_app + restart: unless-stopped + ports: + - '8003:8003' + # + # ============================================ + # Volume Mounts for Credential Files + # Uncomment if using GCP for model storage + # ============================================ + volumes: + # GCP Service Account JSON (for model storage access) + # Required if CLOUD_PROVIDER=gcp + - ./credentials/gcp-service-account.json:/app/credentials/gcp-service-account.json:ro + + environment: + # ============================================ + # Application Settings + # ============================================ + - APP_ENV= + + # ============================================ + # Cloud Provider Configuration + # Choose: aws or gcp + # ============================================ + - CLOUD_PROVIDER= + + # AWS Configuration (if CLOUD_PROVIDER=aws) + - AWS_ACCESS_KEY_ID= + - AWS_SECRET_ACCESS_KEY= + - AWS_REGION= + - MODEL_STORAGE_BUCKET= + + # GCP Configuration (if CLOUD_PROVIDER=gcp) + - GCP_PROJECT_ID= + - GCP_LOCATION= + - GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/gcp-service-account.json + - MODEL_STORAGE_BUCKET= + + networks: + - floware-network + +volumes: + postgres-floware-data: + driver: local + postgres-console-data: + driver: local + +networks: + floware-network: + driver: bridge diff --git a/docker/floware.Dockerfile b/docker/floware.Dockerfile deleted file mode 100644 index 016e8be8..00000000 --- a/docker/floware.Dockerfile +++ /dev/null @@ -1,45 +0,0 @@ -FROM python:3.11-slim - -WORKDIR /app - -COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /bin/ - -RUN apt-get update && apt-get install -y \ - libpq-dev \ - gcc \ - libgl1 \ - libglib2.0-0 \ - && rm -rf /var/lib/apt/lists/* - -COPY pyproject.toml uv.lock ./ - -COPY modules/auth_module /app/modules/auth_module -COPY modules/common_module /app/modules/common_module -COPY modules/db_repo_module /app/modules/db_repo_module -COPY modules/gold_module 
/app/modules/gold_module -COPY modules/insights_module /app/modules/insights_module -COPY modules/knowledge_base_module /app/modules/knowledge_base_module -COPY modules/user_management_module /app/modules/user_management_module -COPY modules/llm_inference_config_module /app/modules/llm_inference_config_module -COPY modules/agents_module /app/modules/agents_module -COPY modules/plugins_module/ /app/modules/plugins_module -COPY modules/product_analysis_module /app/modules/product_analysis_module -COPY modules/inference_module /app/modules/inference_module -COPY modules/image_search_module /app/modules/image_search_module -COPY modules/tools_module /app/modules/tools_module -COPY modules/voice_agents_module /app/modules/voice_agents_module -COPY modules/api_services_module /app/modules/api_services_module - -COPY packages/flo_cloud /app/packages/flo_cloud -COPY packages/flo_utils /app/packages/flo_utils - -COPY plugins/datasource /app/plugins/datasource -COPY plugins/authenticator /app/plugins/authenticator - -COPY apps/floware /app/apps/floware - -RUN uv sync --package floware --frozen --no-dev - -WORKDIR /app/apps/floware/floware - -CMD ["uv", "run", "server.py"] diff --git a/wavefront/docker_setup.md b/wavefront/docker_setup.md new file mode 100644 index 00000000..a8aab192 --- /dev/null +++ b/wavefront/docker_setup.md @@ -0,0 +1,571 @@ +# Docker Setup Guide + +This guide explains how to configure and run the RootFlo AI platform using Docker Compose. + +## Quick Start + +1. **Generate JWT Keys** + ```bash + cd /path/to/wavefront/server + ./scripts/generate-keys.sh + ``` + Copy the output `PRIVATE_KEY` and `PUBLIC_KEY` values. + +2. **Configure Environment Variables** + + Edit `docker-compose.yml` and replace all `` placeholders with your actual values. + +3. **Start Services** + ```bash + docker-compose up -d + ``` + +4. 
**Check Logs** + ```bash + docker-compose logs -f floware + ``` + +## Setting Up Credential Files + +Some services require credential files (JSON files for GCP, OAuth, etc.). Follow these steps: + +1. **Create a credentials directory** (recommended): + ```bash + mkdir -p /path/to/wavefront/server/credentials + ``` + +2. **Copy your credential files**: + ```bash + # Example for GCP + cp ~/Downloads/service-account.json ./credentials/gcp-service-account.json + + # Example for Google OAuth + cp ~/Downloads/client_secret.json ./credentials/google-oauth-client-secret.json + + # Example for Gmail Service Account + cp ~/Downloads/gmail-service-account.json ./credentials/gmail-service-account.json + ``` + +3. **Uncomment volume mounts in docker-compose.sample.yml**: + - Find the `volumes:` section under the `floware` service + - Uncomment the lines for the credentials you're using + - Update the left side of the `:` with your actual file path + +4. **Security**: Add `credentials/` to `.gitignore` to avoid committing secrets + ```bash + echo "credentials/" >> .gitignore + ``` + +### Which Credentials Do I Need? + +- **GCP Service Account** (`GOOGLE_APPLICATION_CREDENTIALS`): + - Required if: `CLOUD_PROVIDER=gcp` + - Used for: Cloud Storage, KMS, Pub/Sub + - Environment variable: `GOOGLE_APPLICATION_CREDENTIALS=/app/credentials/gcp-service-account.json` + +- **Gmail Service Account** (`GMAIL_SERVICE_ACCOUNT_FILE`): + - Required if: `EMAIL_PROVIDER=gmail` with service account auth + - Used for: Sending emails via Gmail API + - Environment variable: `GMAIL_SERVICE_ACCOUNT_FILE=/app/credentials/gmail-service-account.json` + +**Note**: The container paths (right side of `:` in volume mounts) are fixed at `/app/credentials/`. Only change the host paths (left side) to match where you store your credential files. 
+ +## Services Overview + +| Service | Port | Description | +|---------|------|-------------| +| **floware** | 8001 | Core AI middleware platform | +| **floconsole** | 8002 | Management console | +| **inference_app** | 8003 | Inference App | +| **call_processing** | 8004 | Voice call processing (Pipecat) | +| **postgres-floware** | 5432 | Floware database (pgvector) | +| **postgres-console** | 5433 | Console database | +| **redis-floware** | 6379 | Floware cache | +| **redis-call-processing** | 6380 | Call processing cache | + +## Environment Variables Reference + +### Floware Service + +#### Required Variables + +**Database**: +- `DB_USERNAME`: PostgreSQL username (default: `postgres`) +- `DB_PASSWORD`: PostgreSQL password (default: `postgres`) +- `DB_HOST`: Database host (default: `postgres-floware`) +- `DB_PORT`: Database port (default: `5432`) +- `DB_NAME`: Database name (default: `floware`) + +**Redis**: +- `REDIS_PROTOCOL`: Protocol (default: `redis`) +- `REDIS_HOST`: Redis host (default: `redis-floware`) +- `REDIS_PORT`: Redis port (default: `6379`) + +**Application Settings**: +- `APP_ENV`: Application environment (e.g., `dev`, `staging`, `production`) +- `APP_NAME`: Application name (default: `floware`) +- `ALLOWED_ORIGINS`: Comma-separated list of allowed CORS origins +- `PASSTHROUGH_SECRET`: Secret for service-to-service authentication (**IMPORTANT**: Must be the same in floware, floconsole, and call_processing services) + +**JWT Authentication**: +- `PRIVATE_KEY`: Base64-encoded RSA private key (generate using `./scripts/generate-keys.sh`) +- `PUBLIC_KEY`: Base64-encoded RSA public key (generate using `./scripts/generate-keys.sh`) +- `TOKEN_EXPIRY`: Token expiration in seconds (default: `3600`) +- `TEMPORARY_TOKEN_EXPIRY`: Temporary token expiration (default: `600`) +- `ENABLE_CLOUD_KMS`: Enable cloud KMS for key management (`true` or `false`) +- `CONSOLE_TOKEN_PREFIX`: Token prefix for console tokens (default: `fc_`) +- `FLOWARE_JWT_ISSUER`: JWT issuer 
URL for floware +- `FLOWARE_JWT_AUDIENCE`: JWT audience URL for floware +- `FLOWARE_JWT_VALIDATION_ISSUER`: Comma-separated list of valid issuers + +**Initial User configuration**: +- `EMAIL`: User Email +- `PASSWORD`: User Password +- `FIRST_NAME`: User first name +- `LAST_NAME`: User last name + +**Cloud Provider** (Choose one: `aws` or `gcp`): +- `CLOUD_PROVIDER`: Set to `aws` or `gcp` + +#### AWS Configuration (if CLOUD_PROVIDER=aws) + +```yaml +AWS_ACCESS_KEY_ID: Your AWS access key +AWS_SECRET_ACCESS_KEY: Your AWS secret key +AWS_REGION: AWS region (e.g., ap-south-1) +AWS_KMS_ARN: KMS key ARN for encryption +AWS_QUEUE_URL: SQS queue URL + +# S3 Buckets +TRANSCRIPT_BUCKET_NAME: Bucket for audio transcripts +AUDIO_BUCKET_NAME: Bucket for audio files +AWS_GOLD_ASSET_BUCKET_NAME: Bucket for gold/insights assets +MODEL_STORAGE_BUCKET: Bucket for ML models +AGENT_YAML_BUCKET: Bucket for agent YAML configs +VOICE_AGENT_BUCKET: Bucket for voice agent configs +IMAGE_SEARCH_REFERENCE_IMAGES_BUCKET: Bucket for reference images +APPLICATION_BUCKET: Bucket for API service applications +``` + +#### GCP Configuration (if CLOUD_PROVIDER=gcp) + +```yaml +GCP_PROJECT_ID: Your GCP project ID +GCP_LOCATION: GCP region (e.g., asia-south1) +GOOGLE_APPLICATION_CREDENTIALS: Path to service account JSON file +GCP_KMS_KEY_RING: KMS key ring name +GCP_KMS_CRYPTO_KEY: KMS crypto key name +GCP_KMS_CRYPTO_KEY_VERSION: KMS key version (usually 1) + +# GCS Buckets +GCP_ASSET_STORAGE_BUCKET: Bucket for assets +GCP_GOLD_TOPIC_ID: Gold/insights Pub/Sub topic +GCP_EMAIL_TOPIC_ID: Email processing Pub/Sub topic +WORKFLOW_WORKER_TOPIC: Workflow Pub/Sub topic +``` + +#### LLM/AI Configuration + +**OpenAI**: +- `OPENAI_API_KEY`: Your OpenAI API key +- `OPENAI_MODEL_NAME`: Model to use (default: `gpt-4o`) + +**Other APIs**: +- `GOOGLE_API_KEY`: Google API key + +#### Optional Configurations + +**External Services**: +- `INFERENCE_SERVICE_URL`: URL for the inference service (default: 
`http://inference_app:8003`) +- `EMBEDDING_SERVICE_URL`: URL for the embedding service +- `CALL_PROCESSING_BASE_URL`: URL for call processing service (default: `http://call_processing:8004`) +- `HERMES_URL`: URL for Hermes service + +**OAuth Integration**: +- Azure: `AZURE_CLIENT_ID`, `AZURE_TENANT_ID`, `AZURE_CLIENT_SECRET`, `AZURE_SCOPES`, `AZURE_REDIRECT_URI` + +**Email Integration**: +- `EMAIL_PROVIDER`: `gmail` or `outlook` +- Gmail: `GMAIL_SERVICE_ACCOUNT_FILE`, `GMAIL_SENDER_EMAILID`, `GMAIL_DELEGATE_USER` +- Outlook: `OUTLOOK_CLIENT_ID`, `OUTLOOK_CLIENT_SECRET`, `OUTLOOK_TENANT_ID`, `OUTLOOK_SENDER_EMAILID` + +**Analytics**: +- Superset: `SUPERSET_URL`, `SUPERSET_USERNAME`, `SUPERSET_PASSWORD` +- BigQuery: `BQ_PROJECT_ID`, `BQ_DATASET_ID` +- Redshift: `REDSHIFT_DB`, `REDSHIFT_USERNAME`, `REDSHIFT_PASSWORD`, `REDSHIFT_HOST`, `REDSHIFT_PORT` + +**Security Settings**: +- `MAX_FAILED_ATTEMPTS`: Maximum failed login attempts before lockout (default: `3`) +- `LOCKOUT_DURATION_HOURS`: Hours to lock account after max failed attempts (default: `24`) +- `INACTIVE_DAYS_THRESHOLD`: Days of inactivity before account is disabled (default: `60`) + +**Feature Flags** (set to `true` or `false`): +- `AZURE_FLAG`: Enable Azure integration +- `AZURE_OPENAI_FLAG`: Enable Azure OpenAI +- `CELERY_FLAG`: Enable Celery for async tasks +- `EMAIL_SYNC_FLAG`: Enable email synchronization +- `GOOGLE_FLAG`: Enable Google integration +- `INACTIVE_ACCOUNT_DISABLE_FLAG`: Enable automatic account disabling for inactive users +- `SAML_FLAG`: Enable SAML authentication +- `SLACK_FLAG`: Enable Slack integration +- `SUPERSET_FLAG`: Enable Superset analytics +- `VECTOR_DB_FLAG`: Enable vector database features + +### FloConsole Service + +#### Required Variables + +- `ALLOWED_ORIGINS`: Allowed origins (http://wavefront:3000) +- `CONSOLE_DB_HOST`: Database host (default: `postgres-console`) +- `CONSOLE_DB_PORT`: Database port (default: `5432`) +- `CONSOLE_DB_USERNAME`: Database username (default: 
`postgres`) +- `CONSOLE_DB_PASSWORD`: Database password (default: `postgres`) +- `CONSOLE_DB_NAME`: Database name (default: `console`) +- `CONSOLE_EMAIL`: Admin email address +- `CONSOLE_PASSWORD`: Admin password +- `CONSOLE_FIRST_NAME`: Admin first name +- `CONSOLE_LAST_NAME`: Admin last name +- `CONSOLE_JWT_ISSUER`: JWT issuer URL for console +- `CONSOLE_JWT_AUDIENCE`: JWT audience URL for console +- `CONSOLE_TOKEN_PREFIX`: Token prefix for console tokens (default: `fc_`) +- `SUPER_ADMIN_EMAIL`: Super admin email (usually same as `CONSOLE_EMAIL`) +- `TOKEN_EXPIRY`: Token expiration in seconds (default: `3600`) +- `TEMPORARY_TOKEN_EXPIRY`: Temporary token expiration (default: `600`) +- `PRIVATE_KEY`: Base64-encoded RSA private key (can be different from floware) +- `PUBLIC_KEY`: Base64-encoded RSA public key (can be different from floware) +- `APP_ENV`: Application environment +- `ENABLE_CLOUD_KMS`: Enable cloud KMS for key management (`true` or `false`) +- `PASSTHROUGH_SECRET`: Secret for service-to-service authentication (**IMPORTANT**: Must be the same as floware and call_processing) +- `DEFAULT_APP_NAME`: Name for the default app created automatically (e.g., `floware-dev`) +- `DEFAULT_APP_PUBLIC_URL`: Public URL for the default app (e.g., `http://floware:8001`) +- `DEFAULT_APP_PRIVATE_URL`: Private URL for the default app (e.g., `http://floware:8001`) + +### Call Processing Service + +#### Required Variables + +- `REDIS_HOST`: Redis host (default: `redis-call-processing`) +- `REDIS_PORT`: Redis port (default: `6379`) +- `REDIS_DB`: Redis database number (default: `0`) +- `APP_ENV`: Application environment +- `APP_NAME`: Application name (default: `call_processing`) +- `APP_NAME_FLOWARE`: Floware app name reference (default: `floware`) +- `FLOWARE_BASE_URL`: Floware URL (default: `http://floware:8001`) +- `PASSTHROUGH_SECRET`: Secret for service-to-service authentication +- `TOKEN_EXPIRY`: Token expiration in seconds (default: `3600`) +- 
`TEMPORARY_TOKEN_EXPIRY`: Temporary token expiration (default: `600`) +- `CALL_PROCESSING_TOKEN_PREFIX`: Token prefix (default: `fc_`) +- `CALL_PROCESSING_JWT_ISSUER`: JWT issuer URL for call processing + +### Inference App Service (Optional) + +Uncomment the `inference_app` service in `docker-compose.yml` to enable. + +#### Required Variables + +- `APP_ENV`: Application environment +- `CLOUD_PROVIDER`: `aws` or `gcp` +- `MODEL_STORAGE_BUCKET`: Bucket for ML models + +**If AWS**: +- `AWS_ACCESS_KEY_ID`: AWS access key +- `AWS_SECRET_ACCESS_KEY`: AWS secret key +- `AWS_REGION`: AWS region (e.g., `ap-south-1`) + +**If GCP**: +- `GCP_PROJECT_ID`: GCP project ID +- `GOOGLE_APPLICATION_CREDENTIALS`: Path to service account JSON file (default: `/app/credentials/gcp-service-account.json`) + +## Cloud Provider Setup + +### AWS Setup + +1. **Create IAM User**: + ```bash + aws iam create-user --user-name rootflo-backend + ``` + +2. **Attach Policies**: + ```json + { + "Version": "2012-10-17", + "Statement": [ + { + "Effect": "Allow", + "Action": [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + "s3:ListBucket" + ], + "Resource": [ + "arn:aws:s3:::your-bucket-name", + "arn:aws:s3:::your-bucket-name/*" + ] + }, + { + "Effect": "Allow", + "Action": [ + "sqs:SendMessage", + "sqs:ReceiveMessage", + "sqs:DeleteMessage" + ], + "Resource": "arn:aws:sqs:*:*:your-queue" + }, + { + "Effect": "Allow", + "Action": [ + "kms:Decrypt", + "kms:Encrypt", + "kms:GenerateDataKey" + ], + "Resource": "arn:aws:kms:*:*:key/*" + } + ] + } + ``` + +3. **Create Buckets**: + ```bash + aws s3 mb s3://your-transcript-bucket --region ap-south-1 + aws s3 mb s3://your-audio-bucket --region ap-south-1 + aws s3 mb s3://your-agent-yaml-bucket --region ap-south-1 + # ... create other buckets as needed + ``` + +### GCP Setup + +1. **Create Service Account**: + ```bash + gcloud iam service-accounts create rootflo-backend \ + --display-name="RootFlo Backend Service Account" + ``` + +2. 
**Grant Roles**: + ```bash + gcloud projects add-iam-policy-binding YOUR_PROJECT_ID \ + --member="serviceAccount:rootflo-backend@YOUR_PROJECT_ID.iam.gserviceaccount.com" \ + --role="roles/storage.objectAdmin" + + gcloud projects add-iam-policy-binding YOUR_PROJECT_ID \ + --member="serviceAccount:rootflo-backend@YOUR_PROJECT_ID.iam.gserviceaccount.com" \ + --role="roles/pubsub.publisher" + + gcloud projects add-iam-policy-binding YOUR_PROJECT_ID \ + --member="serviceAccount:rootflo-backend@YOUR_PROJECT_ID.iam.gserviceaccount.com" \ + --role="roles/cloudkms.cryptoKeyEncrypterDecrypter" + ``` + +3. **Create Service Account Key**: + ```bash + gcloud iam service-accounts keys create service-account.json \ + --iam-account=rootflo-backend@YOUR_PROJECT_ID.iam.gserviceaccount.com + ``` + +4. **Create Buckets**: + ```bash + gsutil mb -l asia-south1 gs://your-gcp-assets-bucket + gsutil mb -l asia-south1 gs://your-gcp-storage-bucket + # ... create other buckets as needed + ``` + +5. **Create Pub/Sub Topics**: + ```bash + gcloud pubsub topics create your-gold-topic + gcloud pubsub topics create your-email-topic + gcloud pubsub topics create your-workflow-topic + ``` + +### LocalStack (Development Alternative) + +For local development without cloud dependencies: + +1. **Add LocalStack to docker-compose.yml**: + ```yaml + localstack: + image: localstack/localstack:latest + ports: + - "4566:4566" + environment: + - SERVICES=s3,sqs,kms + - DEBUG=1 + networks: + - floware-network + ``` + +2. 
**Configure Environment Variables**: + ```yaml + AWS_ACCESS_KEY_ID: test + AWS_SECRET_ACCESS_KEY: test + AWS_ENDPOINT_URL: http://localstack:4566 + ``` + +## Common Operations + +### Start All Services +```bash +docker-compose up -d +``` + +### View Logs +```bash +# All services +docker-compose logs -f + +# Specific service +docker-compose logs -f floware +``` + +### Restart Service +```bash +docker-compose restart floware +``` + +### Stop All Services +```bash +docker-compose down +``` + +### Stop and Remove Volumes (Reset Everything) +```bash +docker-compose down -v +``` + +### Rebuild Service +```bash +docker-compose build floware +docker-compose up -d floware +``` + +### Run Database Migrations +Migrations run automatically on service startup. To run manually: +```bash +docker-compose exec floware uv run alembic upgrade head +``` + +### Access Database +```bash +# Floware database +docker-compose exec postgres-floware psql -U postgres -d floware + +# Console database +docker-compose exec postgres-console psql -U postgres -d console +``` + +### Access Redis +```bash +# Floware Redis +docker-compose exec redis-floware redis-cli + +# Call Processing Redis +docker-compose exec redis-call-processing redis-cli +``` + +## Troubleshooting + +### Service Won't Start + +1. **Check logs**: + ```bash + docker-compose logs floware + ``` + +2. **Check environment variables**: + ```bash + docker-compose config + ``` + +3. 
**Verify dependencies**: + ```bash + docker-compose ps + ``` + +### Database Connection Issues + +- Ensure database service is healthy: `docker-compose ps postgres-floware` +- Check database logs: `docker-compose logs postgres-floware` +- Verify credentials in docker-compose.yml match + +### Redis Connection Issues + +- Ensure Redis is running: `docker-compose ps redis-floware` +- Test connection: `docker-compose exec redis-floware redis-cli ping` + +### Port Conflicts + +If ports are already in use, modify the port mappings in docker-compose.yml: +```yaml +ports: + - '8002:8001' # Map to different external port +``` + +### Out of Memory + +Increase Docker memory limit in Docker Desktop settings (recommended: 8GB minimum). + +## Security Best Practices + +1. **Never commit docker-compose.yml with real credentials** +2. **Use strong passwords** for database and admin accounts +3. **Rotate JWT keys** regularly +4. **Use environment-specific configurations** (don't use dev credentials in production) +5. **Enable KMS encryption** for production deployments (`ENABLE_CLOUD_KMS=true`) +6. **Use HTTPS** for production deployments (add reverse proxy like Nginx) +7. **Restrict network access** using Docker networks and firewall rules + +## Initial Setup + +After starting all services, you need to configure floconsole to connect to floware: + +### Option 1: Automatic App Creation (Recommended) + +Configure the default app using environment variables in docker-compose.yml: + +```yaml +DEFAULT_APP_NAME: floware-dev +DEFAULT_APP_PUBLIC_URL: http://floware:8001 +DEFAULT_APP_PRIVATE_URL: http://floware:8001 +``` + +When the FloConsole service starts, it will automatically create this app with `success` status if these environment variables are set. 
+ +**Why `http://floware:8001` and not `http://localhost:8001`?** +- Inside Docker containers, services communicate using Docker service names +- `localhost` inside a container refers to the container itself, not other containers +- Using `http://floware:8001` allows floconsole to properly proxy requests to the floware service + +### Option 2: Manual App Creation + +If you prefer to create the app manually or need additional apps: + +1. **Access FloConsole**: Navigate to `http://localhost:8002` + +2. **Login** with the credentials configured in docker-compose.yml: + - Email: Value of `CONSOLE_EMAIL` + - Password: Value of `CONSOLE_PASSWORD` + +3. **Create New App**: + - **Deployment Type**: Select `Manual` + - **App Name**: Any name you prefer (e.g., `floware-dev`) + - **Public URL**: `http://floware:8001` (**IMPORTANT**: Use Docker service name, not `localhost`) + - **Private URL**: `http://floware:8001` + +## Production Deployment Notes + +This docker-compose setup is designed for **local development only**. For production: + +1. Use Kubernetes or Docker Swarm for orchestration +2. Implement proper secrets management (Vault, AWS Secrets Manager, etc.) +3. Set up monitoring and logging (Prometheus, Grafana, ELK stack) +4. Configure auto-scaling based on load +5. Use managed databases (RDS, Cloud SQL) instead of containerized databases +6. Implement backup and disaster recovery procedures +7. Use CDN for static assets +8. 
Set up CI/CD pipelines for automated deployments + +## Support + +For issues or questions: +- Check logs: `docker-compose logs -f` +- Review this documentation +- Check service health: `docker-compose ps` diff --git a/wavefront/server/.python-version b/wavefront/server/.python-version new file mode 100644 index 00000000..902b2c90 --- /dev/null +++ b/wavefront/server/.python-version @@ -0,0 +1 @@ +3.11 \ No newline at end of file diff --git a/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_13_1400-a1b2c3d4e5f6_create_default_app.py b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_13_1400-a1b2c3d4e5f6_create_default_app.py new file mode 100644 index 00000000..e6d37633 --- /dev/null +++ b/wavefront/server/apps/floconsole/floconsole/db/alembic/versions/2025_12_13_1400-a1b2c3d4e5f6_create_default_app.py @@ -0,0 +1,68 @@ +"""create default app + +Revision ID: a1b2c3d4e5f6 +Revises: 85a63aed0f81 +Create Date: 2025-12-13 14:00:00.000000 + +""" + +import os +import uuid +from datetime import datetime +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
+revision: str = 'a1b2c3d4e5f6' +down_revision: Union[str, None] = '85a63aed0f81' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Get environment variables for default app + default_app_name = os.getenv('DEFAULT_APP_NAME') + default_app_public_url = os.getenv('DEFAULT_APP_PUBLIC_URL') + default_app_private_url = os.getenv('DEFAULT_APP_PRIVATE_URL') + + # Only create default app if all required env variables are set + if default_app_name and default_app_public_url and default_app_private_url: + # Check if a default app already exists with the same name + conn = op.get_bind() + # Insert default app + app_id = uuid.uuid4() + conn.execute( + sa.text(""" + INSERT INTO app (id, app_name, public_url, private_url, deleted, status, config, deployment_type, type, created_at, updated_at) + VALUES (:id, :app_name, :public_url, :private_url, :deleted, :status, :config, :deployment_type, :type, :created_at, :updated_at) + """), + { + 'id': app_id, + 'app_name': default_app_name, + 'public_url': default_app_public_url, + 'private_url': default_app_private_url, + 'deleted': False, + 'status': 'success', + 'config': '{}', + 'deployment_type': 'manual', + 'type': 'custom', + 'created_at': datetime.now(), + 'updated_at': datetime.now(), + }, + ) + + +def downgrade() -> None: + # Get environment variables for default app + default_app_name = os.getenv('DEFAULT_APP_NAME') + + # Only remove default app if env variable is set + if default_app_name: + conn = op.get_bind() + conn.execute( + sa.text('DELETE FROM app WHERE app_name = :app_name'), + {'app_name': default_app_name}, + ) diff --git a/wavefront/server/docker-compose.yml b/wavefront/server/docker-compose.yml deleted file mode 100644 index a0b78199..00000000 --- a/wavefront/server/docker-compose.yml +++ /dev/null @@ -1,34 +0,0 @@ -version: '3.8' - -services: - postgres: - image: ankane/pgvector - container_name: postgres - restart: always - 
environment: - - POSTGRES_USER=postgres - - POSTGRES_PASSWORD=postgres - - POSTGRES_DB=floware - ports: - - '5432:5432' - volumes: - - /var/lib/postgres-data:/var/lib/postgresql/data - - /var/lib/init.sql:/docker-entrypoint-initdb.d/init.sql - networks: - - floware-network - - redis: - image: redis:7.4 - container_name: redis - ports: - - "6379:6379" - networks: - - floware-network - -volumes: - db: - driver: local - -networks: - floware-network: - driver: bridge diff --git a/wavefront/server/docker/call_processing.Dockerfile b/wavefront/server/docker/call_processing.Dockerfile new file mode 100644 index 00000000..0e6d11b4 --- /dev/null +++ b/wavefront/server/docker/call_processing.Dockerfile @@ -0,0 +1,23 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY --from=ghcr.io/astral-sh/uv:0.7.15 /uv /uvx /bin/ + +RUN apt-get update && apt-get install -y \ + libpq-dev \ + gcc \ + libgl1 \ + libglib2.0-0 \ + ffmpeg \ + && rm -rf /var/lib/apt/lists/* + +COPY wavefront/server/pyproject.toml wavefront/server/uv.lock ./ + +COPY wavefront/server/apps/call_processing /app/apps/call_processing + +RUN uv sync --package call_processing --frozen --no-dev + +WORKDIR /app/apps/call_processing/call_processing + +CMD ["uv", "run", "server.py"] diff --git a/wavefront/server/docker/floconsole.Dockerfile b/wavefront/server/docker/floconsole.Dockerfile new file mode 100644 index 00000000..b7353933 --- /dev/null +++ b/wavefront/server/docker/floconsole.Dockerfile @@ -0,0 +1,24 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY --from=ghcr.io/astral-sh/uv:0.7.15 /uv /uvx /bin/ + +RUN apt-get update && apt-get install -y \ + libpq-dev \ + gcc \ + && rm -rf /var/lib/apt/lists/* + +COPY wavefront/server/pyproject.toml wavefront/server/uv.lock ./ + +COPY wavefront/server/modules/common_module /app/modules/common_module + +COPY wavefront/server/packages/flo_cloud /app/packages/flo_cloud + +COPY wavefront/server/apps/floconsole /app/apps/floconsole + +RUN uv sync --package floconsole --frozen 
--no-dev + +WORKDIR /app/apps/floconsole/floconsole + +CMD ["uv", "run", "server.py"] diff --git a/wavefront/server/docker/floware.Dockerfile b/wavefront/server/docker/floware.Dockerfile new file mode 100644 index 00000000..7d92f228 --- /dev/null +++ b/wavefront/server/docker/floware.Dockerfile @@ -0,0 +1,44 @@ +FROM python:3.11-slim + +WORKDIR /app + +COPY --from=ghcr.io/astral-sh/uv:0.7.15 /uv /uvx /bin/ + +RUN apt-get update && apt-get install -y \ + libpq-dev \ + gcc \ + libgl1 \ + libglib2.0-0 \ + && rm -rf /var/lib/apt/lists/* + +COPY wavefront/server/pyproject.toml wavefront/server/uv.lock ./ + +COPY wavefront/server/modules/auth_module /app/modules/auth_module +COPY wavefront/server/modules/common_module /app/modules/common_module +COPY wavefront/server/modules/db_repo_module /app/modules/db_repo_module +COPY wavefront/server/modules/gold_module /app/modules/gold_module +COPY wavefront/server/modules/insights_module /app/modules/insights_module +COPY wavefront/server/modules/knowledge_base_module /app/modules/knowledge_base_module +COPY wavefront/server/modules/user_management_module /app/modules/user_management_module +COPY wavefront/server/modules/llm_inference_config_module /app/modules/llm_inference_config_module +COPY wavefront/server/modules/agents_module /app/modules/agents_module +COPY wavefront/server/modules/plugins_module/ /app/modules/plugins_module +COPY wavefront/server/modules/product_analysis_module /app/modules/product_analysis_module +COPY wavefront/server/modules/inference_module /app/modules/inference_module +COPY wavefront/server/modules/tools_module /app/modules/tools_module +COPY wavefront/server/modules/voice_agents_module /app/modules/voice_agents_module +COPY wavefront/server/modules/api_services_module /app/modules/api_services_module + +COPY wavefront/server/packages/flo_cloud /app/packages/flo_cloud +COPY wavefront/server/packages/flo_utils /app/packages/flo_utils + +COPY wavefront/server/plugins/datasource 
/app/plugins/datasource +COPY wavefront/server/plugins/authenticator /app/plugins/authenticator + +COPY wavefront/server/apps/floware /app/apps/floware + +RUN uv sync --package floware --frozen --no-dev + +WORKDIR /app/apps/floware/floware + +CMD ["uv", "run", "server.py"] diff --git a/wavefront/server/docker/inference_app.Dockerfile b/wavefront/server/docker/inference_app.Dockerfile new file mode 100644 index 00000000..3c2d730f --- /dev/null +++ b/wavefront/server/docker/inference_app.Dockerfile @@ -0,0 +1,24 @@ +FROM nvidia/cuda:12.6.3-cudnn-devel-ubuntu22.04 + +WORKDIR /app + +COPY --from=ghcr.io/astral-sh/uv:0.7.15 /uv /uvx /bin/ + +RUN apt-get update && apt-get install -y \ + libpq-dev \ + gcc \ + libgl1 \ + libglib2.0-0 \ + && rm -rf /var/lib/apt/lists/* + +COPY wavefront/server/pyproject.toml wavefront/server/uv.lock wavefront/server/.python-version ./ + +COPY wavefront/server/modules/common_module /app/modules/common_module +COPY wavefront/server/packages/flo_cloud /app/packages/flo_cloud +COPY wavefront/server/apps/inference_app /app/apps/inference_app + +RUN uv sync --package inference-app --frozen --no-dev + +WORKDIR /app/apps/inference_app/inference_app + +CMD ["uv", "run", "server.py"] diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py deleted file mode 100644 index f906b47b..00000000 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_10_1133-c7800bd1d9c3_for_actionable_insights.py +++ /dev/null @@ -1,49 +0,0 @@ -"""for actionable insights - -Revision ID: c7800bd1d9c3 -Revises: 01a4c5202566 -Create Date: 2025-02-10 11:33:33.664976 - -""" - -from typing import Sequence, Union -import uuid - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects.postgresql import JSONB -from 
sqlalchemy.dialects.postgresql import UUID -from sqlalchemy.ext.mutable import MutableDict - -# revision identifiers, used by Alembic. -revision: str = 'c7800bd1d9c3' -down_revision: Union[str, None] = '01a4c5202566' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.create_table( - 'actionable_alerts', - sa.Column('id', UUID(as_uuid=True), primary_key=True, default=uuid.uuid4), - sa.Column('signal_id', sa.String, nullable=False), - sa.Column('title', sa.String, nullable=True), - sa.Column('description', sa.String, nullable=True), - sa.Column('signal_type', sa.String, nullable=False), - sa.Column('alerts', MutableDict.as_mutable(JSONB), nullable=True), - sa.Column('data', MutableDict.as_mutable(JSONB), nullable=True), - sa.Column( - 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() - ), - sa.Column( - 'updated_at', - sa.DateTime(), - nullable=False, - server_default=sa.func.now(), - onupdate=sa.func.now(), - ), - ) - - -def downgrade() -> None: - op.drop_table('actionable_alerts') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py deleted file mode 100644 index a54f917e..00000000 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_18_1751-76ba9543af92_create_signal_name_in_actionable_alerts.py +++ /dev/null @@ -1,28 +0,0 @@ -"""create signal name in actionable alerts - -Revision ID: 76ba9543af92 -Revises: c7800bd1d9c3 -Create Date: 2025-02-18 17:51:32.463298 - -""" - -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - -# revision identifiers, used by Alembic. 
-revision: str = '76ba9543af92' -down_revision: Union[str, None] = 'c7800bd1d9c3' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.add_column( - 'actionable_alerts', sa.Column('signal_name', sa.String, nullable=True) - ) - - -def downgrade() -> None: - op.drop_column('actionable_alerts', 'signal_name') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py deleted file mode 100644 index 0507f025..00000000 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_22_1236-f9c4c1c48d46_remove_all_wrongly_generated_alerts.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Remove all wrongly generated alerts - -Revision ID: f9c4c1c48d46 -Revises: 76ba9543af92 -Create Date: 2025-02-22 12:36:16.510535 - -""" - -from typing import Sequence, Union - -from alembic import op -from sqlalchemy.orm import Session -from sqlalchemy.sql import text - -# revision identifiers, used by Alembic. 
-revision: str = 'f9c4c1c48d46' -down_revision: Union[str, None] = '76ba9543af92' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - bind = op.get_bind() - session = Session(bind=bind) - - latest_20_rows = session.execute( - text('SELECT id FROM actionable_alerts ORDER BY created_at DESC LIMIT 20') - ).fetchall() - - if latest_20_rows: - # Extract the 20 IDs to keep - ids_to_keep = tuple(row[0] for row in latest_20_rows) - - # Delete all rows except these 20 - session.execute( - text('DELETE FROM actionable_alerts WHERE id NOT IN :ids'), - {'ids': ids_to_keep}, - ) - session.commit() - - -def downgrade() -> None: - pass diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py index a3edd9a1..6c4955bb 100644 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_02_28_1602-78655faf6488_adding_notification.py @@ -1,7 +1,7 @@ """adding notification Revision ID: 78655faf6488 -Revises: f9c4c1c48d46 +Revises: 01a4c5202566 Create Date: 2025-02-28 16:02:31.108730 """ @@ -14,7 +14,7 @@ # revision identifiers, used by Alembic. 
revision: str = '78655faf6488' -down_revision: Union[str, None] = 'f9c4c1c48d46' +down_revision: Union[str, None] = '01a4c5202566' branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py deleted file mode 100644 index 5881338f..00000000 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_22_1448-ff32e2dd3106_created_actionable_insight_query_table.py +++ /dev/null @@ -1,54 +0,0 @@ -"""created actionable_insight_query table - -Revision ID: ff32e2dd3106 -Revises: 36703628c7a6 -Create Date: 2025-04-22 14:48:12.819342 - -""" - -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -# revision identifiers, used by Alembic. 
-revision: str = 'ff32e2dd3106' -down_revision: Union[str, None] = '36703628c7a6' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - op.create_table( - 'actionable_insight_queries', - sa.Column('id', sa.String(length=255), nullable=False), - sa.Column('version', sa.Integer(), nullable=False), - sa.Column('type', sa.String(length=50), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('enabled', sa.Boolean(), nullable=False), - sa.Column( - 'periodicity', postgresql.JSONB(astext_type=sa.Text()), nullable=False - ), - sa.Column( - 'goal_lines', postgresql.JSONB(astext_type=sa.Text()), nullable=False - ), - sa.Column( - 'projections', postgresql.JSONB(astext_type=sa.Text()), nullable=False - ), - sa.Column('query', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column('plots', postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column( - 'created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True - ), - sa.Column( - 'updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True - ), - sa.PrimaryKeyConstraint('id'), - ) - - -def downgrade() -> None: - op.drop_table('actionable_insight_queries') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py deleted file mode 100644 index 97b55b93..00000000 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_23_1839-96b784074d1c_actionable_alerts_and_query_migrations.py +++ /dev/null @@ -1,79 +0,0 @@ -"""actionable alerts and query migrations - -Revision ID: 
96b784074d1c -Revises: ff32e2dd3106 -Create Date: 2025-04-23 18:39:07.626918 - -""" - -import json -import os -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -import yaml - -# revision identifiers, used by Alembic. -revision: str = '96b784074d1c' -down_revision: Union[str, None] = 'ff32e2dd3106' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - query_dir = os.environ.get('SIGNAL_QUERY_DIR', 'default') - base_dir = os.path.dirname(__file__) - - directory = os.path.join(base_dir, '../../queries', query_dir) - directory = os.path.normpath(directory) - - for filename in os.listdir(directory): - if filename.endswith('.yaml') or filename.endswith('.yml'): - file_path = os.path.join(directory, filename) - with open(file_path, 'r') as f: - yaml_data = yaml.safe_load(f) - conn = op.get_bind() - conn.execute( - sa.text( - """ - INSERT INTO actionable_insight_queries ( - id, version, type, title, name, description, enabled, - periodicity, goal_lines, projections, query, plots, - created_at, updated_at - ) - VALUES ( - :id, :version, :type, :title, :name, :description, :enabled, - CAST(:periodicity AS jsonb), CAST(:goal_lines AS jsonb), - CAST(:projections AS jsonb), CAST(:query AS jsonb), - CAST(:plots AS jsonb), now(), now() - ) - """ - ), - { - 'id': yaml_data['id'], - 'version': yaml_data['version'], - 'type': yaml_data['type'], - 'title': yaml_data['title'], - 'name': yaml_data['name'], - 'description': yaml_data.get('description', ''), - 'enabled': yaml_data.get('enabled', True), - 'periodicity': json.dumps(yaml_data['periodicity']), - 'goal_lines': json.dumps(yaml_data['goal_lines']), - 'projections': json.dumps(yaml_data['projections']), - 'query': json.dumps(yaml_data['query']), - 'plots': json.dumps(yaml_data['plots']), - }, - ) - op.create_foreign_key( - 'fk_insight_query', - 'actionable_alerts', - 'actionable_insight_queries', - 
['signal_id'], - ['id'], - ) - - -def downgrade() -> None: - op.drop_constraint('fk_insight_query', 'actionable_alerts', type_='foreignkey') - op.execute('TRUNCATE TABLE actionable_insight_queries RESTART IDENTITY CASCADE') diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py index 567aaa2e..76a14b9d 100644 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py +++ b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_04_24_1730-a0dfba41ef64_updated_knowledge_base_tables.py @@ -1,7 +1,7 @@ """Updated knowledge base tables Revision ID: a0dfba41ef64 -Revises: 96b784074d1c +Revises: 36703628c7a6 Create Date: 2025-04-24 17:30:04.147978 """ @@ -14,7 +14,7 @@ # revision identifiers, used by Alembic. 
revision: str = 'a0dfba41ef64' -down_revision: Union[str, None] = '96b784074d1c' +down_revision: Union[str, None] = '36703628c7a6' branch_labels: Union[str, Sequence[str], None] = None depends_on: Union[str, Sequence[str], None] = None diff --git a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py b/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py deleted file mode 100644 index a22b7ac0..00000000 --- a/wavefront/server/modules/db_repo_module/db_repo_module/alembic/versions/2025_12_01_1619-10e09e25efa0_drop_actionable_alert_insight_leads_.py +++ /dev/null @@ -1,104 +0,0 @@ -"""drop_actionable_alert_insight_leads_table - -Revision ID: 10e09e25efa0 -Revises: ca83b60258d6 -Create Date: 2025-12-01 16:19:58.228914 - -""" - -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa - - -# revision identifiers, used by Alembic. 
-revision: str = '10e09e25efa0' -down_revision: Union[str, None] = 'ca83b60258d6' -branch_labels: Union[str, Sequence[str], None] = None -depends_on: Union[str, Sequence[str], None] = None - - -def upgrade() -> None: - # Drop the three tables - op.drop_table('actionable_alerts') - op.drop_table('actionable_insight_queries') - op.drop_table('leads') - - -def downgrade() -> None: - # Recreate leads table - op.create_table( - 'leads', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('product_category', sa.String(), nullable=True), - sa.Column('conversation_id', sa.String(), nullable=True), - sa.Column('customer_id', sa.String(), nullable=True), - sa.Column('agent_id', sa.String(), nullable=True), - sa.Column('branch', sa.String(), nullable=True), - sa.Column('region', sa.String(), nullable=True), - sa.Column('start_date', sa.Date(), nullable=False), - sa.Column('end_date', sa.Date(), nullable=False), - sa.Column( - 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() - ), - sa.Column( - 'updated_at', - sa.DateTime(), - nullable=False, - server_default=sa.func.now(), - onupdate=sa.func.now(), - ), - sa.Column('type', sa.String(), nullable=False), - sa.Column('product_name', sa.String(), nullable=False), - sa.PrimaryKeyConstraint('id'), - ) - - # Recreate actionable_insight_queries table - op.create_table( - 'actionable_insight_queries', - sa.Column('id', sa.String(length=255), nullable=False), - sa.Column('version', sa.Integer(), nullable=False), - sa.Column('type', sa.String(length=50), nullable=False), - sa.Column('title', sa.String(length=255), nullable=False), - sa.Column('name', sa.String(length=255), nullable=False), - sa.Column('description', sa.Text(), nullable=True), - sa.Column('enabled', sa.Boolean(), nullable=False, server_default='true'), - sa.Column('periodicity', sa.dialects.postgresql.JSONB(), nullable=False), - sa.Column('goal_lines', sa.dialects.postgresql.JSONB(), nullable=False), - sa.Column('projections', 
sa.dialects.postgresql.JSONB(), nullable=False), - sa.Column('query', sa.dialects.postgresql.JSONB(), nullable=False), - sa.Column('plots', sa.dialects.postgresql.JSONB(), nullable=False), - sa.Column('created_at', sa.DateTime(), server_default=sa.func.now()), - sa.Column( - 'updated_at', - sa.DateTime(), - server_default=sa.func.now(), - onupdate=sa.func.now(), - ), - sa.PrimaryKeyConstraint('id'), - ) - - # Recreate actionable_alerts table - op.create_table( - 'actionable_alerts', - sa.Column('id', sa.UUID(), nullable=False), - sa.Column('signal_id', sa.String(), nullable=False), - sa.Column('title', sa.String(), nullable=True), - sa.Column('description', sa.String(), nullable=True), - sa.Column('signal_type', sa.String(), nullable=False), - sa.Column('signal_name', sa.String(), nullable=True), - sa.Column('alerts', sa.dialects.postgresql.JSONB(), nullable=True), - sa.Column('data', sa.dialects.postgresql.JSONB(), nullable=True), - sa.Column( - 'created_at', sa.DateTime(), nullable=False, server_default=sa.func.now() - ), - sa.Column( - 'updated_at', - sa.DateTime(), - nullable=False, - server_default=sa.func.now(), - onupdate=sa.func.now(), - ), - sa.PrimaryKeyConstraint('id'), - ) diff --git a/wavefront/server/scripts/generate-keys.sh b/wavefront/server/scripts/generate-keys.sh new file mode 100755 index 00000000..cf7a755a --- /dev/null +++ b/wavefront/server/scripts/generate-keys.sh @@ -0,0 +1,37 @@ +#!/bin/bash + +# Generate RSA key pair for JWT authentication +# Output is base64-encoded for use in .env file + +set -e + +echo "Generating RSA key pair for JWT authentication..." + +# Generate private key +ssh-keygen -t rsa -b 2048 -m PEM -f private.pem -N "" -q + +# Restrict permissions on private key immediately +chmod 600 private.pem + +# Extract public key +openssl rsa -in private.pem -pubout -out public.pem 2>/dev/null + +echo "" +echo "Keys generated successfully! 
Copy these lines to your .env file:" +echo "" +echo "# JWT Keys (generated by scripts/generate-keys.sh)" + +# macOS uses base64 without -w flag, Linux uses -w 0 +if [[ "$OSTYPE" == "darwin"* ]]; then + echo "PRIVATE_KEY=$(base64 < private.pem | tr -d '\n')" + echo "PUBLIC_KEY=$(base64 < public.pem | tr -d '\n')" +else + echo "PRIVATE_KEY=$(base64 -w 0 < private.pem)" + echo "PUBLIC_KEY=$(base64 -w 0 < public.pem)" +fi + +# Clean up temporary files +rm private.pem public.pem + +echo "" +echo "Temporary key files removed. Keys are ready to use!"