Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions .github/workflows/build-wavefront-floconsole-develop.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ env:
GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub
IMAGE_NAME: wavefront-floconsole

AWS_REGION: ap-south-1
ECR_REGISTRY: 025066241490.dkr.ecr.ap-south-1.amazonaws.com
ECR_REPOSITORY: rootflo/wavefront-floconsole

jobs:
build-push-artifact:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -60,7 +64,25 @@ jobs:
docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}

# Configure AWS credentials and push to ECR
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}

- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v1

- name: Tag and push image to Amazon ECR
run: |
docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }}
docker push ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }}

- name: Cleanup Docker images
run: |
docker rmi rootflo:${{ env.IMAGE_TAG }} || true
docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true
docker rmi ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }} || true
22 changes: 22 additions & 0 deletions .github/workflows/build-wavefront-floware-develop.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ env:
GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub
IMAGE_NAME: wavefront-floware

AWS_REGION: ap-south-1
ECR_REGISTRY: 025066241490.dkr.ecr.ap-south-1.amazonaws.com
ECR_REPOSITORY: rootflo/wavefront-floware

jobs:
build-push-artifact:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -60,7 +64,25 @@ jobs:
docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}

# Configure AWS credentials and push to ECR
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}

- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v1

- name: Tag and push image to Amazon ECR
run: |
docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }}
docker push ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }}

- name: Cleanup Docker images
run: |
docker rmi rootflo:${{ env.IMAGE_TAG }} || true
docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true
docker rmi ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }} || true
22 changes: 22 additions & 0 deletions .github/workflows/build-wavefront-web-develop.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ env:
GAR_LOCATION: asia-south1-docker.pkg.dev/aesy-330511/root-hub
IMAGE_NAME: wavefront-web

AWS_REGION: ap-south-1
ECR_REGISTRY: 025066241490.dkr.ecr.ap-south-1.amazonaws.com
ECR_REPOSITORY: rootflo/wavefront-web

jobs:
build-push-artifact:
runs-on: ubuntu-latest
Expand Down Expand Up @@ -60,7 +64,25 @@ jobs:
docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
docker push ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}

# Configure AWS credentials and push to ECR
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.AWS_REGION }}

- name: Login to Amazon ECR
id: login-ecr
uses: aws-actions/amazon-ecr-login@v1

- name: Tag and push image to Amazon ECR
run: |
docker tag rootflo:${{ env.IMAGE_TAG }} ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }}
docker push ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }}

- name: Cleanup Docker images
run: |
docker rmi rootflo:${{ env.IMAGE_TAG }} || true
docker rmi ${{ env.GAR_LOCATION }}/${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }} || true
docker rmi ${{ env.ECR_REGISTRY }}/${{ env.ECR_REPOSITORY }}:${{ env.IMAGE_TAG }} || true
7 changes: 4 additions & 3 deletions wavefront/server/docker/floconsole.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,9 @@ COPY wavefront/server/packages/flo_cloud /app/packages/flo_cloud

COPY wavefront/server/apps/floconsole /app/apps/floconsole

RUN uv sync --package floconsole --frozen --no-dev
COPY wavefront/server/scripts/console-server-init.sh /app/scripts/console-server-init.sh
RUN chmod +x /app/scripts/console-server-init.sh

WORKDIR /app/apps/floconsole/floconsole
RUN uv sync --package floconsole --frozen --no-dev

CMD ["uv", "run", "server.py"]
ENTRYPOINT ["/app/scripts/console-server-init.sh"]
29 changes: 29 additions & 0 deletions wavefront/server/scripts/console-server-init.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
#!/bin/bash
# Entrypoint for the floconsole container: ensure the console database
# exists, then hand off to the application server (see end of script).
set -e  # abort immediately if any command below fails

# Put the uv-managed virtualenv first so python3/uv resolve to it.
export PATH="/app/.venv/bin:$PATH"

echo "Creating Console database if not exists..."
# Inline Python: connect to the maintenance 'postgres' database and create
# the application database if missing.  All CONSOLE_DB_* variables are
# required; a missing one raises KeyError and (via set -e) aborts startup.
python3 -c "
import psycopg2, os

host = os.environ['CONSOLE_DB_HOST']
user = os.environ['CONSOLE_DB_USERNAME']
password = os.environ['CONSOLE_DB_PASSWORD']
dbname = os.environ['CONSOLE_DB_NAME']

# NOTE(review): no connect_timeout is passed, so an unreachable host can
# hang container startup indefinitely — confirm whether a timeout is wanted.
conn = psycopg2.connect(host=host, user=user, password=password, dbname='postgres')
conn.autocommit = True  # CREATE DATABASE cannot run inside a transaction
cur = conn.cursor()
# Check-then-create is not atomic: two replicas starting concurrently can
# both see 'missing' and one will then fail on CREATE DATABASE.
# NOTE(review): catching psycopg2.errors.DuplicateDatabase instead would
# make this safe under concurrent startup — verify before relying on it.
cur.execute(\"SELECT 1 FROM pg_database WHERE datname = %s\", (dbname,))

if not cur.fetchone():
    # Identifier is quoted manually; embedded double quotes are doubled,
    # which is valid SQL identifier escaping.
    cur.execute('CREATE DATABASE \"' + dbname.replace('\"', '\"\"') + '\"')
    print('Database created')
Comment on lines +15 to +22
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

🧩 Analysis chain

🏁 Script executed:

# First, check if the file exists and read the relevant lines
cat -n wavefront/server/scripts/console-server-init.sh | head -80

Repository: rootflo/wavefront

Length of output: 1022


🏁 Script executed:

# Check for psycopg2 version in requirements/dependencies
fd -t f -e txt -e pip -e in requirements
fd -t f -e py -e txt setup.py setup.cfg pyproject.toml
rg -l "psycopg2" --type py --type txt

Repository: rootflo/wavefront

Length of output: 218


🏁 Script executed:

# Find requirements files
fd requirements
fd "\.txt$|setup\.py" | head -20
rg -l "psycopg2" | head -10

Repository: rootflo/wavefront

Length of output: 322


🏁 Script executed:

# Check the floconsole pyproject.toml for psycopg2
cat wavefront/server/apps/floconsole/pyproject.toml | rg -A 5 -B 5 "psycopg2|dependencies"

Repository: rootflo/wavefront

Length of output: 625


🌐 Web query:

psycopg2 connect_timeout parameter documentation

💡 Result:

connect_timeout in psycopg2 is not a psycopg2-specific option; it’s a libpq connection parameter that psycopg2 passes through.

  • How to use (keyword arg):

    psycopg2.connect(host="...", dbname="...", user="...", password="...", connect_timeout=10)

    Psycopg2 passes “any other keyword parameter” through to the underlying client library (libpq), so connect_timeout is accepted this way. [2]

  • How to use (DSN string):

    psycopg2.connect("host=... dbname=... user=... password=... connect_timeout=10")

    Libpq documents connect_timeout as the maximum time to wait while connecting, in seconds; 0/negative/omitted means wait indefinitely, and the timeout is applied per host when multiple hosts are provided. [1]

Docs:

  • libpq connection parameters (includes connect_timeout) [1]
  • psycopg2 connect() parameters / pass-through behavior [2]

Sources:
[1] PostgreSQL libpq connection parameters documentation (libpq-connect / parameter keywords) (postgresql.org)
[2] Psycopg2 connect() documentation (“any other keyword parameter will be passed to the underlying client library”) (access.crunchydata.com)

Citations:


🌐 Web query:

psycopg2 sql module Identifier DuplicateDatabase exception

💡 Result:

In psycopg2 you can’t pass a database name as a %s parameter (placeholders are for values, not identifiers). For CREATE DATABASE, compose the identifier with psycopg2.sql.Identifier, then catch psycopg2.errors.DuplicateDatabase (SQLSTATE 42P04, base class ProgrammingError). [1][2]

import psycopg2
from psycopg2 import sql, errors

db_name = "mydb"

conn = psycopg2.connect("dbname=postgres user=postgres password=...")
conn.autocommit = True  # CREATE DATABASE must not run inside a transaction
cur = conn.cursor()

try:
    cur.execute(sql.SQL("CREATE DATABASE {}").format(sql.Identifier(db_name)))
except errors.DuplicateDatabase:
    # db already exists (42P04)
    pass
finally:
    cur.close()
    conn.close()

If you need a qualified identifier elsewhere (e.g., "schema"."table"), pass multiple parts: sql.Identifier("schema", "table"). [1]

Sources: psycopg2 SQL composition / Identifier docs [1]; psycopg2 SQLSTATE-to-exception mapping showing 42P04 -> DuplicateDatabase [2].


Harden DB creation against concurrent startup and connection hangs.

Line 15 lacks a connection timeout, and lines 18–22 use a check-then-create sequence vulnerable to race conditions when multiple containers start concurrently. One contender can fail with a duplicate database error.

Add connect_timeout parameter to psycopg2.connect(), replace the check-then-create pattern with exception handling, and use psycopg2.sql.Identifier for safe identifier composition:

Proposed fix
-python3 -c "
-import psycopg2, os
+python3 -c "
+import os
+import psycopg2
+from psycopg2 import sql, errors
 
 host = os.environ['CONSOLE_DB_HOST']
 user = os.environ['CONSOLE_DB_USERNAME']
 password = os.environ['CONSOLE_DB_PASSWORD']
 dbname = os.environ['CONSOLE_DB_NAME']
+connect_timeout = int(os.getenv('CONSOLE_DB_CONNECT_TIMEOUT', '5'))
 
-conn = psycopg2.connect(host=host, user=user, password=password, dbname='postgres')
+conn = psycopg2.connect(
+    host=host,
+    user=user,
+    password=password,
+    dbname='postgres',
+    connect_timeout=connect_timeout,
+)
 conn.autocommit = True
 cur = conn.cursor()
-cur.execute(\"SELECT 1 FROM pg_database WHERE datname = %s\", (dbname,))
-
-if not cur.fetchone():
-    cur.execute('CREATE DATABASE \"' + dbname.replace('\"', '\"\"') + '\"')
-    print('Database created')
-else:
-    print('Database already exists, skipping')
+try:
+    cur.execute(sql.SQL('CREATE DATABASE {}').format(sql.Identifier(dbname)))
+    print('Database created')
+except errors.DuplicateDatabase:
+    print('Database already exists, skipping')
+finally:
+    cur.close()
 conn.close()
 "
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
conn = psycopg2.connect(host=host, user=user, password=password, dbname='postgres')
conn.autocommit = True
cur = conn.cursor()
cur.execute(\"SELECT 1 FROM pg_database WHERE datname = %s\", (dbname,))
if not cur.fetchone():
cur.execute('CREATE DATABASE \"' + dbname.replace('\"', '\"\"') + '\"')
print('Database created')
import os
import psycopg2
from psycopg2 import sql, errors
host = os.environ['CONSOLE_DB_HOST']
user = os.environ['CONSOLE_DB_USERNAME']
password = os.environ['CONSOLE_DB_PASSWORD']
dbname = os.environ['CONSOLE_DB_NAME']
connect_timeout = int(os.getenv('CONSOLE_DB_CONNECT_TIMEOUT', '5'))
conn = psycopg2.connect(
host=host,
user=user,
password=password,
dbname='postgres',
connect_timeout=connect_timeout,
)
conn.autocommit = True
cur = conn.cursor()
try:
cur.execute(sql.SQL('CREATE DATABASE {}').format(sql.Identifier(dbname)))
print('Database created')
except errors.DuplicateDatabase:
print('Database already exists, skipping')
finally:
cur.close()
conn.close()
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@wavefront/server/scripts/console-server-init.sh` around lines 15 - 22, Add a
connection timeout to psycopg2.connect by passing connect_timeout (e.g.,
connect_timeout=5) to avoid hangs, and replace the SELECT-then-CREATE pattern
with a safe try/except around the CREATE DATABASE call: use
psycopg2.sql.Identifier to build the identifier for dbname when calling
cur.execute(sql.SQL('CREATE DATABASE {}').format(Identifier(dbname))) and catch
the specific duplicate-database error (psycopg2.errors.DuplicateDatabase or the
appropriate DatabaseError SQLSTATE) to ignore it while re-raising other
exceptions; ensure conn.autocommit remains True so CREATE runs outside
transactions.

else:
    print('Database already exists, skipping')
# Close the maintenance connection; the cursor is reclaimed on interpreter exit.
conn.close()
"

# Hand off to the app server.  exec replaces this shell with the server
# process so container signals (e.g. SIGTERM on shutdown) reach it directly.
cd /app/apps/floconsole/floconsole
exec uv run server.py