Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions wavefront/server/apps/floconsole/floconsole/config.ini
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,9 @@ db_name = ${CONSOLE_DB_NAME}
[env_config]
app_env = ${APP_ENV}

[cloud_config]
cloud_provider = ${CLOUD_PROVIDER}

[jwt_token]
token_expiry=${TOKEN_EXPIRY}
temporary_token_expiry=${TEMPORARY_TOKEN_EXPIRY}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,12 +66,8 @@ class ApplicationContainer(containers.DeclarativeContainer):
app_user_repository=app_user_repository,
)

kms_service = providers.Selector(
config.jwt_token.enable_cloud_kms,
true=providers.Singleton(
FloKmsService, cloud_provider=config.cloud_config.cloud_provider
),
false=providers.Object(None), # No KMS service if cloud KMS is not enabled
kms_service = providers.Singleton(
FloKmsService, cloud_provider=config.cloud_config.cloud_provider
)
Comment on lines +69 to 71
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Same breaking change as auth_container.py; also creates dead code in TokenService.

This change mirrors auth_container.py and has the same issue: container initialization will fail if cloud_config.cloud_provider is not properly configured.

Additionally, the floconsole TokenService has explicit defensive guards:

  • Lines 100-101: if self.kms_service is None: raise ValueError(...)
  • Lines 135-136: if self.kms_service is None: raise ValueError(...)

These guards become unreachable dead code since kms_service will never be None. The type hint kms_service: FloKMS | None in floconsole/services/token_service.py:30 is also now misleading.

Consider cleaning up the dead code and updating the type hint to FloKMS if this is the intended behavior going forward.

🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@wavefront/server/apps/floconsole/floconsole/di/application_container.py`
around lines 69 - 71, The container now always provides kms_service which makes
TokenService's defensive checks and optional type incorrect; update
floconsole/services/token_service.py by changing the kms_service annotation from
"FloKMS | None" to "FloKMS", remove the two unreachable guards that raise
ValueError (the checks around "if self.kms_service is None" at the spots
referenced), and delete any code paths that rely on kms_service being None so
the class consistently assumes a present FloKMS; keep the providers.Singleton in
application_container.py as-is if the intended behavior is to require a
configured cloud_provider.


token_service = providers.Singleton(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,11 +15,15 @@ def main():
cache_manager = CacheManager(namespace='rag')
encryption_service = None
if (
(CLOUD_PROVIDER == 'aws' and os.getenv('AWS_KMS_ARN') is not None)
or CLOUD_PROVIDER == 'gcp'
and (
os.getenv('GCP_KMS_KEY_RING') is not None
and os.getenv('GCP_KMS_CRYPTO_KEY') is not None
(CLOUD_PROVIDER == 'aws' and os.getenv('AWS_KMS_ARN'))
or (
CLOUD_PROVIDER == 'gcp'
and (os.getenv('GCP_KMS_KEY_RING') and os.getenv('GCP_KMS_CRYPTO_KEY'))
)
or (
CLOUD_PROVIDER == 'azure'
and os.getenv('AZURE_KEY_VAULT_URL')
and os.getenv('AZURE_KEY_VAULT_KEY_NAME')
)
Comment thread
coderabbitai[bot] marked this conversation as resolved.
):
encryption_service = FloKmsService(cloud_provider=CLOUD_PROVIDER)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,12 +37,8 @@ class AuthContainer(containers.DeclarativeContainer):
db_client=db_client,
)

kms_service = providers.Selector(
config.jwt_token.enable_cloud_kms,
true=providers.Singleton(
FloKmsService, cloud_provider=config.cloud_config.cloud_provider
),
false=providers.Object(None), # No KMS service if cloud KMS is not enabled
kms_service = providers.Singleton(
FloKmsService, cloud_provider=config.cloud_config.cloud_provider
)

token_service = providers.Singleton(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,9 @@
import logging

from .blob_storage import AzureBlobStorage
from .storage_queue import StorageQueue
from .key_vault import AzureKMS

logging.getLogger('azure').setLevel(logging.WARNING)

__all__ = ['AzureBlobStorage', 'StorageQueue']
__all__ = ['AzureBlobStorage', 'AzureKMS', 'StorageQueue']
117 changes: 117 additions & 0 deletions wavefront/server/packages/flo_cloud/flo_cloud/azure/key_vault.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
import os
from typing import Optional

from azure.identity import ClientSecretCredential, DefaultAzureCredential
from azure.keyvault.keys import KeyClient
from azure.keyvault.keys.crypto import (
CryptographyClient,
EncryptionAlgorithm,
SignatureAlgorithm,
)
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.rsa import RSAPublicNumbers

from .._types import FloKMS


class AzureKMS(FloKMS):
"""Azure Key Vault implementation of FloKMS.

Authentication modes (same as AzureBlobStorage):
1. Service Principal — provide client_id, client_secret, tenant_id explicitly,
or set AZURE_CLIENT_ID / AZURE_CLIENT_SECRET / AZURE_TENANT_ID env vars.
2. DefaultAzureCredential — falls back to Workload Identity, Managed Identity,
Azure CLI, etc.

Required env vars:
AZURE_KEY_VAULT_URL — e.g. https://my-vault.vault.azure.net/
AZURE_KEY_VAULT_KEY_NAME — name of the RSA key in the vault

Optional env var:
AZURE_KEY_VAULT_KEY_VERSION — specific key version; omit to use the latest
"""

def __init__(
    self,
    vault_url: Optional[str] = None,
    key_name: Optional[str] = None,
    key_version: Optional[str] = None,
    client_id: Optional[str] = None,
    client_secret: Optional[str] = None,
    tenant_id: Optional[str] = None,
):
    """Resolve configuration, authenticate, and build the Key Vault clients.

    Explicit arguments win; otherwise the AZURE_KEY_VAULT_* environment
    variables are consulted. Credentials are either a full service-principal
    triple (client_id / client_secret / tenant_id) or nothing, in which case
    DefaultAzureCredential handles workload identity, managed identity,
    Azure CLI, etc.

    Raises:
        ValueError: if the vault URL or key name cannot be resolved, or if
            only some of the service-principal fields were supplied.
    """
    env = os.environ.get
    target_vault = vault_url or env('AZURE_KEY_VAULT_URL')
    target_key = key_name or env('AZURE_KEY_VAULT_KEY_NAME')
    # No version means Key Vault serves the latest version of the key.
    target_version = key_version or env('AZURE_KEY_VAULT_KEY_VERSION')

    if not target_vault:
        raise ValueError(
            'vault_url must be provided or AZURE_KEY_VAULT_URL must be set'
        )
    if not target_key:
        raise ValueError(
            'key_name must be provided or AZURE_KEY_VAULT_KEY_NAME must be set'
        )

    sp_fields = (client_id, client_secret, tenant_id)
    if all(sp_fields):
        credential = ClientSecretCredential(
            tenant_id=tenant_id,
            client_id=client_id,
            client_secret=client_secret,
        )
    elif any(sp_fields):
        # A partial triple is almost certainly a misconfiguration; fail loudly
        # rather than silently falling back to DefaultAzureCredential.
        raise ValueError(
            'Partial credentials provided. Supply all of client_id, '
            'client_secret, and tenant_id, or none to use DefaultAzureCredential.'
        )
    else:
        credential = DefaultAzureCredential()

    self._key_name = target_key
    self._key_version = target_version
    self.key_client = KeyClient(vault_url=target_vault, credential=credential)
    # Fetch the key once up front so a bad name/version fails at construction
    # time, and bind a CryptographyClient to it for encrypt/decrypt/sign/verify.
    key = self.key_client.get_key(target_key, version=target_version)
    self.crypto_client = CryptographyClient(key, credential=credential)

def encrypt(self, plaintext: str | bytes) -> bytes:
    """Encrypt *plaintext* with the vault key using RSA-OAEP-256.

    A ``str`` input is UTF-8 encoded first; the raw ciphertext bytes
    returned by Key Vault are passed back unmodified.
    """
    if isinstance(plaintext, str):
        plaintext = plaintext.encode('utf-8')
    result = self.crypto_client.encrypt(EncryptionAlgorithm.rsa_oaep_256, plaintext)
    return result.ciphertext

def decrypt(self, ciphertext: str | bytes) -> bytes:
    """Decrypt RSA-OAEP-256 *ciphertext* and return the plaintext bytes.

    NOTE(review): a ``str`` input is UTF-8 encoded before decryption,
    mirroring ``encrypt``; real ciphertext is arbitrary binary, so callers
    are presumably expected to pass ``bytes`` — confirm the intended
    handling of str inputs against the callers.
    """
    if isinstance(ciphertext, str):
        ciphertext = ciphertext.encode('utf-8')
    result = self.crypto_client.decrypt(
        EncryptionAlgorithm.rsa_oaep_256, ciphertext
    )
    return result.plaintext

def sign(self, message: bytes, **kwargs) -> bytes:
    """Sign a pre-computed SHA-256 digest with the vault key.

    Per the FloKMS contract, ``message`` is the SHA-256 digest of the data,
    NOT the raw data: Azure's CryptographyClient.sign expects a digest for
    PS256, and callers (e.g. token_service) hash before calling, matching
    the AWS (MessageType='DIGEST') and GCP (Digest(sha256=...)) adapters.
    Do not hash again here — that would double-hash.

    Keyword Args:
        signing_algorithm: a ``SignatureAlgorithm``; defaults to PS256.

    Returns:
        The raw signature bytes.
    """
    algorithm = kwargs.get('signing_algorithm', SignatureAlgorithm.ps256)
    result = self.crypto_client.sign(algorithm, message)
    return result.signature

def verify(self, message: bytes, signature: bytes, **kwargs) -> bool:
    """Verify *signature* over a pre-computed SHA-256 digest.

    ``message`` must be the SHA-256 digest of the signed data (same
    contract as :meth:`sign`) — CryptographyClient.verify takes a digest,
    not raw data, for PS256.

    Keyword Args:
        signing_algorithm: a ``SignatureAlgorithm``; defaults to PS256.

    Returns:
        True if the signature is valid for the digest, else False.
    """
    algorithm = kwargs.get('signing_algorithm', SignatureAlgorithm.ps256)
    result = self.crypto_client.verify(algorithm, message, signature)
    return result.is_valid
Comment on lines +94 to +102
Copy link
Copy Markdown

@coderabbitai coderabbitai Bot Mar 25, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

🧩 Analysis chain

🌐 Web query:

In azure-keyvault-keys v4.9.x, what inputs are expected by CryptographyClient.sign/verify vs sign_data/verify_data?

💡 Result:

In azure-keyvault-keys v4.9.x for Python, the CryptographyClient class provides sign/verify methods that expect a pre-computed digest (hash) as input, along with the algorithm and signature. There are no separate sign_data/verify_data methods in the Python SDK, unlike other language SDKs (.NET, Java, JavaScript) which distinguish between signing/verifying digests vs raw data (where _data variants handle hashing automatically). sign(algorithm: SignatureAlgorithm, digest: bytes, kwargs) -> SignResult - algorithm: e.g., SignatureAlgorithm.rs256 - digest: Pre-hashed bytes (e.g., SHA-256 hash of message) verify(algorithm: SignatureAlgorithm, digest: bytes, signature: bytes, kwargs) -> VerifyResult - algorithm: Verification algorithm - digest: Pre-hashed bytes matching the signature - signature: Signature bytes to verify Users must compute the digest client-side (e.g., using hashlib.sha256(message).digest) before calling sign/verify. This matches the official Python API docs for v4.9.x and source code analysis, where only digest-based signing is exposed directly on CryptographyClient. RSA key wrappers (KeyVaultRSAPrivateKey/KeyVaultRSAPublicKey, added in later versions) support raw data signing via cryptography library interfaces, but are not direct client methods.

Citations:


Compute digest before signing; Python SDK expects pre-hashed bytes, not raw messages.

Lines 96 and 101 pass raw message bytes to sign() and verify(), but the Azure Key Vault Python SDK (v4.9.x) CryptographyClient methods expect a pre-computed digest (hash). This will cause a runtime failure or incorrect behavior.

The Python SDK differs from other SDKs (.NET, Java, JavaScript) which provide sign_data()/verify_data() methods for raw data. In Python, you must compute the digest client-side before calling these methods:

Proposed fix
+import hashlib
+
     def sign(self, message: bytes, **kwargs) -> bytes:
         algorithm = kwargs.get('signing_algorithm', SignatureAlgorithm.ps256)
+        digest = hashlib.sha256(message).digest()
-        result = self.crypto_client.sign(algorithm, message)
+        result = self.crypto_client.sign(algorithm, digest)
         return result.signature

     def verify(self, message: bytes, signature: bytes, **kwargs) -> bool:
         algorithm = kwargs.get('signing_algorithm', SignatureAlgorithm.ps256)
+        digest = hashlib.sha256(message).digest()
-        result = self.crypto_client.verify(algorithm, message, signature)
+        result = self.crypto_client.verify(algorithm, digest, signature)
         return result.is_valid
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@wavefront/server/packages/flo_cloud/flo_cloud/azure/key_vault.py` around
lines 94 - 102, The sign and verify methods currently pass raw message bytes to
CryptographyClient.sign/verify, but the Python SDK requires a precomputed
digest; update flo_cloud.azure.key_vault.KeyVaultSigner.sign and .verify to
compute the SHA-256 digest (because SignatureAlgorithm.ps256 uses SHA-256) from
the input message (e.g., message_digest = hashlib.sha256(message).digest()) and
pass that digest to self.crypto_client.sign(...) and
self.crypto_client.verify(...), using the same algorithm selection logic and
ensure hashlib is imported; keep variable names like algorithm and result to
minimize changes.

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is a false positive. The codebase contract is to pass a pre-hashed SHA-256 digest into sign()/verify() — not raw message bytes. AWS KMS uses MessageType='DIGEST' and GCP KMS uses kms_v1.Digest(sha256=message), both explicitly signalling digest input. The Azure SDK's CryptographyClient.sign(algorithm, digest) also expects a digest for PS256, which is what we pass. Adding hashlib.sha256(message).digest() here would double-hash the input and break compatibility with all callers (e.g. token_service.py). No change needed.

Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@rootflo-hardik Thanks for clarifying. Agreed — this is a false positive. The FloKMS contract in this codebase is to pass a pre-computed SHA-256 digest into sign/verify, and Azure’s CryptographyClient.sign/verify for PS256 indeed expects a digest. Hashing again inside AzureKMS would double-hash and break existing callers. No change needed on this PR.

If helpful later, I can add a brief docstring or rename the parameter to “digest” in AzureKMS.sign/verify to make the expectation explicit, but I’ll leave it as-is per your guidance.

(•̀ᴗ•́)و thoughtful bunny


✏️ Learnings added
Learnt from: rootflo-hardik
Repo: rootflo/wavefront PR: 255
File: wavefront/server/packages/flo_cloud/flo_cloud/azure/key_vault.py:94-102
Timestamp: 2026-03-25T05:38:36.270Z
Learning: In wavefront/server/packages/flo_cloud, the FloKMS interface expects callers to pass a pre-computed SHA-256 digest to sign(message_digest: bytes) and verify(message_digest: bytes, signature: bytes). This applies across providers:
- AWS KMS uses MessageType='DIGEST'.
- GCP KMS uses kms_v1.Digest(sha256=...).
- Azure Key Vault’s CryptographyClient.sign/verify with PS256 also require a digest (not raw data).
Do not hash again inside provider adapters; callers (e.g., token_service.py) already supply the SHA-256 digest.

Note: Learnings are effective only in the context of similar code segments. To apply general coding guidelines, please configure review instructions. You can manage existing learnings in the app.


def get_public_key_pem(self, **kwargs) -> str | bytes:
    """Return the vault key's RSA public key as a SubjectPublicKeyInfo PEM string."""
    vault_key = self.key_client.get_key(self._key_name, version=self._key_version)
    jwk = vault_key.key

    # The JWK carries the RSA modulus and exponent as big-endian byte strings;
    # convert them to integers so `cryptography` can rebuild the public key.
    modulus = int.from_bytes(jwk.n, byteorder='big')
    exponent = int.from_bytes(jwk.e, byteorder='big')

    pem_bytes = (
        RSAPublicNumbers(e=exponent, n=modulus)
        .public_key(default_backend())
        .public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )
    )
    return pem_bytes.decode('utf-8')
3 changes: 3 additions & 0 deletions wavefront/server/packages/flo_cloud/flo_cloud/kms.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from .aws.kms import AwsKMS
from .azure.key_vault import AzureKMS
from .gcp.kms import GcpKMS
from ._types import CloudProvider, FloKMS

Expand All @@ -13,6 +14,8 @@ def __get_kms_client(self) -> FloKMS:
return AwsKMS()
elif self.cloud_provider == CloudProvider.GCP.value:
return GcpKMS()
elif self.cloud_provider == CloudProvider.AZURE.value:
return AzureKMS()
else:
raise ValueError(f'Unsupported cloud provider: {self.cloud_provider}')

Expand Down
1 change: 1 addition & 0 deletions wavefront/server/packages/flo_cloud/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ readme = "README.md"
requires-python = ">=3.11"
dependencies = [
"azure-identity>=1.17.0",
"azure-keyvault-keys>=4.9.0",
"azure-storage-blob>=12.20.0",
"azure-storage-queue>=12.10.0",
"boto3<=1.38.40",
Expand Down
17 changes: 17 additions & 0 deletions wavefront/server/uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading