From 2222e2c2444a639dc40bc9f2a67df7808f0f567b Mon Sep 17 00:00:00 2001
From: Tushar sabharwal
Date: Mon, 12 Jan 2026 14:37:58 +0000
Subject: [PATCH] fix: remove deprecated proxies argument to support OpenAI SDK v1.0+

---
 runtime/node/agent/memory/embedding.py | 13 +++++++++++--
 utils/token_tracker.py                 | 23 +++++++++++++++--------
 2 files changed, 26 insertions(+), 10 deletions(-)

diff --git a/runtime/node/agent/memory/embedding.py b/runtime/node/agent/memory/embedding.py
index 02bcf0bcf..89ede0017 100755
--- a/runtime/node/agent/memory/embedding.py
+++ b/runtime/node/agent/memory/embedding.py
@@ -87,10 +87,19 @@ def __init__(self, embedding_config: EmbeddingConfig):
         self.use_chunking = embedding_config.params.get('use_chunking', False)
         self.chunk_strategy = embedding_config.params.get('chunk_strategy', 'average')
 
+        # Check if a custom base_url is provided for the OpenAI client
         if self.base_url:
-            self.client = openai.OpenAI(api_key=self.api_key, base_url=self.base_url)
+            # Initialize OpenAI client with custom base_url.
+            # Note: 'proxies' argument is removed to maintain compatibility with OpenAI SDK v1.0.0+
+            self.client = openai.OpenAI(
+                api_key=self.api_key,
+                base_url=self.base_url
+            )
         else:
-            self.client = openai.OpenAI(api_key=self.api_key)
+            # Initialize OpenAI client with default settings
+            self.client = openai.OpenAI(
+                api_key=self.api_key
+            )
 
     @retry(wait=wait_random_exponential(min=2, max=5), stop=stop_after_attempt(10))
     def get_embedding(self, text):
diff --git a/utils/token_tracker.py b/utils/token_tracker.py
index 0f46b1952..1dd1dc124 100755
--- a/utils/token_tracker.py
+++ b/utils/token_tracker.py
@@ -16,10 +16,12 @@ class TokenUsage:
     node_id: Optional[str] = None
     model_name: Optional[str] = None
     workflow_id: Optional[str] = None
-    provider: Optional[str] = None  # Add provider field
+    provider: Optional[str] = None
+    # New: Add proxies field to track networking configuration
+    proxies: Optional[Dict[str, str]] = None
 
     def to_dict(self):
-        """Convert to dictionary format."""
+        """Convert the usage metrics to a dictionary format for export."""
         return {
             "input_tokens": self.input_tokens,
             "output_tokens": self.output_tokens,
@@ -29,7 +31,8 @@ def to_dict(self):
             "node_id": self.node_id,
             "model_name": self.model_name,
             "workflow_id": self.workflow_id,
-            "provider": self.provider  # Include provider in output
+            "provider": self.provider,
+            "proxies": self.proxies  # Include proxies in dictionary output
         }
 
 
@@ -44,11 +47,13 @@ def __init__(self, workflow_id: str):
         self.call_history = []
         self.node_call_counts = defaultdict(int)  # Track how many times each node is called
 
-    def record_usage(self, node_id: str, model_name: str, usage: TokenUsage, provider: str = None):
-        """Records token usage for a specific call, handling multiple node executions."""
-        # Update the usage with provider if it wasn't set already
+    def record_usage(self, node_id: str, model_name: str, usage: TokenUsage, provider: str = None, proxies: Dict = None):
+        """Records token usage for a specific call and tracks proxy information."""
+        # Update usage object with provider and proxy info if available
         if provider and not usage.provider:
             usage.provider = provider
+        if proxies and not usage.proxies:
+            usage.proxies = proxies
 
         # Add to total usage
         self.total_usage.input_tokens += usage.input_tokens
@@ -83,12 +88,14 @@ def record_usage(self, node_id: str, model_name: str, usage: TokenUsage, provide
             "total_tokens": usage.total_tokens,
             "metadata": dict(usage.metadata),
             "timestamp": usage.timestamp.isoformat(),
-            "execution_number": self.node_call_counts[node_id]  # Track which execution this is
+            "execution_number": self.node_call_counts[node_id]
         }
 
-        # Add provider to history entry if available
+        # Include optional metadata in history
         if provider:
             history_entry["provider"] = provider
+        if proxies:
+            history_entry["proxies"] = proxies  # Fix for issue #494 tracking
 
         self.call_history.append(history_entry)
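
Note (illustration only, not part of the patch): with the deprecated 'proxies' keyword removed from the client constructor, callers that still need to route OpenAI traffic through a proxy under SDK v1.x can inject a pre-configured HTTP client instead. The sketch below is a minimal example under a few assumptions: a recent httpx (0.26 or later, where httpx.Client accepts a 'proxy' argument), and placeholder values for PROXY_URL, the API key, and the embedding model.

    import httpx
    import openai

    # Placeholder values -- substitute the real proxy address and API key.
    PROXY_URL = "http://proxy.internal:8080"
    API_KEY = "sk-..."

    # Instead of the removed 'proxies' keyword, hand the client a
    # pre-configured httpx.Client that routes requests through the proxy.
    client = openai.OpenAI(
        api_key=API_KEY,
        http_client=httpx.Client(proxy=PROXY_URL),
    )

    # The client is then used exactly as before, e.g. for embeddings.
    response = client.embeddings.create(
        model="text-embedding-3-small",
        input="hello world",
    )

Because the proxy lives in the injected http_client rather than in the OpenAI constructor, the same construction works for direct deployments: simply omit the http_client argument, as the patched embedding.py does.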