Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
284 changes: 284 additions & 0 deletions plugin-repos/ufc-scoreboard/base_odds_manager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,284 @@
"""
BaseOddsManager - Odds data fetching adapted for MMA/UFC.

Based on LEDMatrix BaseOddsManager with MMA-specific adaptations for
homeAthleteOdds/awayAthleteOdds and separate event_id/comp_id support.

UFC/MMA odds adaptation based on work by Alex Resnick (legoguy1000) - PR #137
"""

import time
import logging
import requests
import json
from datetime import datetime, timedelta, timezone
from typing import Dict, Any, Optional, List

# Import the API counter function from the web interface, if present.
try:
    from web_interface_v2 import increment_api_counter
except ImportError:
    # Fallback if the web interface is not available: a no-op with the same
    # signature, so odds fetching still works without the web layer installed.
    def increment_api_counter(kind: str, count: int = 1):
        pass


class BaseOddsManager:
    """
    Base class for odds data fetching and management.

    Provides core functionality for:
    - ESPN API odds fetching (sports.core.api.espn.com)
    - Caching of fetched odds and of "no odds available" results
    - Error handling and request timeouts
    - League-name mapping and odds-data extraction
    - MMA athlete odds support (homeAthleteOdds/awayAthleteOdds in place of
      the team-based homeTeamOdds/awayTeamOdds keys)
    """

def __init__(self, cache_manager, config_manager=None):
self.cache_manager = cache_manager
self.config_manager = config_manager
self.logger = logging.getLogger(__name__)
self.base_url = "https://sports.core.api.espn.com/v2/sports"

# Configuration with defaults
self.update_interval = 3600 # 1 hour default
self.request_timeout = 30 # 30 seconds default
self.cache_ttl = 1800 # 30 minutes default

# Load configuration if available
if config_manager:
self._load_configuration()

def _load_configuration(self):
"""Load configuration from config manager."""
if not self.config_manager:
return

try:
config = self.config_manager.get_config()
odds_config = config.get("base_odds_manager", {})

self.update_interval = odds_config.get(
"update_interval", self.update_interval
)
self.request_timeout = odds_config.get("timeout", self.request_timeout)
self.cache_ttl = odds_config.get("cache_ttl", self.cache_ttl)

self.logger.debug(
f"BaseOddsManager configuration loaded: "
f"update_interval={self.update_interval}s, "
f"timeout={self.request_timeout}s, "
f"cache_ttl={self.cache_ttl}s"
)

except Exception as e:
self.logger.warning(f"Failed to load BaseOddsManager configuration: {e}")

def get_odds(
self,
sport: str,
league: str,
event_id: str,
comp_id: str = None,
update_interval_seconds: int = None,
) -> Optional[Dict[str, Any]]:
"""
Fetch odds data for a specific fight/game.

Args:
sport: Sport name (e.g., 'mma', 'football')
league: League name (e.g., 'ufc', 'nfl')
event_id: ESPN event ID
comp_id: ESPN competition ID (for MMA where events have multiple fights).
If None, defaults to event_id.
update_interval_seconds: Override default update interval

Returns:
Dictionary containing odds data or None if unavailable
"""
if sport is None or league is None or event_id is None:
raise ValueError("Sport, League, and event_id cannot be None")

if comp_id is None:
comp_id = event_id

# Use provided interval or default
interval = update_interval_seconds or self.update_interval
cache_key = f"odds_espn_{sport}_{league}_{event_id}_{comp_id}"

# Check cache first
cached_data = self.cache_manager.get(cache_key)

if cached_data:
if isinstance(cached_data, dict) and cached_data.get("no_odds"):
self.logger.debug(f"Cached no-odds marker for {cache_key}, skipping")
else:
self.logger.info(f"Using cached odds from ESPN for {cache_key}")
return cached_data
Comment on lines +107 to +118
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

Cached "no_odds" marker doesn't prevent re-fetching — falls through to the API call.

When the cache contains a {"no_odds": True} marker (Line 114), the code logs a debug message but doesn't return None. Execution falls through to the fresh-fetch block on Line 120, defeating the purpose of caching no-odds to avoid repeated API calls.

Also, interval on Line 107 is assigned but never used (confirmed by static analysis F841).

🐛 Proposed fix
         # Check cache first
         cached_data = self.cache_manager.get(cache_key)
 
         if cached_data:
             if isinstance(cached_data, dict) and cached_data.get("no_odds"):
                 self.logger.debug(f"Cached no-odds marker for {cache_key}, skipping")
+                return None
             else:
                 self.logger.info(f"Using cached odds from ESPN for {cache_key}")
                 return cached_data

And remove the unused variable:

-        # Use provided interval or default
-        interval = update_interval_seconds or self.update_interval
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
interval = update_interval_seconds or self.update_interval
cache_key = f"odds_espn_{sport}_{league}_{event_id}_{comp_id}"
# Check cache first
cached_data = self.cache_manager.get(cache_key)
if cached_data:
if isinstance(cached_data, dict) and cached_data.get("no_odds"):
self.logger.debug(f"Cached no-odds marker for {cache_key}, skipping")
else:
self.logger.info(f"Using cached odds from ESPN for {cache_key}")
return cached_data
interval = update_interval_seconds or self.update_interval
cache_key = f"odds_espn_{sport}_{league}_{event_id}_{comp_id}"
# Check cache first
cached_data = self.cache_manager.get(cache_key)
if cached_data:
if isinstance(cached_data, dict) and cached_data.get("no_odds"):
self.logger.debug(f"Cached no-odds marker for {cache_key}, skipping")
return None
else:
self.logger.info(f"Using cached odds from ESPN for {cache_key}")
return cached_data
🧰 Tools
🪛 Ruff (0.15.0)

[error] 107-107: Local variable interval is assigned to but never used

Remove assignment to unused variable interval

(F841)

🤖 Prompt for AI Agents
In `@plugin-repos/ufc-scoreboard/base_odds_manager.py` around lines 107 - 118, The
cached "no_odds" marker is only logged but not acted on, so when
cache_manager.get(cache_key) returns a dict with "no_odds" you should
immediately stop and return None (or a sentinel) instead of falling through to
the API fetch; update the branch that checks isinstance(cached_data, dict) and
cached_data.get("no_odds") inside the method that builds cache_key to return
early. Also remove the unused local variable interval (the assignment from
update_interval_seconds or self.update_interval) — either delete that line or
use update_interval_seconds directly where needed to eliminate the F841
unused-variable warning.


self.logger.info(f"Cache miss - fetching fresh odds from ESPN for {cache_key}")

try:
# Map league names to ESPN API format
league_mapping = {
"ufc": "ufc",
"ncaa_fb": "college-football",
"nfl": "nfl",
"nba": "nba",
"mlb": "mlb",
"nhl": "nhl",
}

espn_league = league_mapping.get(league, league)
url = (
f"{self.base_url}/{sport}/leagues/{espn_league}"
f"/events/{event_id}/competitions/{comp_id}/odds"
)
self.logger.info(f"Requesting odds from URL: {url}")

response = requests.get(url, timeout=self.request_timeout)
response.raise_for_status()
raw_data = response.json()

# Increment API counter for odds data
increment_api_counter("odds", 1)
self.logger.debug(
f"Received raw odds data from ESPN: {json.dumps(raw_data, indent=2)}"
)

odds_data = self._extract_espn_data(raw_data)
if odds_data:
self.logger.info(f"Successfully extracted odds data: {odds_data}")
else:
self.logger.debug("No odds data available for this fight")

if odds_data:
self.cache_manager.set(cache_key, odds_data)
self.logger.info(f"Saved odds data to cache for {cache_key}")
else:
self.logger.debug(f"No odds data available for {cache_key}")
# Cache the fact that no odds are available to avoid repeated API calls
self.cache_manager.set(cache_key, {"no_odds": True})

return odds_data

except requests.exceptions.RequestException as e:
self.logger.error(f"Error fetching odds from ESPN API for {cache_key}: {e}")
except json.JSONDecodeError:
self.logger.error(
f"Error decoding JSON response from ESPN API for {cache_key}."
)

return self.cache_manager.get(cache_key)
Comment on lines +166 to +173
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Error fallback may return the {"no_odds": True} marker to the caller.

On request/parse failure (Lines 166-171), the code falls through to Line 173 which returns self.cache_manager.get(cache_key). If a previous successful call cached {"no_odds": True}, this will return that marker dict to the caller, which likely doesn't expect it and may treat it as valid odds data.

🐛 Proposed fix: return None on error
         except json.JSONDecodeError:
             self.logger.error(
                 f"Error decoding JSON response from ESPN API for {cache_key}."
             )
 
-        return self.cache_manager.get(cache_key)
+        return None
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
except requests.exceptions.RequestException as e:
self.logger.error(f"Error fetching odds from ESPN API for {cache_key}: {e}")
except json.JSONDecodeError:
self.logger.error(
f"Error decoding JSON response from ESPN API for {cache_key}."
)
return self.cache_manager.get(cache_key)
except requests.exceptions.RequestException as e:
self.logger.error(f"Error fetching odds from ESPN API for {cache_key}: {e}")
except json.JSONDecodeError:
self.logger.error(
f"Error decoding JSON response from ESPN API for {cache_key}."
)
return None
🧰 Tools
🪛 Ruff (0.15.0)

[warning] 167-167: Use logging.exception instead of logging.error

Replace with exception

(TRY400)


[warning] 169-171: Use logging.exception instead of logging.error

Replace with exception

(TRY400)

🤖 Prompt for AI Agents
In `@plugin-repos/ufc-scoreboard/base_odds_manager.py` around lines 166 - 173, The
except handlers currently log errors but then fall through to returning
self.cache_manager.get(cache_key), which can return a cached sentinel like
{"no_odds": True}; modify the error path so that on
requests.exceptions.RequestException and json.JSONDecodeError you return None
immediately after logging (i.e., add return None inside those except blocks or
otherwise ensure the method returns None on error instead of calling
self.cache_manager.get(cache_key)); update references to the cache access
(self.cache_manager.get(cache_key)) only for the success path.


def _extract_espn_data(self, data: Dict[str, Any]) -> Optional[Dict[str, Any]]:
"""
Extract and format odds data from ESPN API response.

Supports both team-based odds (homeTeamOdds/awayTeamOdds) and
MMA athlete-based odds (homeAthleteOdds/awayAthleteOdds).

Args:
data: Raw ESPN API response data

Returns:
Formatted odds data dictionary or None
"""
self.logger.debug(f"Extracting ESPN odds data. Data keys: {list(data.keys())}")

if "items" in data and data["items"]:
self.logger.debug(f"Found {len(data['items'])} items in odds data")
item = data["items"][0]
self.logger.debug(f"First item keys: {list(item.keys())}")

# MMA uses homeAthleteOdds/awayAthleteOdds instead of homeTeamOdds/awayTeamOdds
home_odds = item.get("homeTeamOdds", item.get("homeAthleteOdds", {}))
away_odds = item.get("awayTeamOdds", item.get("awayAthleteOdds", {}))

extracted_data = {
"details": item.get("details"),
"over_under": item.get("overUnder"),
"spread": item.get("spread"),
"home_team_odds": {
"money_line": home_odds.get("moneyLine"),
"spread_odds": home_odds.get("current", {})
.get("pointSpread", {})
.get("value"),
},
"away_team_odds": {
"money_line": away_odds.get("moneyLine"),
"spread_odds": away_odds.get("current", {})
.get("pointSpread", {})
.get("value"),
},
}
self.logger.debug(
f"Returning extracted odds data: {json.dumps(extracted_data, indent=2)}"
)
return extracted_data

# Check if this is a valid empty response
if (
"count" in data
and data["count"] == 0
and "items" in data
and data["items"] == []
):
self.logger.debug("Valid empty response - no odds available for this fight")
return None

# Unexpected structure
self.logger.warning(
f"Unexpected odds data structure: {json.dumps(data, indent=2)}"
)
return None

def get_multiple_odds(
self,
sport: str,
league: str,
event_ids: List[str],
comp_ids: List[str] = None,
update_interval_seconds: int = None,
) -> Dict[str, Dict[str, Any]]:
"""
Fetch odds data for multiple fights.

Args:
sport: Sport name
league: League name
event_ids: List of ESPN event IDs
comp_ids: List of competition IDs (parallel to event_ids). If None, uses event_ids.
update_interval_seconds: Override default update interval

Returns:
Dictionary mapping comp_id to odds data
"""
results = {}

if comp_ids is None:
comp_ids = event_ids

for event_id, comp_id in zip(event_ids, comp_ids):
try:
odds_data = self.get_odds(
sport, league, event_id, comp_id, update_interval_seconds
)
if odds_data:
results[comp_id] = odds_data
except Exception as e:
self.logger.error(f"Error fetching odds for event {event_id}/{comp_id}: {e}")
continue

return results

def clear_cache(self, sport: str = None, league: str = None, event_id: str = None):
"""Clear odds cache for specific criteria."""
if sport and league and event_id:
cache_key = f"odds_espn_{sport}_{league}_{event_id}"
self.cache_manager.delete(cache_key)
self.logger.info(f"Cleared cache for {cache_key}")
else:
self.cache_manager.clear()
self.logger.info("Cleared all cache")
Comment on lines +276 to +284
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🔴 Critical

clear_cache uses a different cache key pattern than get_odds, so it can never clear specific entries.

get_odds (Line 108) uses cache key odds_espn_{sport}_{league}_{event_id}_{comp_id}, but clear_cache (Line 279) constructs odds_espn_{sport}_{league}_{event_id} — missing the _{comp_id} suffix. The targeted delete will never match an existing cache entry.

🐛 Proposed fix: add comp_id parameter
-    def clear_cache(self, sport: str = None, league: str = None, event_id: str = None):
+    def clear_cache(self, sport: str = None, league: str = None, event_id: str = None, comp_id: str = None):
         """Clear odds cache for specific criteria."""
-        if sport and league and event_id:
-            cache_key = f"odds_espn_{sport}_{league}_{event_id}"
+        if sport and league and event_id and comp_id:
+            cache_key = f"odds_espn_{sport}_{league}_{event_id}_{comp_id}"
             self.cache_manager.delete(cache_key)
             self.logger.info(f"Cleared cache for {cache_key}")
+        elif sport and league and event_id:
+            # Clear all comp_ids for this event (would need prefix-based delete)
+            cache_key = f"odds_espn_{sport}_{league}_{event_id}"
+            self.cache_manager.delete(cache_key)
+            self.logger.info(f"Cleared cache for {cache_key}")
         else:
             self.cache_manager.clear()
             self.logger.info("Cleared all cache")
🧰 Tools
🪛 Ruff (0.15.0)

[warning] 276-276: PEP 484 prohibits implicit Optional

Convert to T | None

(RUF013)


[warning] 276-276: PEP 484 prohibits implicit Optional

Convert to T | None

(RUF013)


[warning] 276-276: PEP 484 prohibits implicit Optional

Convert to T | None

(RUF013)

🤖 Prompt for AI Agents
In `@plugin-repos/ufc-scoreboard/base_odds_manager.py` around lines 276 - 284, The
clear_cache implementation uses a different key pattern than get_odds so it
never deletes specific entries; update clear_cache to accept comp_id (e.g., def
clear_cache(self, sport: str = None, league: str = None, event_id: str = None,
comp_id: str = None)), and when sport, league, event_id (and comp_id when
provided) are present, build the same cache_key format used in get_odds
(odds_espn_{sport}_{league}_{event_id}_{comp_id}) and call
self.cache_manager.delete(cache_key); if comp_id is missing but the intent is to
clear all comps for an event, iterate or use a prefix-based deletion if
supported by cache_manager, otherwise fall back to clear() and log accordingly
(refer to get_odds, clear_cache, cache_key, cache_manager.delete).

Loading