Intro stage complete, fixture update management for failed downloads

parent 816816c7
@@ -7,6 +7,7 @@ import logging
 import json
 import threading
 import os
+import hashlib
 from pathlib import Path
 from datetime import datetime, timedelta
 from typing import Dict, Any, Optional, List, Union
@@ -217,6 +218,7 @@ class UpdatesResponseHandler(ResponseHandler):
             'timestamp': datetime.utcnow().isoformat(),
             'synchronized_matches': 0,
             'downloaded_zips': 0,
+            'expected_zips': 0,
             'errors': []
         }
@@ -237,11 +239,16 @@ class UpdatesResponseHandler(ResponseHandler):
             return processed_data

         # First pass: synchronize all match data to database
+        logger.debug(f"Starting fixture synchronization for {len(fixtures)} fixtures")
         session = self.db_manager.get_session()
         try:
             for fixture_data in fixtures:
+                fixture_id = fixture_data.get('fixture_id', 'unknown')
                 matches = fixture_data.get('matches', [])
+                logger.debug(f"Synchronizing fixture {fixture_id} with {len(matches)} matches")
                 for match_data in matches:
+                    match_number = match_data.get('match_number', 'unknown')
                     try:
                         # Update heartbeat during processing to prevent health check failures
                         if self.api_client:
@@ -253,12 +260,15 @@ class UpdatesResponseHandler(ResponseHandler):
                         if 'fixture_active_time' in fixture_data:
                             match_data['fixture_active_time'] = fixture_data['fixture_active_time']

+                        logger.debug(f"Synchronizing match {match_number} in fixture {fixture_id}")
                         # Synchronize match data to database
                         self._synchronize_match(session, match_data)
                         processed_data['synchronized_matches'] += 1
+                        logger.debug(f"Successfully synchronized match {match_number}")
                     except Exception as e:
-                        error_msg = f"Failed to process match {match_data.get('match_number', 'unknown')} in fixture {fixture_data.get('fixture_id', 'unknown')}: {e}"
+                        error_msg = f"Failed to process match {match_number} in fixture {fixture_id}: {e}"
                         logger.error(error_msg)
                         processed_data['errors'].append(error_msg)
                         # Continue processing other matches even if this one fails
@@ -268,15 +278,26 @@ class UpdatesResponseHandler(ResponseHandler):
             if self.api_client:
                 self.api_client.heartbeat()

+            logger.debug(f"Committing {processed_data['synchronized_matches']} synchronized matches to database")
             session.commit()
+            logger.debug("Database commit completed successfully")
         finally:
             session.close()
         # Second pass: download ZIP files without holding database session
         # This prevents database locking during potentially slow downloads
+        logger.debug(f"Starting ZIP file downloads for {len(fixtures)} fixtures")
+        total_matches_with_zips = 0
         for fixture_data in fixtures:
+            fixture_id = fixture_data.get('fixture_id', 'unknown')
             matches = fixture_data.get('matches', [])
+            logger.debug(f"Processing fixture {fixture_id} with {len(matches)} matches")
             for match_data in matches:
+                match_number = match_data.get('match_number', 'unknown')
+                zip_filename = match_data.get('zip_filename')
                 try:
                     # Update heartbeat before each download
                     if self.api_client:
@@ -284,16 +305,36 @@ class UpdatesResponseHandler(ResponseHandler):

                     # Download ZIP file if available (check match-level zip_download_url)
                     if 'zip_download_url' in match_data:
+                        processed_data['expected_zips'] += 1
+                        total_matches_with_zips += 1
                         match_data['zip_url'] = match_data['zip_download_url']
-                        if self._download_zip_file(match_data):
+                        logger.debug(f"Found ZIP file to download: {zip_filename} for match {match_number} in fixture {fixture_id}")
+                        download_success = self._download_zip_file(match_data)
+                        if download_success:
                             processed_data['downloaded_zips'] += 1
+                            logger.debug(f"Successfully downloaded ZIP file: {zip_filename} for match {match_number}")
+                        else:
+                            logger.debug(f"ZIP file download skipped or failed: {zip_filename} for match {match_number}")
+                except requests.exceptions.HTTPError as http_err:
+                    # Check if this is a "fixture no longer available" error (404)
+                    if http_err.response is not None and http_err.response.status_code == 404:
+                        logger.debug(f"ZIP file no longer available on server (404): {zip_filename or 'unknown'} for match {match_number} - continuing with normal operations")
+                        # Don't add to errors list, just skip this download
+                    else:
+                        # Other HTTP errors are actual failures
+                        error_msg = f"HTTP error downloading ZIP for match {match_number}: {http_err}"
+                        logger.error(error_msg)
+                        processed_data['errors'].append(error_msg)
                 except Exception as e:
-                    error_msg = f"Failed to download ZIP for match {match_data.get('match_number', 'unknown')}: {e}"
+                    error_msg = f"Failed to download ZIP for match {match_number}: {e}"
                     logger.error(error_msg)
                     processed_data['errors'].append(error_msg)
                     # Continue with other downloads even if this one fails
                     continue

+        logger.debug(f"ZIP download summary: {processed_data['downloaded_zips']}/{processed_data['expected_zips']} ZIP files downloaded successfully from {total_matches_with_zips} matches")
         logger.info(f"Synchronized {processed_data['synchronized_matches']} matches, downloaded {processed_data['downloaded_zips']} ZIP files")
         return processed_data
@@ -377,16 +418,19 @@ class UpdatesResponseHandler(ResponseHandler):

     def _download_zip_file(self, match_data: Dict[str, Any]) -> bool:
         """Download ZIP file to persistent storage with API token authentication"""
+        zip_url = match_data.get('zip_url')
+        zip_filename = match_data.get('zip_filename')
+
+        if not zip_url or not zip_filename:
+            logger.debug(f"Skipping ZIP download - missing URL or filename: url={zip_url}, filename={zip_filename}")
+            return False
+
+        logger.debug(f"Starting ZIP file download: {zip_filename} from {zip_url}")
+
         try:
-            zip_url = match_data.get('zip_url')
-            zip_filename = match_data.get('zip_filename')
-
-            if not zip_url or not zip_filename:
-                return False
-
             # Prepare headers with API token authentication
             headers = {}

             # Get API token from the API client's fastapi_main endpoint
             if self.api_client and hasattr(self.api_client, 'endpoints'):
                 fastapi_endpoint = self.api_client.endpoints.get('fastapi_main')
@@ -396,40 +440,79 @@ class UpdatesResponseHandler(ResponseHandler):
                     if token:
                         headers['Authorization'] = f"Bearer {token}"
                         logger.debug(f"Using API token for ZIP download: {zip_filename}")
+                    else:
+                        logger.debug(f"No API token available for ZIP download: {zip_filename}")

             if not headers:
                 logger.warning(f"No API token available for ZIP download: {zip_filename}")

             # Download ZIP file with authentication using session with SSL support
+            logger.debug(f"Initiating HTTP request for ZIP file: {zip_filename}")
             response = self.api_client.session.get(zip_url, stream=True, timeout=30, headers=headers)
             response.raise_for_status()
+            logger.debug(f"HTTP request successful for {zip_filename}, status: {response.status_code}")

             # Save to persistent storage
             zip_path = self.zip_storage_dir / zip_filename
+            logger.debug(f"Saving ZIP file to: {zip_path}")

             # Update heartbeat before potentially slow file write
             if self.api_client:
                 self.api_client.heartbeat()
+            bytes_downloaded = 0
             with open(zip_path, 'wb') as f:
                 for chunk in response.iter_content(chunk_size=8192):
-                    f.write(chunk)
-                    # Update heartbeat every few chunks for large files
-                    if self.api_client and f.tell() % (1024 * 1024) == 0:  # Every MB
-                        self.api_client.heartbeat()
+                    if chunk:  # Filter out keep-alive chunks
+                        f.write(chunk)
+                        bytes_downloaded += len(chunk)

-            logger.info(f"Downloaded ZIP file: {zip_filename}")
+                        # Update heartbeat every few chunks for large files
+                        if self.api_client and f.tell() % (1024 * 1024) == 0:  # Every MB
+                            logger.debug(f"Downloaded {f.tell() // (1024 * 1024)}MB for {zip_filename}")
+                            self.api_client.heartbeat()
+
+            logger.debug(f"File download completed: {zip_filename}, total bytes: {bytes_downloaded}")
+
+            # Validate downloaded file integrity
+            logger.debug(f"Starting validation for downloaded ZIP file: {zip_filename}")
+            if not self._validate_downloaded_zip(zip_path, match_data):
+                logger.error(f"ZIP file validation failed: {zip_filename}")
+                # Remove corrupted file
+                try:
+                    zip_path.unlink()
+                    logger.debug(f"Removed corrupted ZIP file: {zip_filename}")
+                except Exception as cleanup_e:
+                    logger.warning(f"Failed to remove corrupted ZIP file {zip_filename}: {cleanup_e}")
+                return False
+
+            logger.info(f"Successfully downloaded and validated ZIP file: {zip_filename}")
             return True
+        except requests.exceptions.HTTPError as http_err:
+            # Check if this is a "fixture no longer available" error (404)
+            if http_err.response is not None and http_err.response.status_code == 404:
+                logger.debug(f"ZIP file no longer available on server (404): {zip_filename} - continuing with normal operations")
+                return False  # Don't treat as error, just skip this download
+            else:
+                # Other HTTP errors are actual failures
+                logger.error(f"HTTP error downloading ZIP file {zip_filename}: {http_err}")
+                raise
+        except requests.exceptions.RequestException as req_err:
+            logger.error(f"Network error downloading ZIP file {zip_filename}: {req_err}")
+            return False
+        except IOError as io_err:
+            logger.error(f"File I/O error downloading ZIP file {zip_filename}: {io_err}")
+            return False
         except Exception as e:
-            logger.error(f"Failed to download ZIP file: {e}")
+            logger.error(f"Unexpected error downloading ZIP file {zip_filename}: {e}")
             return False
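
A note on the handler ordering above: `requests.exceptions.HTTPError` is a subclass of `RequestException`, so the specific 404 handling must come before the generic network handler or it would never be reached. A minimal sketch of the same pattern (URL hypothetical):

```python
import requests

try:
    # Hypothetical URL; stream=True mirrors the download above
    resp = requests.get("https://example.invalid/match.zip", stream=True, timeout=30)
    resp.raise_for_status()  # raises HTTPError for 4xx/5xx responses
except requests.exceptions.HTTPError as err:
    # Must come first: HTTPError subclasses RequestException
    if err.response is not None and err.response.status_code == 404:
        pass  # expired fixture - not treated as an error
    else:
        raise
except requests.exceptions.RequestException as err:
    # Connection errors, timeouts, and other transport-level failures
    print(f"network failure: {err}")
```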
     def _parse_datetime(self, datetime_str) -> Optional[datetime]:
         """Parse datetime string to datetime object"""
         if not datetime_str:
             return None

         try:
             # Try ISO format first
             return datetime.fromisoformat(datetime_str.replace('Z', '+00:00'))
@@ -440,6 +523,164 @@ class UpdatesResponseHandler(ResponseHandler):
         except:
             return None
+    def _calculate_sha1(self, file_path: Path) -> str:
+        """Calculate SHA1 checksum of a file"""
+        try:
+            sha1 = hashlib.sha1()
+            with open(file_path, 'rb') as f:
+                # Read in chunks to handle large files
+                for chunk in iter(lambda: f.read(8192), b""):
+                    sha1.update(chunk)
+            return sha1.hexdigest()
+        except Exception as e:
+            logger.error(f"Failed to calculate SHA1 for {file_path}: {e}")
+            return ""
+    def _validate_downloaded_zip(self, zip_path: Path, match_data: Dict[str, Any]) -> bool:
+        """Validate downloaded ZIP file integrity using checksum and size"""
+        try:
+            zip_filename = match_data.get('zip_filename', '')
+            expected_sha1 = match_data.get('zip_sha1sum', '')
+            expected_size = match_data.get('zip_size', None)  # Size may not always be provided
+
+            logger.debug(f"Starting validation for ZIP file: {zip_filename} at path: {zip_path}")
+
+            # Check if file exists and has content
+            if not zip_path.exists():
+                logger.error(f"ZIP file does not exist: {zip_path}")
+                return False
+
+            actual_size = zip_path.stat().st_size
+            logger.debug(f"ZIP file exists with size: {actual_size} bytes")
+
+            if actual_size == 0:
+                logger.error(f"ZIP file is empty: {zip_filename}")
+                return False
+
+            # Validate SHA1 checksum if provided
+            if expected_sha1 and expected_sha1.strip():
+                logger.debug(f"Validating SHA1 checksum for {zip_filename} (expected: {expected_sha1})")
+                try:
+                    actual_sha1 = self._calculate_sha1(zip_path)
+                    logger.debug(f"Calculated SHA1: {actual_sha1}")
+
+                    if actual_sha1 != expected_sha1.lower():
+                        logger.error(f"SHA1 checksum mismatch for {zip_filename}: expected {expected_sha1}, got {actual_sha1}")
+                        return False
+
+                    logger.debug(f"SHA1 checksum validated successfully for {zip_filename}")
+                except Exception as sha1_err:
+                    logger.error(f"Failed to calculate SHA1 for {zip_filename}: {sha1_err}")
+                    return False
+            else:
+                logger.debug(f"No SHA1 checksum provided for {zip_filename}, skipping checksum validation")
+
+            # Validate file size if provided (as an additional check)
+            if expected_size is not None and isinstance(expected_size, (int, float)) and expected_size > 0:
+                logger.debug(f"Validating file size for {zip_filename} (expected: {expected_size}, actual: {actual_size})")
+                if actual_size != expected_size:
+                    # A size mismatch is logged as a warning rather than treated as a failure,
+                    # since the authoritative integrity check is the SHA1 checksum above
+                    logger.warning(f"File size mismatch for {zip_filename}: expected {expected_size}, got {actual_size}")
+                else:
+                    logger.debug(f"File size validation passed for {zip_filename}")
+            else:
+                logger.debug(f"No expected size provided for {zip_filename}, skipping size validation")
+
+            logger.debug(f"ZIP file validation successful: {zip_filename} (size: {actual_size} bytes)")
+            return True
+
+        except Exception as e:
+            logger.error(f"Failed to validate ZIP file {zip_path}: {e}")
+            return False
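
A hedged usage sketch of the validator, with hypothetical metadata shaped like the `match_data` dict used elsewhere in this diff (`handler` stands for an UpdatesResponseHandler instance):

```python
from pathlib import Path

# Hypothetical server-provided metadata for one match
match_data = {
    'zip_filename': 'fixture_20240101_match_01.zip',
    'zip_sha1sum': '2fd4e1c67a2d28fced849ee1bb76e7391b93eb12',
    'zip_size': 1_048_576,
}

zip_path = Path('/srv/mbetter/zip_files') / match_data['zip_filename']  # storage dir assumed
if not handler._validate_downloaded_zip(zip_path, match_data):
    zip_path.unlink(missing_ok=True)  # mirror the download path: discard corrupt files
```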
+    def _validate_existing_fixtures(self):
+        """Validate existing fixtures on startup and remove invalid ones"""
+        try:
+            logger.info("Validating existing fixtures on startup...")
+
+            session = self.db_manager.get_session()
+            try:
+                # Get all fixtures with active matches
+                fixtures_to_check = session.query(MatchModel.fixture_id).filter(
+                    MatchModel.active_status == True
+                ).distinct().all()
+
+                logger.debug(f"Found {len(fixtures_to_check)} fixtures to validate")
+
+                fixtures_to_remove = []
+
+                for fixture_row in fixtures_to_check:
+                    fixture_id = fixture_row.fixture_id
+                    logger.debug(f"Validating fixture: {fixture_id}")
+
+                    # Get all matches for this fixture that have ZIP files
+                    matches_with_zips = session.query(MatchModel).filter(
+                        MatchModel.fixture_id == fixture_id,
+                        MatchModel.active_status == True,
+                        MatchModel.zip_filename.isnot(None)
+                    ).all()
+
+                    logger.debug(f"Fixture {fixture_id} has {len(matches_with_zips)} matches with ZIP files")
+
+                    if not matches_with_zips:
+                        logger.debug(f"Fixture {fixture_id} has no ZIP files to validate")
+                        continue
+
+                    # Check if all required ZIP files exist and are valid
+                    fixture_valid = True
+                    missing_or_invalid_zips = []
+
+                    for match in matches_with_zips:
+                        zip_filename = match.zip_filename
+                        if not zip_filename:
+                            continue
+
+                        logger.debug(f"Validating ZIP file: {zip_filename} for fixture {fixture_id}")
+                        zip_path = self.zip_storage_dir / zip_filename
+
+                        # Create match data dict for validation
+                        match_data = {
+                            'zip_filename': zip_filename,
+                            'zip_sha1sum': match.zip_sha1sum,
+                            'zip_size': getattr(match, 'zip_size', None)  # If the size field exists on the model
+                        }
+
+                        if not self._validate_downloaded_zip(zip_path, match_data):
+                            logger.debug(f"ZIP file validation failed: {zip_filename}")
+                            missing_or_invalid_zips.append(zip_filename)
+                            fixture_valid = False
+                        else:
+                            logger.debug(f"ZIP file validation passed: {zip_filename}")
+
+                    if not fixture_valid:
+                        logger.warning(f"Fixture {fixture_id} has invalid/missing ZIP files: {missing_or_invalid_zips}")
+                        fixtures_to_remove.append(fixture_id)
+                    else:
+                        logger.debug(f"Fixture {fixture_id} validation passed")
+
+                # Remove invalid fixtures
+                if fixtures_to_remove:
+                    logger.info(f"Removing {len(fixtures_to_remove)} invalid fixtures: {fixtures_to_remove}")
+
+                    for fixture_id in fixtures_to_remove:
+                        logger.debug(f"Deleting all matches for invalid fixture: {fixture_id}")
+                        # Delete all matches for this fixture
+                        deleted_count = session.query(MatchModel).filter(
+                            MatchModel.fixture_id == fixture_id
+                        ).delete()
+                        logger.info(f"Removed fixture {fixture_id} with {deleted_count} matches")
+
+                    session.commit()
+                    logger.info(f"Successfully removed {len(fixtures_to_remove)} invalid fixtures")
+                else:
+                    logger.info("All existing fixtures are valid")
+
+            finally:
+                session.close()
+
+        except Exception as e:
+            logger.error(f"Failed to validate existing fixtures: {e}")
+            # Don't fail initialization due to validation errors
 class APIClient(ThreadedComponent):
     """REST API Client component"""
@@ -489,10 +730,15 @@ class APIClient(ThreadedComponent):
             # Subscribe to messages
             self.message_bus.subscribe(self.name, MessageType.CONFIG_UPDATE, self._handle_config_update)
            self.message_bus.subscribe(self.name, MessageType.API_REQUEST, self._handle_api_request)

+            # Validate existing fixtures on startup
+            updates_handler = self.response_handlers.get('updates')
+            if updates_handler and hasattr(updates_handler, '_validate_existing_fixtures'):
+                updates_handler._validate_existing_fixtures()
+
             logger.info("APIClient initialized successfully")
             return True

         except Exception as e:
             logger.error(f"APIClient initialization failed: {e}")
             return False
@@ -874,20 +1120,31 @@ class APIClient(ThreadedComponent):
             self.message_bus.publish(response_message)

             # Check if this is a successful fixture update that might trigger game start
-            if endpoint.name == 'fastapi_main' and processed_data.get('synchronized_matches', 0) > 0:
-                logger.info(f"Fixture update successful - {processed_data.get('synchronized_matches')} matches synchronized")
-                # Send a message to trigger game start check
-                game_start_check_message = Message(
-                    type=MessageType.SYSTEM_STATUS,
-                    sender=self.name,
-                    data={
-                        'status': 'fixture_update_completed',
-                        'synchronized_matches': processed_data.get('synchronized_matches', 0),
-                        'downloaded_zips': processed_data.get('downloaded_zips', 0),
-                        'timestamp': datetime.utcnow().isoformat()
-                    }
-                )
-                self.message_bus.publish(game_start_check_message)
+            synchronized_matches = processed_data.get('synchronized_matches', 0)
+            downloaded_zips = processed_data.get('downloaded_zips', 0)
+            expected_zips = processed_data.get('expected_zips', 0)
+
+            if endpoint.name == 'fastapi_main' and synchronized_matches > 0:
+                if downloaded_zips == expected_zips:
+                    logger.info(f"Fixture update completed successfully - {synchronized_matches} matches synchronized, {downloaded_zips}/{expected_zips} ZIPs downloaded")
+                    logger.debug("All expected ZIP files downloaded - fixtures are ready for games")
+                    # Send a message to trigger game start check
+                    game_start_check_message = Message(
+                        type=MessageType.SYSTEM_STATUS,
+                        sender=self.name,
+                        data={
+                            'status': 'fixture_update_completed',
+                            'synchronized_matches': synchronized_matches,
+                            'downloaded_zips': downloaded_zips,
+                            'expected_zips': expected_zips,
+                            'timestamp': datetime.utcnow().isoformat()
+                        }
+                    )
+                    self.message_bus.publish(game_start_check_message)
+                    logger.debug("Sent fixture_update_completed message to trigger game start check")
+                else:
+                    logger.info(f"Fixture update partially completed - {synchronized_matches} matches synchronized, {downloaded_zips}/{expected_zips} ZIPs downloaded")
+                    logger.debug(f"Waiting for remaining {expected_zips - downloaded_zips} ZIP files before fixtures are ready for games")

             logger.debug(f"API request successful: {endpoint.name}")
@@ -1016,8 +1273,11 @@ class APIClient(ThreadedComponent):
     def _process_message(self, message: Message):
         """Process received message"""
         try:
-            # Messages are handled by subscribed handlers
-            pass
+            if message.type == MessageType.CONFIG_UPDATE:
+                self._handle_config_update(message)
+            elif message.type == MessageType.API_REQUEST:
+                self._handle_api_request(message)
+            # Add other message types as needed
         except Exception as e:
             logger.error(f"Failed to process message: {e}")
@@ -1026,29 +1286,35 @@ class APIClient(ThreadedComponent):
         try:
             config_section = message.data.get("config_section")
             config_data = message.data.get("config_data", {})

             if config_section == "api_endpoints":
                 logger.info("API endpoints configuration updated")
                 self._load_endpoints()
             elif config_section == "api":
                 logger.info("API configuration updated, reloading endpoints")

-                # Check for token changes to enable/disable dynamic scheduling
+                # Check for token and URL changes
                 new_token = config_data.get("api_token", "").strip()
+                new_url = config_data.get("fastapi_url", "").strip()
                 old_token = ""
+                old_url = ""
                 try:
                     old_api_config = self.config_manager.get_section_config("api") or {}
                     old_token = old_api_config.get("api_token", "").strip()
+                    old_url = old_api_config.get("fastapi_url", "").strip()
                 except Exception:
                     pass

                 # Reload endpoints to pick up new configuration
                 self._load_endpoints()

                 # Handle dynamic timer start/stop based on token changes
                 self._handle_token_change(old_token, new_token)

+                # Handle URL changes - trigger immediate connection test
+                self._handle_url_change(old_url, new_url)
+
         except Exception as e:
             logger.error(f"Failed to handle config update: {e}")
@@ -1129,7 +1395,52 @@ class APIClient(ThreadedComponent):
         except Exception as e:
             logger.error(f"Failed to handle token change: {e}")

+    def _handle_url_change(self, old_url: str, new_url: str):
+        """Handle URL changes - trigger immediate connection test"""
+        try:
+            if old_url != new_url and new_url.strip():
+                fastapi_endpoint = self.endpoints.get("fastapi_main")
+                if fastapi_endpoint:
+                    logger.info(f"FastAPI URL changed from '{old_url}' to '{new_url}' - triggering immediate connection test")
+
+                    # Temporarily enable endpoint for connection test if it was disabled
+                    was_enabled = fastapi_endpoint.enabled
+                    if not was_enabled:
+                        fastapi_endpoint.enabled = True
+                        logger.debug("Temporarily enabled fastapi_main endpoint for connection test")
+
+                    # Reset failure count and last request to ensure immediate execution
+                    fastapi_endpoint.consecutive_failures = 0
+                    fastapi_endpoint.last_request = None
+
+                    # Trigger immediate request to test new URL
+                    logger.info("Triggering immediate connection test to new FastAPI endpoint")
+                    self._execute_endpoint_request(fastapi_endpoint)
+
+                    # Restore original enabled state after test
+                    if not was_enabled:
+                        fastapi_endpoint.enabled = False
+                        logger.debug("Restored fastapi_main endpoint to disabled state after test")
+
+                    # Send status update about URL change
+                    status_message = Message(
+                        type=MessageType.SYSTEM_STATUS,
+                        sender=self.name,
+                        data={
+                            "status": "url_changed",
+                            "endpoint": "fastapi_main",
+                            "old_url": old_url,
+                            "new_url": new_url,
+                            "connection_test_triggered": True,
+                            "endpoint_was_enabled": was_enabled
+                        }
+                    )
+                    self.message_bus.publish(status_message)
+
+        except Exception as e:
+            logger.error(f"Failed to handle URL change: {e}")
+
     def update_fastapi_url(self, new_url: str) -> bool:
         """Update FastAPI server URL and reload endpoints"""
         try:
...
@@ -52,6 +52,7 @@ class MbetterClientApplication:
         # Timer for automated game start
         self._game_start_timer: Optional[threading.Timer] = None
+        self._original_timer_interval: Optional[int] = None  # Store original interval for retries

         logger.info("MbetterClient application initialized")
@@ -178,6 +179,7 @@ class MbetterClientApplication:
             self.message_bus.subscribe("core", MessageType.SYSTEM_SHUTDOWN, self._handle_shutdown_message)
             self.message_bus.subscribe("core", MessageType.CONFIG_UPDATE, self._handle_config_update)
             self.message_bus.subscribe("core", MessageType.LOG_ENTRY, self._handle_log_entry)
+            self.message_bus.subscribe("core", MessageType.GAME_STATUS, self._handle_game_status_response)

             logger.info("Message bus initialized")
             return True
@@ -611,6 +613,8 @@ class MbetterClientApplication:
                 self._handle_config_request(message)
             elif message.type == MessageType.START_GAME:
                 self._handle_start_game_message(message)
+            elif message.type == MessageType.GAME_STATUS:
+                self._handle_game_status_response(message)
             elif message.type == MessageType.SYSTEM_SHUTDOWN:
                 self._handle_shutdown_message(message)
             else:
@@ -758,6 +762,41 @@ class MbetterClientApplication:
         except Exception as e:
             logger.error(f"Failed to handle START_GAME message: {e}")
+    def _handle_game_status_response(self, message: Message):
+        """Handle GAME_STATUS responses, particularly for timer-initiated START_GAME failures"""
+        try:
+            status = message.data.get("status", "unknown")
+            sender = message.sender
+
+            # Only process responses that might be related to our timer-initiated START_GAME:
+            # we check if we have an active timer that might need restarting
+            if self._game_start_timer is None and self._original_timer_interval is None:
+                # No active timer management needed
+                return
+
+            # Check if this is a failure response that should trigger a timer restart
+            failure_statuses = ["waiting_for_downloads", "discarded", "error", "no_matches"]
+
+            if status in failure_statuses:
+                logger.info(f"START_GAME failed with status '{status}' from {sender} - restarting timer")
+
+                # Cancel any existing timer
+                self._cancel_game_timer()
+
+                # Restart timer with original interval
+                if self._original_timer_interval is not None:
+                    logger.info(f"Restarting game start timer with original interval: {self._original_timer_interval} minutes")
+                    self._start_game_timer_with_interval(self._original_timer_interval)
+                else:
+                    logger.warning("No original timer interval available for restart")
+
+            elif status == "started":
+                logger.info(f"START_GAME succeeded with status '{status}' from {sender} - timer job completed")
+                # Game started successfully, clear timer state
+                self._original_timer_interval = None
+
+        except Exception as e:
+            logger.error(f"Failed to handle GAME_STATUS response: {e}")
     def _run_additional_tasks(self):
         """Placeholder for additional periodic tasks"""
@@ -770,6 +809,9 @@ class MbetterClientApplication:
         if self._start_timer_minutes is None:
             return

+        # Store the original interval for potential retries
+        self._original_timer_interval = self._start_timer_minutes
+
         # Special case: --start-timer 0 means 10 seconds delay for system initialization
         if self._start_timer_minutes == 0:
             delay_seconds = 10
@@ -782,6 +824,27 @@ class MbetterClientApplication:
         self._game_start_timer.daemon = True
         self._game_start_timer.start()
+    def _start_game_timer_with_interval(self, minutes: int):
+        """Start the game timer with a specific interval (used for retries)"""
+        if minutes < 0:
+            logger.error(f"Invalid timer interval: {minutes} minutes")
+            return
+
+        # Update stored interval
+        self._original_timer_interval = minutes
+
+        # Special case: timer 0 means 10 seconds delay for system initialization
+        if minutes == 0:
+            delay_seconds = 10
+            logger.info("Restarting command line game timer: 0 minutes = 10 seconds delay for system initialization")
+        else:
+            delay_seconds = minutes * 60
+            logger.info(f"Restarting command line game timer: {minutes} minutes ({delay_seconds} seconds)")
+
+        self._game_start_timer = threading.Timer(delay_seconds, self._on_game_timer_expired)
+        self._game_start_timer.daemon = True
+        self._game_start_timer.start()
     def _on_game_timer_expired(self):
         """Called when the game start timer expires"""
         logger.info("Game start timer expired, sending START_GAME message")
@@ -804,6 +867,7 @@ class MbetterClientApplication:
             logger.info("Cancelling game start timer")
             self._game_start_timer.cancel()
             self._game_start_timer = None
+            # Note: we keep _original_timer_interval for potential retries

     def _check_component_health(self):
         """Check health of all components"""
@@ -841,8 +905,9 @@ class MbetterClientApplication:
         self.running = False
         self.shutdown_event.set()

-        # Cancel game timer if running
+        # Cancel game timer if running and clear timer state
         self._cancel_game_timer()
+        self._original_timer_interval = None

         # Send shutdown message to all components
         if self.message_bus:
...
@@ -193,7 +193,13 @@ class GamesThread(ThreadedComponent):
                     self._send_response(message, "discarded", f"Fixture {fixture_id} is already completed")
                     return

-                # Fixture is not terminal, activate it
+                # Check if all required ZIP files are available for this fixture
+                if not self._are_fixture_zips_available(fixture_id):
+                    logger.info(f"Fixture {fixture_id} has missing or invalid ZIP files - discarding START_GAME message")
+                    self._send_response(message, "waiting_for_downloads", f"Fixture {fixture_id} is waiting for ZIP file downloads")
+                    return
+
+                # Fixture is not terminal and has all ZIPs, activate it
                 logger.info(f"Activating provided fixture: {fixture_id}")
                 self._activate_fixture(fixture_id, message)
                 return
@@ -213,6 +219,12 @@ class GamesThread(ThreadedComponent):
             # Step 3: Check if there are active fixtures with today's date
             active_fixture = self._find_active_today_fixture()
             if active_fixture:
+                # Check if all required ZIP files are available for this fixture
+                if not self._are_fixture_zips_available(active_fixture):
+                    logger.info(f"Active fixture {active_fixture} has missing or invalid ZIP files - discarding START_GAME message")
+                    self._send_response(message, "waiting_for_downloads", f"Fixture {active_fixture} is waiting for ZIP file downloads")
+                    return
+
                 logger.info(f"Found active fixture for today: {active_fixture}")
                 self._activate_fixture(active_fixture, message)
                 return
@@ -221,6 +233,12 @@ class GamesThread(ThreadedComponent):
             logger.info("No active fixtures found - initializing new fixture")
             new_fixture_id = self._initialize_new_fixture()
             if new_fixture_id:
+                # Check if all required ZIP files are available for the new fixture
+                if not self._are_fixture_zips_available(new_fixture_id):
+                    logger.info(f"New fixture {new_fixture_id} has missing or invalid ZIP files - discarding START_GAME message")
+                    self._send_response(message, "waiting_for_downloads", f"Fixture {new_fixture_id} is waiting for ZIP file downloads")
+                    return
+
                 self._activate_fixture(new_fixture_id, message)
             else:
                 logger.warning("Could not initialize new fixture")
game_status = self._determine_game_status() game_status = self._determine_game_status()
# If status is "already_active" but game is not active, activate the fixture # If status is "already_active" but game is not active, activate the fixture
# But NOT if we're waiting for downloads
if game_status == "already_active" and not self.game_active: if game_status == "already_active" and not self.game_active:
logger.info("Status is 'already_active' but game is not active - activating fixture") logger.info("Status is 'already_active' but game is not active - activating fixture")
active_fixture = self._find_active_today_fixture() active_fixture = self._find_active_today_fixture()
...@@ -370,6 +389,8 @@ class GamesThread(ThreadedComponent): ...@@ -370,6 +389,8 @@ class GamesThread(ThreadedComponent):
game_status = "started" game_status = "started"
else: else:
logger.warning("Could not find active fixture to activate") logger.warning("Could not find active fixture to activate")
elif game_status == "waiting_for_downloads":
logger.info("Game status is 'waiting_for_downloads' - not activating fixture until all ZIP files are available")
# Send GAME_STATUS response back to the requester # Send GAME_STATUS response back to the requester
response = Message( response = Message(
...@@ -411,6 +432,8 @@ class GamesThread(ThreadedComponent): ...@@ -411,6 +432,8 @@ class GamesThread(ThreadedComponent):
self._handle_play_video_match_done(message) self._handle_play_video_match_done(message)
elif message.type == MessageType.MATCH_DONE: elif message.type == MessageType.MATCH_DONE:
self._handle_match_done(message) self._handle_match_done(message)
elif message.type == MessageType.MATCH_START:
self._handle_match_start(message)
elif message.type == MessageType.GAME_STATUS: elif message.type == MessageType.GAME_STATUS:
self._handle_game_status_request(message) self._handle_game_status_request(message)
...@@ -759,6 +782,63 @@ class GamesThread(ThreadedComponent): ...@@ -759,6 +782,63 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to check if only non-terminal fixture: {e}") logger.error(f"Failed to check if only non-terminal fixture: {e}")
return False return False
+    def _are_fixture_zips_available(self, fixture_id: str) -> bool:
+        """Check if all required ZIP files are available for a fixture"""
+        try:
+            session = self.db_manager.get_session()
+            try:
+                # Get all active matches for this fixture that have ZIP files
+                matches_with_zips = session.query(MatchModel).filter(
+                    MatchModel.fixture_id == fixture_id,
+                    MatchModel.active_status == True,
+                    MatchModel.zip_filename.isnot(None)
+                ).all()
+
+                if not matches_with_zips:
+                    logger.debug(f"Fixture {fixture_id} has no matches requiring ZIP files")
+                    return True  # No ZIP files required, so the fixture counts as "available"
+
+                # Check if all required ZIP files exist and are valid
+                from ..config.settings import get_user_data_dir
+
+                user_data_dir = get_user_data_dir()
+                missing_or_invalid_zips = []
+
+                for match in matches_with_zips:
+                    zip_filename = match.zip_filename
+                    if not zip_filename:
+                        continue
+
+                    zip_path = user_data_dir / "zip_files" / zip_filename
+                    if not zip_path.exists():
+                        missing_or_invalid_zips.append(zip_filename)
+                        logger.debug(f"ZIP file missing for match {match.match_number}: {zip_path}")
+                    else:
+                        # Quick validation - check file size > 0
+                        try:
+                            if zip_path.stat().st_size == 0:
+                                missing_or_invalid_zips.append(zip_filename)
+                                logger.debug(f"ZIP file empty for match {match.match_number}: {zip_path}")
+                        except OSError as e:
+                            missing_or_invalid_zips.append(zip_filename)
+                            logger.debug(f"Cannot access ZIP file for match {match.match_number}: {e}")
+
+                if missing_or_invalid_zips:
+                    logger.info(f"Fixture {fixture_id} has missing/invalid ZIP files: {missing_or_invalid_zips}")
+                    return False
+                else:
+                    logger.debug(f"All required ZIP files available for fixture {fixture_id}")
+                    return True
+            finally:
+                session.close()
+        except Exception as e:
+            logger.error(f"Failed to check ZIP availability for fixture {fixture_id}: {e}")
+            return False
     def _find_active_today_fixture(self) -> Optional[str]:
         """Find an active fixture with today's date"""
         try:
@@ -1204,119 +1284,176 @@ class GamesThread(ThreadedComponent):
     def _calculate_match_result(self, fixture_id: str, match_id: int) -> str:
         """Calculate match result based on betting patterns and CAP logic"""
         try:
+            logger.info(f"🔍 [DEBUG] Starting match result calculation for fixture {fixture_id}, match {match_id}")
             session = self.db_manager.get_session()
             try:
-                # Get UNDER/OVER coefficients from match outcomes
+                # Step 2.1: Retrieve UNDER/OVER coefficients
+                logger.info(f"📊 [DEBUG] Step 2.1: Retrieving UNDER/OVER coefficients for fixture {fixture_id}")
                 under_coeff, over_coeff = self._get_fixture_coefficients(fixture_id, session)
+                logger.info(f"📊 [DEBUG] Retrieved coefficients - UNDER: {under_coeff}, OVER: {over_coeff}")

+                # Step 2.2: Fallback check
                 if under_coeff is None or over_coeff is None:
-                    logger.warning(f"No coefficients found for fixture {fixture_id}, using random selection")
-                    return self._weighted_random_selection(1.0, 1.0)
+                    logger.warning(f"⚠️ [DEBUG] Step 2.2: No coefficients found for fixture {fixture_id}, falling back to random selection")
+                    fallback_result = self._weighted_random_selection(1.0, 1.0)
+                    logger.info(f"🎲 [DEBUG] Fallback random result: {fallback_result}")
+                    return fallback_result

-                # Calculate payouts for both outcomes
+                # Step 2.3: Calculate UNDER payout
+                logger.info(f"💰 [DEBUG] Step 2.3: Calculating UNDER payout (match_id={match_id}, coeff={under_coeff})")
                 under_payout = self._calculate_payout(match_id, 'UNDER', under_coeff, session)
+                logger.info(f"💰 [DEBUG] UNDER payout calculated: {under_payout:.2f}")

+                # Step 2.4: Calculate OVER payout
+                logger.info(f"💰 [DEBUG] Step 2.4: Calculating OVER payout (match_id={match_id}, coeff={over_coeff})")
                 over_payout = self._calculate_payout(match_id, 'OVER', over_coeff, session)
+                logger.info(f"💰 [DEBUG] OVER payout calculated: {over_payout:.2f}")

-                # Calculate total payin
+                # Step 2.5: Calculate total payin
+                logger.info(f"💵 [DEBUG] Step 2.5: Calculating total payin for match {match_id}")
                 total_payin = self._calculate_total_payin(match_id, session)
+                logger.info(f"💵 [DEBUG] Total payin calculated: {total_payin:.2f}")

-                # Get redistribution CAP percentage
+                # Step 2.6: Get redistribution CAP
+                logger.info("🎯 [DEBUG] Step 2.6: Retrieving redistribution CAP percentage")
                 cap_percentage = self._get_redistribution_cap()
+                logger.info(f"🎯 [DEBUG] CAP percentage: {cap_percentage}%")

-                # Check CAP logic
+                # Step 2.7: Calculate CAP threshold
+                logger.info("📏 [DEBUG] Step 2.7: Calculating CAP threshold")
                 max_payout = max(under_payout, over_payout)
                 cap_threshold = total_payin * (cap_percentage / 100.0)
+                logger.info(f"📏 [DEBUG] Max payout: {max_payout:.2f}, CAP threshold: {cap_threshold:.2f}")

-                logger.info(f"Match {match_id}: UNDER payout={under_payout:.2f}, OVER payout={over_payout:.2f}, total_payin={total_payin:.2f}, CAP={cap_percentage}%, threshold={cap_threshold:.2f}")
+                # Step 2.8: CAP logic decision
+                logger.info("🧠 [DEBUG] Step 2.8: Evaluating CAP logic")
+                logger.info(f"📊 [DEBUG] Summary - UNDER payout: {under_payout:.2f}, OVER payout: {over_payout:.2f}, total_payin: {total_payin:.2f}, CAP: {cap_percentage}%, threshold: {cap_threshold:.2f}")

                 if max_payout > cap_threshold:
                     # CAP exceeded - select outcome with lower payout to minimize losses
+                    logger.info(f"🚨 [DEBUG] CAP exceeded! Max payout ({max_payout:.2f}) > threshold ({cap_threshold:.2f})")
                     if under_payout <= over_payout:
                         result = 'UNDER'
-                        logger.info(f"CAP exceeded, selecting UNDER (lower payout: {under_payout:.2f} vs {over_payout:.2f})")
+                        logger.info(f"✅ [DEBUG] Selecting UNDER (lower payout: {under_payout:.2f} vs {over_payout:.2f})")
                     else:
                         result = 'OVER'
-                        logger.info(f"CAP exceeded, selecting OVER (lower payout: {over_payout:.2f} vs {under_payout:.2f})")
+                        logger.info(f"✅ [DEBUG] Selecting OVER (lower payout: {over_payout:.2f} vs {under_payout:.2f})")
                 else:
                     # CAP not exceeded - use weighted random selection
+                    logger.info(f"✅ [DEBUG] CAP not exceeded. Max payout ({max_payout:.2f}) <= threshold ({cap_threshold:.2f})")
+                    logger.info("🎲 [DEBUG] Step 2.9: Performing weighted random selection")
                     result = self._weighted_random_selection(under_coeff, over_coeff)
-                    logger.info(f"CAP not exceeded, using weighted random: {result}")
+                    logger.info(f"🎯 [DEBUG] Weighted random result: {result}")

+                logger.info(f"🏁 [DEBUG] Match result calculation completed: {result}")
                 return result
             finally:
                 session.close()
         except Exception as e:
-            logger.error(f"Failed to calculate match result: {e}")
+            logger.error(f"❌ [DEBUG] Failed to calculate match result: {e}")
             # Fallback to random
-            return self._weighted_random_selection(1.0, 1.0)
+            logger.info("🔄 [DEBUG] Using fallback random selection")
+            fallback_result = self._weighted_random_selection(1.0, 1.0)
+            logger.info(f"🎲 [DEBUG] Fallback result: {fallback_result}")
+            return fallback_result
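
A worked example of the CAP decision above, with hypothetical stakes and an assumed 80% redistribution CAP:

```python
# Hypothetical bets: 120.0 staked on UNDER at 1.5, 100.0 staked on OVER at 2.5
under_payout = 120.0 * 1.5   # 180.0
over_payout = 100.0 * 2.5    # 250.0
total_payin = 120.0 + 100.0  # 220.0
cap_percentage = 80.0        # assumed redistribution CAP

cap_threshold = total_payin * (cap_percentage / 100.0)  # 176.0
max_payout = max(under_payout, over_payout)             # 250.0

# 250.0 > 176.0: the CAP is exceeded, so the lower-payout outcome is forced
result = 'UNDER' if under_payout <= over_payout else 'OVER'  # 'UNDER'
```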
     def _calculate_payout(self, match_id: int, outcome: str, coefficient: float, session) -> float:
         """Calculate payout for an outcome"""
         try:
+            logger.info(f"💰 [DEBUG] Calculating payout for {outcome} (match_id={match_id}, coefficient={coefficient})")
+
             # Get total bets for this outcome on this match
+            logger.info(f"📊 [DEBUG] Querying bets for outcome '{outcome}' on match {match_id}")
             total_bet_amount = session.query(
                 BetDetailModel.amount
             ).filter(
                 BetDetailModel.match_id == match_id,
                 BetDetailModel.outcome == outcome,
-                BetDetailModel.active_status == True
+                BetDetailModel.result == 'pending'
             ).all()

+            bet_count = len(total_bet_amount) if total_bet_amount else 0
+            logger.info(f"📊 [DEBUG] Found {bet_count} pending bets for {outcome}")
+
             total_amount = sum(bet.amount for bet in total_bet_amount) if total_bet_amount else 0.0
+            logger.info(f"💵 [DEBUG] Total bet amount for {outcome}: {total_amount:.2f}")

             payout = total_amount * coefficient
+            logger.info(f"💰 [DEBUG] Calculated payout for {outcome}: {total_amount:.2f} × {coefficient} = {payout:.2f}")
             return payout
         except Exception as e:
-            logger.error(f"Failed to calculate payout for {outcome}: {e}")
+            logger.error(f"❌ [DEBUG] Failed to calculate payout for {outcome}: {e}")
             return 0.0
     def _calculate_total_payin(self, match_id: int, session) -> float:
         """Calculate total payin (sum of all UNDER + OVER bets)"""
         try:
+            logger.info(f"💵 [DEBUG] Calculating total payin for match {match_id}")
+
+            # Query UNDER bets
+            logger.info(f"📊 [DEBUG] Querying UNDER bets for match {match_id}")
             total_under = session.query(
                 BetDetailModel.amount
             ).filter(
                 BetDetailModel.match_id == match_id,
                 BetDetailModel.outcome == 'UNDER',
-                BetDetailModel.active_status == True
+                BetDetailModel.result == 'pending'
             ).all()

+            under_count = len(total_under) if total_under else 0
+            under_amount = sum(bet.amount for bet in total_under) if total_under else 0.0
+            logger.info(f"💵 [DEBUG] UNDER bets: {under_count} bets, total amount: {under_amount:.2f}")
+
+            # Query OVER bets
+            logger.info(f"📊 [DEBUG] Querying OVER bets for match {match_id}")
             total_over = session.query(
                 BetDetailModel.amount
             ).filter(
                 BetDetailModel.match_id == match_id,
                 BetDetailModel.outcome == 'OVER',
-                BetDetailModel.active_status == True
+                BetDetailModel.result == 'pending'
             ).all()

-            under_amount = sum(bet.amount for bet in total_under) if total_under else 0.0
-            over_amount = sum(bet.amount for bet in total_over) if total_over else 0.0
+            over_count = len(total_over) if total_over else 0
+            over_amount = sum(bet.amount for bet in total_over) if total_over else 0.0
+            logger.info(f"💵 [DEBUG] OVER bets: {over_count} bets, total amount: {over_amount:.2f}")

-            return under_amount + over_amount
+            total_payin = under_amount + over_amount
+            logger.info(f"💵 [DEBUG] Total payin calculated: {under_amount:.2f} + {over_amount:.2f} = {total_payin:.2f}")
+            return total_payin
         except Exception as e:
-            logger.error(f"Failed to calculate total payin: {e}")
+            logger.error(f"❌ [DEBUG] Failed to calculate total payin: {e}")
             return 0.0
     def _get_fixture_coefficients(self, fixture_id: str, session) -> tuple:
         """Get UNDER/OVER coefficients from fixture/match outcomes"""
         try:
-            # Get match outcomes for this fixture
-            outcomes = session.query(MatchOutcomeModel).filter(
-                MatchOutcomeModel.fixture_id == fixture_id,
-                MatchOutcomeModel.active_status == True
-            ).all()
+            # Get one active match from the fixture to read the UNDER/OVER coefficients
+            match = session.query(MatchModel).filter(
+                MatchModel.fixture_id == fixture_id,
+                MatchModel.active_status == True
+            ).first()

+            if not match:
+                logger.warning(f"No active matches found for fixture {fixture_id}")
+                return None, None
+
+            # Get UNDER/OVER coefficients from the match outcomes
             under_coeff = None
             over_coeff = None

-            for outcome in outcomes:
-                if outcome.outcome_type == 'UNDER':
-                    under_coeff = outcome.coefficient
-                elif outcome.outcome_type == 'OVER':
-                    over_coeff = outcome.coefficient
+            for outcome in match.outcomes:
+                if outcome.column_name == 'UNDER':
+                    under_coeff = outcome.float_value
+                elif outcome.column_name == 'OVER':
+                    over_coeff = outcome.float_value

             return under_coeff, over_coeff
@@ -1357,30 +1494,44 @@ class GamesThread(ThreadedComponent):
         try:
             import random

+            logger.info(f"🎲 [DEBUG] Weighted random selection - UNDER coeff: {under_coeff}, OVER coeff: {over_coeff}")
+
             # Higher coefficients get lower probability (inverse weighting)
+            logger.info("⚖️ [DEBUG] Calculating inverse weights (higher coeff = lower probability)")
             under_weight = 1.0 / under_coeff if under_coeff > 0 else 1.0
             over_weight = 1.0 / over_coeff if over_coeff > 0 else 1.0
+            logger.info(f"⚖️ [DEBUG] Weights calculated - UNDER: {under_weight:.4f}, OVER: {over_weight:.4f}")

             total_weight = under_weight + over_weight
+            logger.info(f"📊 [DEBUG] Total weight: {total_weight:.4f}")

             if total_weight == 0:
                 # Fallback to equal weights
+                logger.warning("⚠️ [DEBUG] Total weight is zero, falling back to equal weights")
                 under_weight = over_weight = 1.0
                 total_weight = 2.0
+                logger.info(f"⚖️ [DEBUG] Equal weights applied - UNDER: {under_weight}, OVER: {over_weight}")

             # Generate random number
             rand = random.uniform(0, total_weight)
+            logger.info(f"🎯 [DEBUG] Generated random number: {rand:.4f} (range: 0-{total_weight:.4f})")

+            # Determine result
             if rand < under_weight:
+                logger.info(f"🎯 [DEBUG] Random {rand:.4f} < UNDER weight {under_weight:.4f} → selecting UNDER")
                 return 'UNDER'
             else:
+                logger.info(f"🎯 [DEBUG] Random {rand:.4f} >= UNDER weight {under_weight:.4f} → selecting OVER")
                 return 'OVER'

         except Exception as e:
-            logger.error(f"Failed to perform weighted random selection: {e}")
+            logger.error(f"❌ [DEBUG] Failed to perform weighted random selection: {e}")
             # Fallback to 50/50
+            logger.info("🔄 [DEBUG] Using 50/50 fallback")
             import random
-            return 'UNDER' if random.random() < 0.5 else 'OVER'
+            fallback_result = 'UNDER' if random.random() < 0.5 else 'OVER'
+            logger.info(f"🎲 [DEBUG] 50/50 fallback result: {fallback_result}")
+            return fallback_result
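
The inverse weighting above gives P(UNDER) = (1/u) / (1/u + 1/o), which simplifies to o / (u + o); a quick check with hypothetical coefficients:

```python
under_coeff, over_coeff = 1.5, 2.5  # hypothetical coefficients

under_weight = 1.0 / under_coeff           # ≈ 0.6667
over_weight = 1.0 / over_coeff             # 0.4
total_weight = under_weight + over_weight  # ≈ 1.0667

p_under = under_weight / total_weight  # ≈ 0.625, equals over_coeff / (under_coeff + over_coeff)
p_over = over_weight / total_weight    # ≈ 0.375

# The lower coefficient is drawn more often, so the higher payout is the rarer outcome.
```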
     def _send_play_video_match(self, fixture_id: str, match_id: int, result: str):
         """Send PLAY_VIDEO_MATCH message with calculated result"""
@@ -2078,7 +2229,39 @@ class GamesThread(ThreadedComponent):
             ).all()

             if active_matches:
-                return "already_active"
+                # Check if all required ZIP files are available for these active matches.
+                # Only consider fixtures "active" if all their ZIP files are downloaded
+                from ..config.settings import get_user_data_dir
+
+                user_data_dir = get_user_data_dir()
+                fixtures_with_missing_zips = set()
+
+                for match in active_matches:
+                    if match.zip_filename and match.active_status:
+                        # Check if the ZIP file exists and is valid
+                        zip_path = user_data_dir / "zip_files" / match.zip_filename
+
+                        if not zip_path.exists():
+                            fixtures_with_missing_zips.add(match.fixture_id)
+                            logger.debug(f"ZIP file missing for match {match.match_number} in fixture {match.fixture_id}: {zip_path}")
+                        else:
+                            # Quick validation - check file size > 0
+                            try:
+                                if zip_path.stat().st_size == 0:
+                                    fixtures_with_missing_zips.add(match.fixture_id)
+                                    logger.debug(f"ZIP file empty for match {match.match_number} in fixture {match.fixture_id}: {zip_path}")
+                            except OSError as e:
+                                fixtures_with_missing_zips.add(match.fixture_id)
+                                logger.debug(f"Cannot access ZIP file for match {match.match_number} in fixture {match.fixture_id}: {e}")
+
+                if fixtures_with_missing_zips:
+                    logger.info(f"Active fixtures found but missing ZIP files for fixtures: {fixtures_with_missing_zips} - waiting for downloads")
+                    return "waiting_for_downloads"  # New status indicating waiting for ZIP downloads
+                else:
+                    logger.debug("All active fixtures have required ZIP files available")
+                    return "already_active"

             # Check if all today's fixtures are in terminal states
             if self._has_today_fixtures_all_terminal():
...
@@ -5,7 +5,7 @@ Flask web dashboard application for MbetterClient
import time
import logging
from pathlib import Path
-from typing import Optional, Dict, Any
+from typing import Optional, Dict, Any, List

from flask import Flask, request, jsonify, render_template, redirect, url_for, session, g
from flask_login import LoginManager, login_required, current_user
from flask_jwt_extended import JWTManager, create_access_token, jwt_required as flask_jwt_required

@@ -57,6 +57,12 @@ class WebDashboard(ThreadedComponent):
            "match_id": None,
            "start_time": None
        }

+        # Client notification queue for long-polling clients
+        self.notification_queue: List[Dict[str, Any]] = []
+        self.notification_lock = threading.Lock()
+        self.waiting_clients: List[threading.Event] = []  # Events for waiting long-poll clients
+        self.waiting_clients_lock = threading.Lock()

        # Register message queue
        self.message_queue = self.message_bus.register_component(self.name)
@@ -89,6 +95,10 @@ class WebDashboard(ThreadedComponent):
        self.message_bus.subscribe(self.name, MessageType.CONFIG_UPDATE, self._handle_config_update)
        self.message_bus.subscribe(self.name, MessageType.SYSTEM_STATUS, self._handle_system_status)
        self.message_bus.subscribe(self.name, MessageType.CUSTOM, self._handle_custom_message)

+        # Subscribe to messages for client notifications
+        self.message_bus.subscribe(self.name, MessageType.START_GAME, self._handle_client_notification)
+        self.message_bus.subscribe(self.name, MessageType.MATCH_START, self._handle_client_notification)
+        self.message_bus.subscribe(self.name, MessageType.GAME_STATUS, self._handle_client_notification)

        logger.info("WebDashboard initialized successfully")
        return True
@@ -537,6 +547,7 @@ class WebDashboard(ThreadedComponent):
        try:
            response = message.data.get("response")
            timer_update = message.data.get("timer_update")
+            fixture_status_update = message.data.get("fixture_status_update")

            if response == "timer_state":
                # Update stored timer state
@@ -560,24 +571,96 @@ class WebDashboard(ThreadedComponent):
                # Handle periodic timer updates from match_timer component
                self.current_timer_state.update(timer_update)
                logger.debug(f"Timer update received: {timer_update}")

-                # Broadcast timer update to connected clients via global message bus
-                try:
-                    timer_update_message = Message(
-                        type=MessageType.CUSTOM,
-                        sender=self.name,
-                        data={
-                            "timer_update": timer_update,
-                            "timestamp": time.time()
-                        }
-                    )
-                    self.message_bus.publish(timer_update_message, broadcast=True)
-                    logger.debug("Timer update broadcasted to clients")
-                except Exception as broadcast_e:
-                    logger.error(f"Failed to broadcast timer update: {broadcast_e}")
+                # Add timer update to notification queue for long-polling clients
+                self._add_client_notification("TIMER_UPDATE", timer_update, message.timestamp)
+
+            elif fixture_status_update:
+                # Handle fixture status updates from games_thread
+                logger.debug(f"Fixture status update received: {fixture_status_update}")
+
+                # Add fixture status update to notification queue for long-polling clients
+                self._add_client_notification("FIXTURE_STATUS_UPDATE", fixture_status_update, message.timestamp)

        except Exception as e:
            logger.error(f"Failed to handle custom message: {e}")
+    def _handle_client_notification(self, message: Message):
+        """Handle messages that should be sent to long-polling clients"""
+        try:
+            # Convert message to notification format
+            notification_data = {
+                "type": message.type.value,
+                "data": message.data,
+                "timestamp": message.timestamp,
+                "sender": message.sender
+            }
+
+            # Add to notification queue
+            self._add_client_notification(message.type.value, message.data, message.timestamp, message.sender)
+
+        except Exception as e:
+            logger.error(f"Failed to handle client notification: {e}")
+
+    def _add_client_notification(self, notification_type: str, data: Dict[str, Any],
+                                 timestamp: float, sender: str = None):
+        """Add notification to the client queue"""
+        try:
+            with self.notification_lock:
+                notification = {
+                    "type": notification_type,
+                    "data": data,
+                    "timestamp": timestamp
+                }
+                if sender:
+                    notification["sender"] = sender
+
+                self.notification_queue.append(notification)
+
+                # Keep queue size reasonable (limit to last 100 notifications)
+                if len(self.notification_queue) > 100:
+                    self.notification_queue = self.notification_queue[-50:]
+
+                logger.debug(f"Added client notification: {notification_type}")
+
+            # Wake up all waiting clients
+            with self.waiting_clients_lock:
+                for event in self.waiting_clients:
+                    event.set()
+                self.waiting_clients.clear()
+
+        except Exception as e:
+            logger.error(f"Failed to add client notification: {e}")
+
+    def get_pending_notifications(self) -> List[Dict[str, Any]]:
+        """Get the first pending notification for a client (one at a time)"""
+        try:
+            with self.notification_lock:
+                if self.notification_queue:
+                    # Return only the first notification and remove it from queue
+                    notification = self.notification_queue.pop(0)
+                    return [notification]
+                return []
+        except Exception as e:
+            logger.error(f"Failed to get pending notifications: {e}")
+            return []
+
+    def register_waiting_client(self, event: threading.Event):
+        """Register a waiting client Event to be notified when notifications arrive"""
+        try:
+            with self.waiting_clients_lock:
+                self.waiting_clients.append(event)
+        except Exception as e:
+            logger.error(f"Failed to register waiting client: {e}")
+
+    def unregister_waiting_client(self, event: threading.Event):
+        """Unregister a waiting client Event"""
+        try:
+            with self.waiting_clients_lock:
+                if event in self.waiting_clients:
+                    self.waiting_clients.remove(event)
+        except Exception as e:
+            logger.error(f"Failed to unregister waiting client: {e}")
    def get_app_context(self):
        """Get Flask application context"""
...
@@ -2600,90 +2600,61 @@ def notifications():
    import threading
    import ssl
    import socket
-    from ..core.message_bus import MessageType

    # Get timeout from query parameter (default 30 seconds)
    timeout = int(request.args.get('timeout', 30))
    if timeout > 60:  # Max 60 seconds
        timeout = 60

-    # Create a queue for this client
-    notification_queue = []
+    # Create an event for timeout handling
    notification_received = threading.Event()

-    def message_handler(message):
-        """Handle incoming messages for this client"""
-        if message.type in [MessageType.START_GAME, MessageType.GAME_STARTED, MessageType.MATCH_START, MessageType.GAME_STATUS]:
-            notification_data = {
-                "type": message.type.value,
-                "data": message.data,
-                "timestamp": message.timestamp,
-                "sender": message.sender
-            }
-            notification_queue.append(notification_data)
-            notification_received.set()
-        elif message.type == MessageType.CUSTOM and "timer_update" in message.data:
-            # Handle timer updates from match_timer component
-            notification_data = {
-                "type": "TIMER_UPDATE",
-                "data": message.data["timer_update"],
-                "timestamp": message.timestamp,
-                "sender": message.sender
-            }
-            notification_queue.append(notification_data)
-            notification_received.set()
-        elif message.type == MessageType.CUSTOM and "fixture_status_update" in message.data:
-            # Handle fixture status updates from games_thread
-            notification_data = {
-                "type": "FIXTURE_STATUS_UPDATE",
-                "data": message.data["fixture_status_update"],
-                "timestamp": message.timestamp,
-                "sender": message.sender
-            }
-            notification_queue.append(notification_data)
-            notification_received.set()
-
-    # Subscribe to relevant message types
-    if api_bp.message_bus:
-        api_bp.message_bus.subscribe_global(MessageType.START_GAME, message_handler)
-        api_bp.message_bus.subscribe_global(MessageType.MATCH_START, message_handler)
-        api_bp.message_bus.subscribe_global(MessageType.GAME_STATUS, message_handler)
-        api_bp.message_bus.subscribe_global(MessageType.CUSTOM, message_handler)
-
-    # Wait for notification or timeout
-    notification_received.wait(timeout=timeout)
-
-    # Unsubscribe from messages safely
-    if api_bp.message_bus:
-        try:
-            # Use proper unsubscribe methods instead of direct removal
-            for msg_type in [MessageType.START_GAME, MessageType.MATCH_START, MessageType.GAME_STATUS, MessageType.CUSTOM]:
-                try:
-                    if hasattr(api_bp.message_bus, '_global_handlers') and msg_type in api_bp.message_bus._global_handlers:
-                        handlers = api_bp.message_bus._global_handlers[msg_type]
-                        if message_handler in handlers:
-                            handlers.remove(message_handler)
-                except (AttributeError, KeyError, ValueError) as e:
-                    logger.debug(f"Handler cleanup warning for {msg_type}: {e}")
-        except Exception as e:
-            logger.warning(f"Error during notification cleanup: {e}")
-
-    # Prepare response data
-    if notification_queue:
-        # Return the first notification received
-        notification = notification_queue[0]
-        logger.debug(f"Notification sent to client: {notification['type']}")
-        response_data = {
-            "success": True,
-            "notification": notification
-        }
-    else:
-        # Timeout - return empty response
-        response_data = {
-            "success": True,
-            "notification": None
-        }
+    # Check for pending notifications from the web dashboard
+    if hasattr(g, 'main_app') and g.main_app and hasattr(g.main_app, 'web_dashboard'):
+        # Register this event with the web dashboard so it gets notified when notifications arrive
+        g.main_app.web_dashboard.register_waiting_client(notification_received)
+
+        try:
+            # First check for any pending notifications
+            pending_notifications = g.main_app.web_dashboard.get_pending_notifications()
+            if pending_notifications:
+                # Return pending notifications immediately
+                logger.debug(f"Returning {len(pending_notifications)} pending notifications to client")
+                response_data = {
+                    "success": True,
+                    "notifications": pending_notifications
+                }
+            else:
+                # No pending notifications, wait for new ones or timeout
+                logger.debug(f"Waiting for notifications with {timeout}s timeout")
+                notification_received.wait(timeout=timeout)
+
+                # After waiting, check again for any notifications that arrived
+                pending_notifications = g.main_app.web_dashboard.get_pending_notifications()
+                if pending_notifications:
+                    logger.debug(f"Returning {len(pending_notifications)} notifications after wait")
+                    response_data = {
+                        "success": True,
+                        "notifications": pending_notifications
+                    }
+                else:
+                    # Timeout - return empty response
+                    logger.debug("Notification wait timed out, returning empty response")
+                    response_data = {
+                        "success": True,
+                        "notifications": []
+                    }
+        finally:
+            # Always unregister the event when done
+            g.main_app.web_dashboard.unregister_waiting_client(notification_received)
+    else:
+        # Web dashboard not available, return empty response
+        response_data = {
+            "success": True,
+            "notifications": []
+        }
+
+    # Response data is already prepared above
    # Handle SSL/connection errors gracefully when sending response
    try:
        return jsonify(response_data)
...
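From a client's perspective the contract is: GET /api/notifications with an optional timeout query parameter (clamped to 60 seconds server-side) and read the notifications array, which carries at most one item per response. A minimal polling client sketch, assuming the requests package and a placeholder base URL:

import requests

BASE_URL = "http://localhost:5000"  # placeholder; use the dashboard's real address

def poll_notifications_once(timeout: int = 30) -> list:
    """One long-poll cycle; returns [] when the server-side wait times out."""
    resp = requests.get(
        f"{BASE_URL}/api/notifications",
        params={"timeout": timeout},
        timeout=timeout + 5,  # outlive the server-side wait
    )
    resp.raise_for_status()
    body = resp.json()
    return body.get("notifications", []) if body.get("success") else []

if __name__ == "__main__":
    while True:
        for note in poll_notifications_once():
            print(note["type"], note.get("data"))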
@@ -5,14 +5,20 @@
// Dashboard namespace
window.Dashboard = (function() {
    'use strict';

    let config = {};
    let statusInterval = null;
    let isOnline = navigator.onLine;
    let cache = {};
+    let initialized = false; // Prevent multiple initializations

    // Initialize dashboard
    function init(userConfig) {
+        if (initialized) {
+            console.log('Dashboard already initialized, skipping...');
+            return;
+        }
+
        config = Object.assign({
            statusUpdateInterval: 5000,
            apiEndpoint: '/api',
@@ -36,6 +42,7 @@ window.Dashboard = (function() {
        // Initialize match timer
        initMatchTimer();

+        initialized = true;
        console.log('Dashboard initialized successfully');
    }
...@@ -463,9 +470,11 @@ window.Dashboard = (function() { ...@@ -463,9 +470,11 @@ window.Dashboard = (function() {
let lastServerSync = 0; let lastServerSync = 0;
let cachedMatchInterval = null; // Cache the match interval configuration let cachedMatchInterval = null; // Cache the match interval configuration
const SYNC_INTERVAL = 30000; // Sync with server every 30 seconds const SYNC_INTERVAL = 30000; // Sync with server every 30 seconds
let longPollingActive = false; // Flag to control long polling
let longPollingController = null; // AbortController for cancelling requests
function initMatchTimer() { function initMatchTimer() {
console.log('Initializing server-only match timer (no local countdown)...'); console.log('Initializing server-only match timer with real-time notifications...');
// Load match interval config once at initialization // Load match interval config once at initialization
loadMatchIntervalConfig().then(function(intervalSeconds) { loadMatchIntervalConfig().then(function(intervalSeconds) {
...@@ -474,9 +483,9 @@ window.Dashboard = (function() { ...@@ -474,9 +483,9 @@ window.Dashboard = (function() {
// Initial sync with server // Initial sync with server
syncWithServerTimer(); syncWithServerTimer();
// REMOVED: No periodic sync - rely on notifications // Start real-time notification polling for timer updates
// REMOVED: No local countdown - rely on server updates only startNotificationPolling();
}).catch(function(error) { }).catch(function(error) {
console.error('Failed to load match timer config at initialization:', error); console.error('Failed to load match timer config at initialization:', error);
// Use default and continue // Use default and continue
...@@ -484,6 +493,9 @@ window.Dashboard = (function() { ...@@ -484,6 +493,9 @@ window.Dashboard = (function() {
// Initial sync with server // Initial sync with server
syncWithServerTimer(); syncWithServerTimer();
// Start real-time notification polling for timer updates
startNotificationPolling();
}); });
} }
@@ -568,6 +580,17 @@ window.Dashboard = (function() {
        }
    }

+    function stopNotificationPolling() {
+        console.log('Stopping long polling for notifications...');
+        longPollingActive = false;
+
+        // Cancel any ongoing request
+        if (longPollingController) {
+            longPollingController.abort();
+            longPollingController = null;
+        }
+    }
+
    function updateMatchTimerDisplay() {
        let displaySeconds = serverTimerState.remaining_seconds;
@@ -661,6 +684,130 @@ window.Dashboard = (function() {
        });
    }
+    function startNotificationPolling() {
+        console.log('Starting long polling for real-time notifications...');
+
+        if (longPollingActive) {
+            stopNotificationPolling();
+        }
+
+        longPollingActive = true;
+        performLongPoll();
+    }
+
+    function performLongPoll() {
+        if (!longPollingActive || !isOnline) {
+            return;
+        }
+
+        // Create AbortController for this request
+        longPollingController = new AbortController();
+        const signal = longPollingController.signal;
+
+        // Make long polling request (server handles 30-second timeout)
+        const url = config.apiEndpoint + '/notifications?_=' + Date.now() + Math.random();
+        const options = {
+            method: 'GET',
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            signal: signal
+        };
+
+        fetch(url, options)
+            .then(function(response) {
+                if (!response.ok) {
+                    throw new Error('HTTP ' + response.status);
+                }
+                return response.json();
+            })
+            .then(function(data) {
+                if (data.success && data.notifications && data.notifications.length > 0) {
+                    // Process each notification
+                    data.notifications.forEach(function(notification) {
+                        handleNotification(notification);
+                    });
+                }
+
+                // Immediately start the next long poll after processing notifications
+                if (longPollingActive) {
+                    // Small delay to prevent overwhelming the server
+                    setTimeout(performLongPoll, 100);
+                }
+            })
+            .catch(function(error) {
+                if (error.name === 'AbortError') {
+                    // Request was cancelled, don't restart
+                    console.debug('Long poll cancelled');
+                    return;
+                }
+
+                console.debug('Long poll failed:', error.message);
+
+                // Retry after a delay if still active
+                if (longPollingActive) {
+                    setTimeout(performLongPoll, 2000); // Retry after 2 seconds on error
+                }
+            });
+    }
+
+    function handleNotification(notification) {
+        console.log('Received notification:', notification.type, notification);
+
+        if (notification.type === 'TIMER_UPDATE' && notification.data) {
+            // Update timer state from server notification
+            serverTimerState = {
+                running: notification.data.running || false,
+                remaining_seconds: notification.data.remaining_seconds || 0,
+                total_seconds: notification.data.total_seconds || 0,
+                fixture_id: notification.data.fixture_id || null,
+                match_id: notification.data.match_id || null,
+                start_time: notification.data.start_time || null
+            };
+
+            // Update display immediately
+            updateMatchTimerDisplay();
+            console.log('Timer updated from notification:', serverTimerState);
+        }
+        else if (notification.type === 'START_GAME') {
+            console.log('Start game notification received');
+            onStartGameMessage();
+            showNotification('Games started - match timer is now running', 'success');
+        }
+        else if (notification.type === 'MATCH_START') {
+            console.log('Match started:', notification.data);
+            showNotification('New match has started!', 'info');
+        }
+        else if (notification.type === 'GAME_STATUS') {
+            console.log('Game status update:', notification.data);
+            // Update system status if provided
+            if (notification.data.status) {
+                const systemStatus = document.getElementById('system-status');
+                if (systemStatus) {
+                    systemStatus.textContent = notification.data.status.charAt(0).toUpperCase() + notification.data.status.slice(1);
+                    systemStatus.className = 'badge ' + (notification.data.status === 'running' ? 'bg-success' : 'bg-secondary');
+                }
+            }
+        }
+        else if (notification.type === 'FIXTURE_STATUS_UPDATE') {
+            console.log('Fixture status update:', notification.data);
+            // Trigger page refresh if on fixtures page
+            if (window.location.pathname.includes('/fixtures')) {
+                // Refresh fixtures data if function exists
+                if (typeof loadFixtures === 'function') {
+                    loadFixtures();
+                }
+                if (typeof loadFixtureDetails === 'function') {
+                    loadFixtureDetails();
+                }
+            }
+        }
+        else {
+            console.log('Unknown notification type:', notification.type);
+        }
+    }
+
    function onStartGameMessage() {
        console.log('Received START_GAME message, initializing timer...');
@@ -675,7 +822,6 @@ window.Dashboard = (function() {
            // Update display
            updateMatchTimerDisplay();

-            startLocalCountdown();
            showNotification('Games started - match timer is now running', 'success');
        });
@@ -731,7 +877,8 @@ window.Dashboard = (function() {
        startMatchTimer: startMatchTimer,
        stopMatchTimer: stopMatchTimer,
        resetMatchTimer: resetMatchTimer,
-        startNextMatch: startNextMatch
+        startNextMatch: startNextMatch,
+        stopNotificationPolling: stopNotificationPolling
    };
})();
...
@@ -286,9 +286,6 @@
    // Initialize digital clock
    initializeClock();

-    // Initialize long polling for notifications
-    initializeNotifications();
});

function initializeClock() {
@@ -360,315 +357,6 @@
    });
}

-function initializeNotifications() {
-    let pollingActive = true;
-    let reconnectDelay = 1000; // Start with 1 second delay
-    const maxReconnectDelay = 30000; // Max 30 seconds
-
-    function pollNotifications() {
-        if (!pollingActive) return;
-
-        fetch('/api/notifications?timeout=30', {
-            method: 'GET',
-            headers: {
-                'Content-Type': 'application/json',
-            },
-            credentials: 'same-origin'
-        })
-        .then(response => {
-            if (!response.ok) {
-                throw new Error(`HTTP ${response.status}`);
-            }
-            return response.json();
-        })
-        .then(data => {
-            if (data.success && data.notification) {
-                handleNotification(data.notification);
-            }
-            // Reset reconnect delay on successful response
-            reconnectDelay = 1000;
-            // Continue polling
-            setTimeout(pollNotifications, 100);
-        })
-        .catch(error => {
-            console.error('Notification polling error:', error);
-            // Exponential backoff for reconnection
-            reconnectDelay = Math.min(reconnectDelay * 2, maxReconnectDelay);
-            console.log(`Reconnecting in ${reconnectDelay}ms...`);
-            setTimeout(pollNotifications, reconnectDelay);
-        });
-    }
-
-    function handleNotification(notification) {
-        console.log('Received notification:', notification);
-        const { type, data, timestamp } = notification;
-
-        // Handle different notification types
-        switch (type) {
-            case 'START_GAME':
-                handleStartGame(data, timestamp);
-                break;
-            case 'MATCH_START':
-                handleMatchStart(data, timestamp);
-                break;
-            case 'GAME_STATUS':
-                handleGameStatus(data, timestamp);
-                break;
-            case 'TIMER_UPDATE':
-                handleTimerUpdate(data, timestamp);
-                break;
-            case 'FIXTURE_STATUS_UPDATE':
-                handleFixtureStatusUpdate(data, timestamp);
-                break;
-            default:
-                console.log('Unknown notification type:', type);
-        }
-
-        // Show notification to user (except for timer and fixture status updates)
-        if (type !== 'TIMER_UPDATE' && type !== 'FIXTURE_STATUS_UPDATE') {
-            showNotificationToast(type, data);
-        }
-    }
-
-    function handleStartGame(data, timestamp) {
-        console.log('Handling START_GAME notification:', data);
-
-        // Update match timer display
-        const matchTimerElement = document.getElementById('match-timer');
-        if (matchTimerElement) {
-            matchTimerElement.textContent = 'Starting...';
-            matchTimerElement.className = 'badge bg-success';
-        }
-
-        // Trigger timer initialization in Dashboard.js if available
-        if (typeof Dashboard !== 'undefined' && Dashboard.onStartGameMessage) {
-            Dashboard.onStartGameMessage();
-        }
-
-        // Trigger custom event for page-specific handling
-        const event = new CustomEvent('startGame', {
-            detail: { data, timestamp }
-        });
-        document.dispatchEvent(event);
-    }
-
-    function handleMatchStart(data, timestamp) {
-        console.log('Handling MATCH_START notification:', data);
-
-        // Reset and restart timer
-        const matchTimerElement = document.getElementById('match-timer');
-        if (matchTimerElement) {
-            matchTimerElement.textContent = '00:00';
-            matchTimerElement.className = 'badge bg-primary';
-        }
-
-        // Force sync with match timer component
-        if (typeof Dashboard !== 'undefined' && Dashboard.syncWithServerTimer) {
-            Dashboard.syncWithServerTimer();
-        }
-
-        // Trigger custom event for page-specific handling
-        const event = new CustomEvent('matchStart', {
-            detail: { data, timestamp }
-        });
-        document.dispatchEvent(event);
-    }
-
-    function handleGameStatus(data, timestamp) {
-        console.log('Handling GAME_STATUS notification:', data);
-
-        // Update status displays
-        const status = data.status;
-        const fixtureId = data.fixture_id;
-
-        // Update status bar if present
-        const systemStatusElement = document.getElementById('system-status');
-        if (systemStatusElement) {
-            systemStatusElement.textContent = status;
-            systemStatusElement.className = `badge bg-${getStatusColor(status)}`;
-        }
-
-        // If this is a timer-related status, sync with timer
-        if (status === 'started' && data.action === 'refresh') {
-            if (typeof Dashboard !== 'undefined' && Dashboard.syncWithServerTimer) {
-                Dashboard.syncWithServerTimer();
-            }
-        }
-
-        // Trigger custom event for page-specific handling
-        const event = new CustomEvent('gameStatus', {
-            detail: { data, timestamp }
-        });
-        document.dispatchEvent(event);
-    }
-
-    function handleTimerUpdate(data, timestamp) {
-        console.log('Handling TIMER_UPDATE notification:', data);
-
-        // Update timer displays directly from server data ONLY
-        if (data.running !== undefined && data.remaining_seconds !== undefined) {
-            // Clear any existing client-side timers
-            if (window.clientTimerInterval) {
-                clearInterval(window.clientTimerInterval);
-                window.clientTimerInterval = null;
-            }
-
-            // Update all timer elements with server data
-            updateTimerElements(data.remaining_seconds, data.running);
-
-            // Store server data for potential interpolation
-            window.lastServerUpdate = {
-                remaining_seconds: data.remaining_seconds,
-                timestamp: Date.now(),
-                running: data.running
-            };
-
-            // Update timer status text
-            const timerStatus = document.getElementById('timer-status');
-            if (timerStatus) {
-                if (data.running) {
-                    timerStatus.textContent = 'Timer running';
-                    timerStatus.className = 'text-success';
-                } else {
-                    timerStatus.textContent = 'Waiting for games to start...';
-                    timerStatus.className = 'text-muted';
-                }
-            }
-
-            // Trigger custom event for page-specific handling
-            const event = new CustomEvent('timerUpdate', {
-                detail: { data, timestamp }
-            });
-            document.dispatchEvent(event);
-        }
-    }
-
-    function handleFixtureStatusUpdate(data, timestamp) {
-        console.log('Handling FIXTURE_STATUS_UPDATE notification:', data);
-
-        // Trigger automatic refresh if on fixtures page
-        const currentPage = window.location.pathname;
-        if (currentPage === '/fixtures' || currentPage.startsWith('/fixtures/')) {
-            // Check if loadFixtures function exists (fixtures page)
-            if (typeof loadFixtures === 'function') {
-                console.log('Auto-refreshing fixtures due to status update');
-                loadFixtures();
-            }
-            // Check if loadFixtureDetails function exists (fixture details page)
-            if (typeof loadFixtureDetails === 'function') {
-                console.log('Auto-refreshing fixture details due to status update');
-                loadFixtureDetails();
-            }
-        }
-
-        // Trigger custom event for page-specific handling
-        const event = new CustomEvent('fixtureStatusUpdate', {
-            detail: { data, timestamp }
-        });
-        document.dispatchEvent(event);
-    }
-
-    function updateTimerElements(seconds, isRunning) {
-        const minutes = Math.floor(seconds / 60);
-        const remainingSeconds = seconds % 60;
-        const timeString = minutes.toString().padStart(2, '0') + ':' + remainingSeconds.toString().padStart(2, '0');
-
-        console.log(`Updating timer elements: ${timeString} (${seconds}s, running: ${isRunning})`);
-
-        // Update status bar timer
-        const statusTimer = document.getElementById('match-timer');
-        if (statusTimer) {
-            statusTimer.textContent = timeString;
-            if (!isRunning) {
-                statusTimer.className = 'badge bg-warning text-dark';
-                statusTimer.style.animation = '';
-            } else if (seconds <= 60) { // Last minute
-                statusTimer.className = 'badge bg-danger text-white';
-                statusTimer.style.animation = 'timerPulse 0.5s infinite';
-            } else if (seconds <= 300) { // Last 5 minutes
-                statusTimer.className = 'badge bg-warning text-dark';
-                statusTimer.style.animation = '';
-            } else {
-                statusTimer.className = 'badge bg-success text-white';
-                statusTimer.style.animation = '';
-            }
-        }
-
-        // Update admin dashboard timer if present
-        const adminTimer = document.getElementById('admin-match-timer');
-        if (adminTimer) {
-            adminTimer.textContent = timeString;
-            if (isRunning) {
-                adminTimer.className = seconds <= 60 ? 'h3 mb-2 text-danger fw-bold' : 'h3 mb-2 text-success fw-bold';
-            } else {
-                adminTimer.className = 'h3 mb-2 text-muted';
-            }
-        }
-    }
-
-    function showNotificationToast(type, data) {
-        // Create and show a toast notification
-        const toastHtml = `
-            <div class="toast align-items-center text-white bg-primary border-0" role="alert">
-                <div class="d-flex">
-                    <div class="toast-body">
-                        <i class="fas fa-bell me-2"></i>
-                        <strong>${type.replace('_', ' ')}</strong>
-                        ${data.fixture_id ? ` - Fixture ${data.fixture_id}` : ''}
-                    </div>
-                    <button type="button" class="btn-close btn-close-white me-2 m-auto" data-bs-dismiss="toast"></button>
-                </div>
-            </div>
-        `;
-
-        // Add toast to container or create one
-        let toastContainer = document.querySelector('.toast-container');
-        if (!toastContainer) {
-            toastContainer = document.createElement('div');
-            toastContainer.className = 'toast-container position-fixed top-0 end-0 p-3';
-            toastContainer.style.zIndex = '9999';
-            document.body.appendChild(toastContainer);
-        }
-        toastContainer.insertAdjacentHTML('beforeend', toastHtml);
-
-        // Initialize and show the toast
-        const toastElement = toastContainer.lastElementChild;
-        const toast = new bootstrap.Toast(toastElement, {
-            autohide: true,
-            delay: 5000
-        });
-        toast.show();
-
-        // Remove from DOM after hiding
-        toastElement.addEventListener('hidden.bs.toast', () => {
-            toastElement.remove();
-        });
-    }
-
-    function getStatusColor(status) {
-        switch (status) {
-            case 'running': return 'success';
-            case 'scheduled': return 'warning';
-            case 'pending': return 'secondary';
-            case 'completed': return 'info';
-            case 'error': return 'danger';
-            default: return 'secondary';
-        }
-    }
-
-    // Start polling
-    console.log('Starting notification polling...');
-    pollNotifications();
-
-    // Stop polling when page unloads
-    window.addEventListener('beforeunload', () => {
-        pollingActive = false;
-    });
-}

</script>
{% endif %}
...
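Worth noting: the deleted poller above reconnected with exponential backoff (1 s doubling to a 30 s cap), whereas the consolidated dashboard.js loop retries on a flat 2-second delay. Should backoff be wanted again, the pattern is compact; a self-contained sketch with fetch_once standing in for any single poll attempt (names are illustrative):

import time

def poll_with_backoff(fetch_once, max_delay: float = 30.0) -> None:
    """Run fetch_once forever, doubling the retry delay after each failure."""
    delay = 1.0
    while True:
        try:
            fetch_once()
            delay = 1.0       # reset after any successful poll
            time.sleep(0.1)   # brief pause, mirroring the 100 ms re-poll
        except Exception as exc:
            print(f"poll failed: {exc}; retrying in {delay:.0f}s")
            time.sleep(delay)
            delay = min(delay * 2, max_delay)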
@@ -1082,8 +1082,6 @@ function showNotification(message, type = 'info') {
    }, 3000);
}
</script>

-<!-- Include the main dashboard.js for timer functionality -->
-<script src="{{ url_for('static', filename='js/dashboard.js') }}"></script>
<script>
// Initialize dashboard with timer functionality
document.addEventListener('DOMContentLoaded', function() {
...
@@ -738,8 +738,6 @@ function loadAvailableTemplates() {
    });
}
</script>

-<!-- Include the main dashboard.js for timer functionality -->
-<script src="{{ url_for('static', filename='js/dashboard.js') }}"></script>
<script>
// Initialize dashboard with timer functionality
document.addEventListener('DOMContentLoaded', function() {
...
@@ -927,8 +927,6 @@ function showNotification(message, type = 'info') {
    }, 3000);
}
</script>

-<!-- Include the main dashboard.js for timer functionality -->
-<script src="{{ url_for('static', filename='js/dashboard.js') }}"></script>
<script>
// Initialize dashboard with timer functionality
document.addEventListener('DOMContentLoaded', function() {
...