Almost there

parent 7fc35762
...@@ -633,6 +633,14 @@ class UpdatesResponseHandler(ResponseHandler): ...@@ -633,6 +633,14 @@ class UpdatesResponseHandler(ResponseHandler):
if not zip_filename: if not zip_filename:
continue continue
# Check if ZIP has already been validated successfully
if match.zip_validation_status == 'valid':
logger.debug(f"ZIP file already validated: {zip_filename} for fixture {fixture_id}")
continue
elif match.zip_validation_status == 'validating':
logger.debug(f"ZIP file validation in progress: {zip_filename} for fixture {fixture_id}")
continue
logger.debug(f"Validating ZIP file: {zip_filename} for fixture {fixture_id}") logger.debug(f"Validating ZIP file: {zip_filename} for fixture {fixture_id}")
zip_path = self.zip_storage_dir / zip_filename zip_path = self.zip_storage_dir / zip_filename
...@@ -649,6 +657,9 @@ class UpdatesResponseHandler(ResponseHandler): ...@@ -649,6 +657,9 @@ class UpdatesResponseHandler(ResponseHandler):
fixture_valid = False fixture_valid = False
else: else:
logger.debug(f"ZIP file validation passed: {zip_filename}") logger.debug(f"ZIP file validation passed: {zip_filename}")
# Mark as validated in database
match.zip_validation_status = 'valid'
session.commit()
if not fixture_valid: if not fixture_valid:
logger.warning(f"Fixture {fixture_id} has invalid/missing ZIP files: {missing_or_invalid_zips}") logger.warning(f"Fixture {fixture_id} has invalid/missing ZIP files: {missing_or_invalid_zips}")
......
...@@ -5,13 +5,13 @@ Games thread component for managing game-related operations ...@@ -5,13 +5,13 @@ Games thread component for managing game-related operations
import time import time
import logging import logging
import threading import threading
from datetime import datetime from datetime import datetime, timedelta
from typing import Optional, Dict, Any, List from typing import Optional, Dict, Any, List
from .thread_manager import ThreadedComponent from .thread_manager import ThreadedComponent
from .message_bus import MessageBus, Message, MessageType, MessageBuilder from .message_bus import MessageBus, Message, MessageType, MessageBuilder
from ..database.manager import DatabaseManager from ..database.manager import DatabaseManager
from ..database.models import MatchModel, MatchStatus, BetDetailModel, MatchOutcomeModel, GameConfigModel from ..database.models import MatchModel, MatchStatus, BetDetailModel, MatchOutcomeModel, GameConfigModel, ExtractionAssociationModel
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
...@@ -198,15 +198,11 @@ class GamesThread(ThreadedComponent): ...@@ -198,15 +198,11 @@ class GamesThread(ThreadedComponent):
self._send_response(message, "discarded", f"Fixture {fixture_id} is already completed") self._send_response(message, "discarded", f"Fixture {fixture_id} is already completed")
return return
# Check if all required ZIP files are available for this fixture # Fixture is not terminal, activate it (ZIP validation happens asynchronously)
if not self._are_fixture_zips_available(fixture_id):
logger.info(f"Fixture {fixture_id} has missing or invalid ZIP files - discarding START_GAME message")
self._send_response(message, "waiting_for_downloads", f"Fixture {fixture_id} is waiting for ZIP file downloads")
return
# Fixture is not terminal and has all ZIPs, activate it
logger.info(f"Activating provided fixture: {fixture_id}") logger.info(f"Activating provided fixture: {fixture_id}")
self._activate_fixture(fixture_id, message) self._activate_fixture(fixture_id, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(fixture_id)
return return
# No fixture_id provided - check today's fixtures # No fixture_id provided - check today's fixtures
...@@ -224,27 +220,19 @@ class GamesThread(ThreadedComponent): ...@@ -224,27 +220,19 @@ class GamesThread(ThreadedComponent):
# Step 3: Check if there are active fixtures with today's date # Step 3: Check if there are active fixtures with today's date
active_fixture = self._find_active_today_fixture() active_fixture = self._find_active_today_fixture()
if active_fixture: if active_fixture:
# Check if all required ZIP files are available for this fixture
if not self._are_fixture_zips_available(active_fixture):
logger.info(f"Active fixture {active_fixture} has missing or invalid ZIP files - discarding START_GAME message")
self._send_response(message, "waiting_for_downloads", f"Fixture {active_fixture} is waiting for ZIP file downloads")
return
logger.info(f"Found active fixture for today: {active_fixture}") logger.info(f"Found active fixture for today: {active_fixture}")
self._activate_fixture(active_fixture, message) self._activate_fixture(active_fixture, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(active_fixture)
return return
# Step 4: No active fixtures found - initialize new fixture # Step 4: No active fixtures found - initialize new fixture
logger.info("No active fixtures found - initializing new fixture") logger.info("No active fixtures found - initializing new fixture")
new_fixture_id = self._initialize_new_fixture() new_fixture_id = self._initialize_new_fixture()
if new_fixture_id: if new_fixture_id:
# Check if all required ZIP files are available for the new fixture
if not self._are_fixture_zips_available(new_fixture_id):
logger.info(f"New fixture {new_fixture_id} has missing or invalid ZIP files - discarding START_GAME message")
self._send_response(message, "waiting_for_downloads", f"Fixture {new_fixture_id} is waiting for ZIP file downloads")
return
self._activate_fixture(new_fixture_id, message) self._activate_fixture(new_fixture_id, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(new_fixture_id)
else: else:
logger.warning("Could not initialize new fixture") logger.warning("Could not initialize new fixture")
self._send_response(message, "error", "Could not initialize new fixture") self._send_response(message, "error", "Could not initialize new fixture")
...@@ -547,7 +535,7 @@ class GamesThread(ThreadedComponent): ...@@ -547,7 +535,7 @@ class GamesThread(ThreadedComponent):
logger.info(f"All matches completed for fixture {self.current_fixture_id} - creating new matches from old completed ones") logger.info(f"All matches completed for fixture {self.current_fixture_id} - creating new matches from old completed ones")
# Instead of stopping the game, create 5 new matches from old completed matches # Instead of stopping the game, create 5 new matches from old completed matches
old_matches = self._select_random_completed_matches(5, session) old_matches = self._select_random_completed_matches_with_fallback(5, self.current_fixture_id, session)
if old_matches: if old_matches:
self._create_matches_from_old_matches(self.current_fixture_id, old_matches, session) self._create_matches_from_old_matches(self.current_fixture_id, old_matches, session)
logger.info(f"Created 5 new matches in fixture {self.current_fixture_id} from old completed matches") logger.info(f"Created 5 new matches in fixture {self.current_fixture_id} from old completed matches")
...@@ -797,9 +785,27 @@ class GamesThread(ThreadedComponent): ...@@ -797,9 +785,27 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to check if only non-terminal fixture: {e}") logger.error(f"Failed to check if only non-terminal fixture: {e}")
return False return False
def _start_async_zip_validation(self, fixture_id: str):
    """Start asynchronous ZIP validation for a fixture without blocking.

    Spawns a daemon thread running _validate_fixture_zips_async so the
    caller (the START_GAME path) returns immediately; validation results
    are persisted to the database by the worker itself.
    """
    try:
        logger.info(f"Starting asynchronous ZIP validation for fixture {fixture_id}")
        # Daemon thread: must not keep the process alive on shutdown.
        worker = threading.Thread(
            target=self._validate_fixture_zips_async,
            args=(fixture_id,),
            daemon=True,
        )
        worker.start()
    except Exception as e:
        logger.error(f"Failed to start async ZIP validation for fixture {fixture_id}: {e}")
def _validate_fixture_zips_async(self, fixture_id: str):
"""Validate ZIP files for a fixture asynchronously"""
try:
logger.info(f"Async ZIP validation started for fixture {fixture_id}")
session = self.db_manager.get_session() session = self.db_manager.get_session()
try: try:
# Get all active matches for this fixture that have ZIP files # Get all active matches for this fixture that have ZIP files
...@@ -811,48 +817,148 @@ class GamesThread(ThreadedComponent): ...@@ -811,48 +817,148 @@ class GamesThread(ThreadedComponent):
if not matches_with_zips: if not matches_with_zips:
logger.debug(f"Fixture {fixture_id} has no matches requiring ZIP files") logger.debug(f"Fixture {fixture_id} has no matches requiring ZIP files")
return True # No ZIP files required, so it's "available" return
logger.info(f"Validating {len(matches_with_zips)} ZIP files for fixture {fixture_id}")
# Reset any stale 'validating' statuses (older than 5 minutes)
stale_threshold = datetime.utcnow() - timedelta(minutes=5)
stale_count = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id,
MatchModel.zip_validation_status == 'validating',
MatchModel.updated_at < stale_threshold
).update({'zip_validation_status': 'pending'})
if stale_count > 0:
logger.info(f"Reset {stale_count} stale 'validating' statuses to 'pending'")
session.commit()
for match in matches_with_zips:
# Check if already validated
if match.zip_validation_status == 'valid':
logger.debug(f"Match {match.match_number} ZIP already validated, skipping")
continue
elif match.zip_validation_status == 'validating':
logger.debug(f"Match {match.match_number} ZIP validation in progress, skipping")
continue
# Start validation for this match
self._validate_single_zip_async(match.id, session)
finally:
session.close()
logger.info(f"Async ZIP validation completed for fixture {fixture_id}")
except Exception as e:
logger.error(f"Async ZIP validation failed for fixture {fixture_id}: {e}")
def _validate_single_zip_async(self, match_id: int, session):
    """Validate a single ZIP file asynchronously.

    Marks the match as 'validating' in the given session, then hands the
    actual file inspection off to a daemon thread (_perform_zip_validation
    opens its own session, so only the id is passed along).
    """
    try:
        match = session.query(MatchModel).filter(MatchModel.id == match_id).first()
        if match is None:
            logger.warning(f"Match {match_id} not found for ZIP validation")
            return

        # Update status to validating
        match.zip_validation_status = 'validating'
        session.commit()

        # Start validation in separate thread
        worker = threading.Thread(
            target=self._perform_zip_validation,
            args=(match_id,),
            daemon=True,
        )
        worker.start()
    except Exception as e:
        logger.error(f"Failed to start ZIP validation for match {match_id}: {e}")
def _perform_zip_validation(self, match_id: int):
    """Perform actual ZIP validation for one match (runs in a worker thread).

    Loads the match, checks that its ZIP archive exists, is non-empty and
    contains at least one .mp4 entry, then persists the outcome to
    ``zip_validation_status`` ('valid' or 'invalid'). On unexpected errors a
    best-effort second session records 'invalid' so the match does not stay
    stuck in 'validating'.

    Args:
        match_id: Primary key of the MatchModel row to validate.
    """
    try:
        session = self.db_manager.get_session()
        try:
            match = session.query(MatchModel).filter(MatchModel.id == match_id).first()
            if not match:
                logger.warning(f"Match {match_id} not found during ZIP validation")
                return

            zip_filename = match.zip_filename
            if not zip_filename:
                logger.warning(f"Match {match_id} has no ZIP filename")
                return

            # Local imports keep module import time low and avoid cycles.
            from ..config.settings import get_user_data_dir
            import zipfile

            user_data_dir = get_user_data_dir()
            zip_path = user_data_dir / "zip_files" / zip_filename
            logger.info(f"Validating ZIP file: {zip_path}")

            def _mark_invalid():
                # Persist failure so the async validator stops re-queuing this match.
                match.zip_validation_status = 'invalid'
                session.commit()

            # Check if file exists
            if not zip_path.exists():
                logger.error(f"ZIP file missing: {zip_path}")
                _mark_invalid()
                return

            # Check file size
            if zip_path.stat().st_size == 0:
                logger.error(f"ZIP file empty: {zip_path}")
                _mark_invalid()
                return

            # Try to open and validate ZIP structure
            try:
                with zipfile.ZipFile(str(zip_path), 'r') as zip_ref:
                    file_list = zip_ref.namelist()
                    # NOTE(review): only "at least one .mp4" is enforced here; the
                    # specific names (WIN1.mp4, WIN2.mp4, DRAW.mp4) mentioned in the
                    # original comment were never actually checked — confirm intent
                    # before tightening.
                    found_videos = [f for f in file_list if f.endswith('.mp4')]
                    if not found_videos:
                        logger.error(f"ZIP file contains no MP4 files: {zip_path}")
                        _mark_invalid()
                        return
                    logger.info(f"ZIP file valid - contains {len(found_videos)} video files: {zip_path}")
            except zipfile.BadZipFile as e:
                logger.error(f"Invalid ZIP file: {zip_path} - {e}")
                _mark_invalid()
                return
            except Exception as e:
                logger.error(f"Error validating ZIP file: {zip_path} - {e}")
                _mark_invalid()
                return

            # Validation successful
            match.zip_validation_status = 'valid'
            session.commit()
            logger.info(f"ZIP validation successful for match {match_id}: {zip_filename}")
        finally:
            session.close()
    except Exception as e:
        logger.error(f"ZIP validation failed for match {match_id}: {e}")
        # Best-effort: record 'invalid' so the fixture is not stuck 'validating'.
        try:
            session = self.db_manager.get_session()
            try:
                match = session.query(MatchModel).filter(MatchModel.id == match_id).first()
                if match:
                    match.zip_validation_status = 'invalid'
                    session.commit()
            finally:
                # BUGFIX: original leaked this session when commit() raised,
                # because close() was not in a finally block.
                session.close()
        except Exception as update_e:
            logger.error(f"Failed to update validation status after error: {update_e}")
def _find_active_today_fixture(self) -> Optional[str]: def _find_active_today_fixture(self) -> Optional[str]:
"""Find an active fixture with today's date""" """Find an active fixture with today's date"""
...@@ -917,7 +1023,7 @@ class GamesThread(ThreadedComponent): ...@@ -917,7 +1023,7 @@ class GamesThread(ThreadedComponent):
# No fixtures with no start_time found - create a new fixture from old completed matches # No fixtures with no start_time found - create a new fixture from old completed matches
logger.info("No fixtures with no start_time found - creating new fixture from old completed matches") logger.info("No fixtures with no start_time found - creating new fixture from old completed matches")
old_matches = self._select_random_completed_matches(5, session) old_matches = self._select_random_completed_matches_with_fallback(5, None, session)
if old_matches: if old_matches:
fixture_id = self._create_new_fixture_from_old_matches(old_matches, session) fixture_id = self._create_new_fixture_from_old_matches(old_matches, session)
if fixture_id: if fixture_id:
...@@ -1816,6 +1922,10 @@ class GamesThread(ThreadedComponent): ...@@ -1816,6 +1922,10 @@ class GamesThread(ThreadedComponent):
logger.info(f"DEBUG _set_match_status_and_result: Found match {match_id}, current status='{match.status}', current result='{match.result}'") logger.info(f"DEBUG _set_match_status_and_result: Found match {match_id}, current status='{match.status}', current result='{match.result}'")
match.status = status match.status = status
match.result = result match.result = result
# Set end_time when match is completed
if status == 'done':
match.end_time = datetime.utcnow()
logger.info(f"DEBUG _set_match_status_and_result: Set end_time for match {match_id}")
session.commit() session.commit()
logger.info(f"Updated match {match_id} status to {status} and result to {result}") logger.info(f"Updated match {match_id} status to {status} and result to {result}")
...@@ -1956,6 +2066,13 @@ class GamesThread(ThreadedComponent): ...@@ -1956,6 +2066,13 @@ class GamesThread(ThreadedComponent):
selected_result = self._weighted_result_selection(eligible_payouts, session, match_id) selected_result = self._weighted_result_selection(eligible_payouts, session, match_id)
logger.info(f"🎯 [EXTRACTION DEBUG] Selected result: {selected_result}") logger.info(f"🎯 [EXTRACTION DEBUG] Selected result: {selected_result}")
# Step 7.1: Log winning outcomes from results associations configurations
winning_outcomes = session.query(ExtractionAssociationModel.outcome_name).filter(
ExtractionAssociationModel.extraction_result == selected_result
).distinct().all()
winning_outcome_names = [outcome.outcome_name for outcome in winning_outcomes]
logger.info(f"🏆 [EXTRACTION DEBUG] Winning outcomes for result '{selected_result}': {winning_outcome_names}")
# Step 8: Update bet results # Step 8: Update bet results
logger.info(f"💾 [EXTRACTION DEBUG] Step 8: Updating bet results for match {match_id}") logger.info(f"💾 [EXTRACTION DEBUG] Step 8: Updating bet results for match {match_id}")
self._update_bet_results(match_id, selected_result, session) self._update_bet_results(match_id, selected_result, session)
...@@ -2061,6 +2178,11 @@ class GamesThread(ThreadedComponent): ...@@ -2061,6 +2178,11 @@ class GamesThread(ThreadedComponent):
under_over_outcome = 'UNDER' if selected_result == 'UNDER' else 'OVER' if selected_result == 'OVER' else None under_over_outcome = 'UNDER' if selected_result == 'UNDER' else 'OVER' if selected_result == 'OVER' else None
logger.info(f"DEBUG _update_bet_results: under_over_outcome = '{under_over_outcome}'") logger.info(f"DEBUG _update_bet_results: under_over_outcome = '{under_over_outcome}'")
# DEBUG: Log the current match result before updating
match = session.query(MatchModel).filter_by(id=match_id).first()
if match:
logger.info(f"DEBUG _update_bet_results: Current match.result before formatting = '{match.result}'")
if under_over_outcome: if under_over_outcome:
# UNDER/OVER bet wins # UNDER/OVER bet wins
under_over_bets = session.query(BetDetailModel).filter( under_over_bets = session.query(BetDetailModel).filter(
...@@ -2116,12 +2238,44 @@ class GamesThread(ThreadedComponent): ...@@ -2116,12 +2238,44 @@ class GamesThread(ThreadedComponent):
).update({'result': 'lost'}) ).update({'result': 'lost'})
logger.info(f"DEBUG _update_bet_results: Set {losing_count} other bets to lost") logger.info(f"DEBUG _update_bet_results: Set {losing_count} other bets to lost")
# Update the match result in the matches table # Update the match result in the matches table with winning outcomes in parentheses
match = session.query(MatchModel).filter_by(id=match_id).first() match = session.query(MatchModel).filter_by(id=match_id).first()
if match: if match:
logger.info(f"DEBUG _update_bet_results: Before update - match.result = '{match.result}'") logger.info(f"DEBUG _update_bet_results: Before update - match.result = '{match.result}'")
match.result = selected_result
logger.info(f"Updated match {match_id} result to {selected_result}") # Get winning outcomes for the selected result
winning_outcomes = session.query(ExtractionAssociationModel.outcome_name).filter(
ExtractionAssociationModel.extraction_result == selected_result
).distinct().all()
winning_outcome_names = [outcome.outcome_name for outcome in winning_outcomes]
logger.info(f"DEBUG _update_bet_results: Found {len(winning_outcomes)} winning outcomes for '{selected_result}': {winning_outcome_names}")
# Include UNDER/OVER if applicable
under_over_result = None
if under_over_outcome:
under_over_result = under_over_outcome
logger.info(f"DEBUG _update_bet_results: UNDER/OVER result detected: '{under_over_result}'")
# Format result to include winning outcomes and UNDER/OVER
result_parts = []
if selected_result not in ['UNDER', 'OVER']:
result_parts.append(selected_result)
logger.info(f"DEBUG _update_bet_results: Added main result '{selected_result}' to result_parts")
if under_over_result:
result_parts.append(under_over_result)
logger.info(f"DEBUG _update_bet_results: Added UNDER/OVER result '{under_over_result}' to result_parts")
if winning_outcome_names:
# Add winning outcomes that are not already included
additional_outcomes = [outcome for outcome in winning_outcome_names if outcome not in result_parts]
if additional_outcomes:
result_parts.extend(additional_outcomes)
logger.info(f"DEBUG _update_bet_results: Added additional outcomes {additional_outcomes} to result_parts")
# Join with " + " separator
formatted_result = " + ".join(result_parts) if result_parts else selected_result
logger.info(f"DEBUG _update_bet_results: Final result_parts = {result_parts}, formatted_result = '{formatted_result}'")
match.result = formatted_result
logger.info(f"Updated match {match_id} result to {formatted_result}")
else: else:
logger.error(f"DEBUG _update_bet_results: Match {match_id} not found for result update!") logger.error(f"DEBUG _update_bet_results: Match {match_id} not found for result update!")
...@@ -2447,7 +2601,7 @@ class GamesThread(ThreadedComponent): ...@@ -2447,7 +2601,7 @@ class GamesThread(ThreadedComponent):
# Get all completed matches (status = 'done', 'cancelled', or 'failed') # Get all completed matches (status = 'done', 'cancelled', or 'failed')
# Exclude matches from fixtures that contain "_recycle_" in the fixture name # Exclude matches from fixtures that contain "_recycle_" in the fixture name
completed_matches = session.query(MatchModel).filter( completed_matches = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'cancelled', 'failed']), MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True, MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%') ~MatchModel.fixture_id.like('%_recycle_%')
).all() ).all()
...@@ -2497,6 +2651,7 @@ class GamesThread(ThreadedComponent): ...@@ -2497,6 +2651,7 @@ class GamesThread(ThreadedComponent):
zip_filename=old_match.zip_filename, zip_filename=old_match.zip_filename,
zip_sha1sum=old_match.zip_sha1sum, zip_sha1sum=old_match.zip_sha1sum,
zip_upload_status='completed', # Assume ZIP is already available zip_upload_status='completed', # Assume ZIP is already available
zip_validation_status='valid', # ZIP already validated from old match
fixture_active_time=int(now.timestamp()), fixture_active_time=int(now.timestamp()),
result=None, # Reset result for new match result=None, # Reset result for new match
end_time=None, # Reset end time for new match end_time=None, # Reset end time for new match
...@@ -2553,6 +2708,7 @@ class GamesThread(ThreadedComponent): ...@@ -2553,6 +2708,7 @@ class GamesThread(ThreadedComponent):
zip_filename=old_match.zip_filename, zip_filename=old_match.zip_filename,
zip_sha1sum=old_match.zip_sha1sum, zip_sha1sum=old_match.zip_sha1sum,
zip_upload_status='completed', # Assume ZIP is already available zip_upload_status='completed', # Assume ZIP is already available
zip_validation_status='valid', # ZIP already validated from old match
fixture_active_time=int(now.timestamp()), fixture_active_time=int(now.timestamp()),
result=None, # Reset result for new match result=None, # Reset result for new match
end_time=None, # Reset end time for new match end_time=None, # Reset end time for new match
...@@ -2607,39 +2763,9 @@ class GamesThread(ThreadedComponent): ...@@ -2607,39 +2763,9 @@ class GamesThread(ThreadedComponent):
).all() ).all()
if active_matches: if active_matches:
# Check if all required ZIP files are available for these active matches # Active matches found - return "already_active" since validation happens asynchronously
# Only consider fixtures "active" if all their ZIP files are downloaded logger.debug("Active matches found - game can be activated")
fixtures_with_missing_zips = set() return "already_active"
for match in active_matches:
if match.zip_filename and match.active_status:
# Check if ZIP file exists and is valid
from ..config.settings import get_user_data_dir
from pathlib import Path
import os
user_data_dir = get_user_data_dir()
zip_path = user_data_dir / "zip_files" / match.zip_filename
if not zip_path.exists():
fixtures_with_missing_zips.add(match.fixture_id)
logger.debug(f"ZIP file missing for match {match.match_number} in fixture {match.fixture_id}: {zip_path}")
else:
# Quick validation - check file size > 0
try:
if zip_path.stat().st_size == 0:
fixtures_with_missing_zips.add(match.fixture_id)
logger.debug(f"ZIP file empty for match {match.match_number} in fixture {match.fixture_id}: {zip_path}")
except OSError as e:
fixtures_with_missing_zips.add(match.fixture_id)
logger.debug(f"Cannot access ZIP file for match {match.match_number} in fixture {match.fixture_id}: {e}")
if fixtures_with_missing_zips:
logger.info(f"Active fixtures found but missing ZIP files for fixtures: {fixtures_with_missing_zips} - waiting for downloads")
return "waiting_for_downloads" # New status indicating waiting for ZIP downloads
else:
logger.debug("All active fixtures have required ZIP files available")
return "already_active"
# Check if all today's fixtures are in terminal states # Check if all today's fixtures are in terminal states
if self._has_today_fixtures_all_terminal(): if self._has_today_fixtures_all_terminal():
...@@ -2696,9 +2822,9 @@ class GamesThread(ThreadedComponent): ...@@ -2696,9 +2822,9 @@ class GamesThread(ThreadedComponent):
last_played_match_id = self._get_last_played_match_id(fixture_id, session) last_played_match_id = self._get_last_played_match_id(fixture_id, session)
logger.info(f"🎯 Last played match ID: {last_played_match_id}") logger.info(f"🎯 Last played match ID: {last_played_match_id}")
# Select random completed matches, excluding the last played one # Select random completed matches using progressive fallback (excludes last 3 matches)
old_matches = self._select_random_completed_matches_excluding_last( old_matches = self._select_random_completed_matches_with_fallback(
minimum_required, last_played_match_id, session minimum_required, fixture_id, session
) )
if old_matches: if old_matches:
...@@ -2720,7 +2846,7 @@ class GamesThread(ThreadedComponent): ...@@ -2720,7 +2846,7 @@ class GamesThread(ThreadedComponent):
# Find the most recently completed match in this fixture # Find the most recently completed match in this fixture
last_match = session.query(MatchModel).filter( last_match = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id, MatchModel.fixture_id == fixture_id,
MatchModel.status.in_(['done', 'cancelled', 'failed']), MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True MatchModel.active_status == True
).order_by(MatchModel.updated_at.desc()).first() ).order_by(MatchModel.updated_at.desc()).first()
...@@ -2741,7 +2867,7 @@ class GamesThread(ThreadedComponent): ...@@ -2741,7 +2867,7 @@ class GamesThread(ThreadedComponent):
# Build query for completed matches # Build query for completed matches
# Exclude matches from fixtures that contain "_recycle_" in the fixture name # Exclude matches from fixtures that contain "_recycle_" in the fixture name
query = session.query(MatchModel).filter( query = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'cancelled', 'failed']), MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True, MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%') ~MatchModel.fixture_id.like('%_recycle_%')
) )
...@@ -2775,6 +2901,117 @@ class GamesThread(ThreadedComponent): ...@@ -2775,6 +2901,117 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to select random completed matches excluding same fighters: {e}") logger.error(f"Failed to select random completed matches excluding same fighters: {e}")
return [] return []
def _select_random_completed_matches_with_fallback(self, count: int, fixture_id: Optional[str], session, max_attempts: int = 5) -> List[MatchModel]:
    """Select random completed matches with progressive fallback.

    Tries up to ``max_attempts`` times with progressively relaxed exclusion
    criteria (last 3 → 2 → 1 matches by fighters+venue, then last 1 by
    fighters only, then no exclusions). If every attempt comes up short,
    returns a random sample of all eligible matches (or all of them when
    fewer than ``count`` exist).

    Args:
        count: Number of matches to select.
        fixture_id: Fixture whose recent matches should be excluded, or
            None when building a brand-new fixture (no exclusions possible).
        session: Active SQLAlchemy session.
        max_attempts: Maximum relaxation steps to try (default 5).

    Returns:
        A list of up to ``count`` MatchModel rows (possibly fewer).
    """
    import random

    # (exclusion_count, fighters_only, log description) per attempt; attempts
    # beyond the table reuse the final "no exclusions" plan.
    attempt_plans = [
        (3, False, "Excluding last 3 matches (fighters + venue)"),
        (2, False, "Excluding last 2 matches (fighters + venue)"),
        (1, False, "Excluding last 1 match (fighters + venue)"),
        (1, True, "Excluding last 1 match (fighters only)"),
        (0, False, "No exclusions (final fallback)"),
    ]

    for attempt in range(max_attempts):
        exclusion_count, fighters_only, description = attempt_plans[min(attempt, len(attempt_plans) - 1)]
        try:
            logger.info(f"🎯 Attempt {attempt + 1}: {description}")

            # Get available matches with current exclusion criteria
            available_matches = self._get_available_matches_excluding_recent(
                fixture_id, exclusion_count, fighters_only, session
            )

            if len(available_matches) >= count:
                selected = random.sample(available_matches, count)
                logger.info(f"✅ Success on attempt {attempt + 1}: selected {len(selected)} matches from {len(available_matches)} available")
                return selected
            logger.warning(f"⚠️ Attempt {attempt + 1} failed: only {len(available_matches)} matches available, need {count}")
        except Exception as e:
            logger.error(f"❌ Attempt {attempt + 1} failed with error: {e}")

    # Final fallback: return whatever matches are available
    logger.warning(f"🚨 All {max_attempts} attempts failed - returning all available matches")
    all_matches = session.query(MatchModel).filter(
        MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
        MatchModel.active_status == True,
        ~MatchModel.fixture_id.like('%_recycle_%')
    ).all()
    # BUGFIX: original returned the deterministic slice all_matches[:count],
    # contradicting the method's "random" contract — sample instead.
    result = random.sample(all_matches, count) if len(all_matches) >= count else all_matches
    logger.info(f"🔄 Final fallback: returning {len(result)} matches from {len(all_matches)} total available")
    return result
def _get_available_matches_excluding_recent(self, fixture_id: Optional[str], exclude_last_n: int, fighters_only: bool, session) -> List[MatchModel]:
    """Get completed matches eligible for reuse, excluding recent pairings.

    Looks up the last ``exclude_last_n`` matches of ``fixture_id`` (by match
    number) and filters out any completed match that repeats them — either
    the same fighter pairing in both orders (``fighters_only=True``) or the
    same fighters AND venue (``fighters_only=False``). Returns [] on error.
    """
    try:
        if fixture_id is None:
            # Creating a brand-new fixture: nothing recent to exclude.
            recent_matches = []
        else:
            # Last N matches of the fixture by match_number, regardless of
            # completion status.
            recent_matches = (
                session.query(MatchModel)
                .filter(
                    MatchModel.fixture_id == fixture_id,
                    MatchModel.active_status == True
                )
                .order_by(MatchModel.match_number.desc())
                .limit(exclude_last_n)
                .all()
            )

        logger.debug(f"Found {len(recent_matches)} recent matches to exclude: {[f'#{m.match_number}: {m.fighter1_township} vs {m.fighter2_township}' for m in recent_matches]}")

        # One SQL predicate per excluded recent match.
        exclusion_filters = []
        for prior in recent_matches:
            if fighters_only:
                # Same fighter pairing in either order is disallowed.
                exclusion_filters.append(
                    ~((MatchModel.fighter1_township == prior.fighter1_township) &
                      (MatchModel.fighter2_township == prior.fighter2_township)) &
                    ~((MatchModel.fighter1_township == prior.fighter2_township) &
                      (MatchModel.fighter2_township == prior.fighter1_township))
                )
            else:
                # Same fighters AND same venue is disallowed.
                exclusion_filters.append(
                    ~((MatchModel.fighter1_township == prior.fighter1_township) &
                      (MatchModel.fighter2_township == prior.fighter2_township) &
                      (MatchModel.venue_kampala_township == prior.venue_kampala_township))
                )

        available_matches = session.query(MatchModel).filter(
            MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
            MatchModel.active_status == True,
            ~MatchModel.fixture_id.like('%_recycle_%'),
            *exclusion_filters
        ).all()
        logger.debug(f"Found {len(available_matches)} matches available after exclusions")
        return available_matches
    except Exception as e:
        logger.error(f"Failed to get available matches excluding recent: {e}")
        return []
def _determine_new_match_status(self, fixture_id: str, session) -> str: def _determine_new_match_status(self, fixture_id: str, session) -> str:
"""Determine the status for new matches based on system state""" """Determine the status for new matches based on system state"""
try: try:
...@@ -2805,6 +3042,7 @@ class GamesThread(ThreadedComponent): ...@@ -2805,6 +3042,7 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to determine new match status: {e}") logger.error(f"Failed to determine new match status: {e}")
return 'scheduled' # Default fallback return 'scheduled' # Default fallback
def _cleanup_previous_match_extractions(self): def _cleanup_previous_match_extractions(self):
"""Clean up all previous unzipped match directories from temporary location""" """Clean up all previous unzipped match directories from temporary location"""
try: try:
......
...@@ -526,7 +526,7 @@ class MatchTimerComponent(ThreadedComponent): ...@@ -526,7 +526,7 @@ class MatchTimerComponent(ThreadedComponent):
# Find the most recently completed match in this fixture # Find the most recently completed match in this fixture
last_match = session.query(MatchModel).filter( last_match = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id, MatchModel.fixture_id == fixture_id,
MatchModel.status.in_(['done', 'cancelled', 'failed']), MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True MatchModel.active_status == True
).order_by(MatchModel.updated_at.desc()).first() ).order_by(MatchModel.updated_at.desc()).first()
...@@ -547,7 +547,7 @@ class MatchTimerComponent(ThreadedComponent): ...@@ -547,7 +547,7 @@ class MatchTimerComponent(ThreadedComponent):
# Build query for completed matches # Build query for completed matches
# Exclude matches from fixtures that contain "_recycle_" in the fixture name # Exclude matches from fixtures that contain "_recycle_" in the fixture name
query = session.query(MatchModel).filter( query = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'cancelled', 'failed']), MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True, MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%') ~MatchModel.fixture_id.like('%_recycle_%')
) )
......
...@@ -2316,6 +2316,50 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration): ...@@ -2316,6 +2316,50 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
logger.error(f"Failed to revert match_number constraint: {e}") logger.error(f"Failed to revert match_number constraint: {e}")
return False return False
class Migration_030_AddZipValidationStatus(DatabaseMigration):
    """Add zip_validation_status field to matches table"""

    def __init__(self):
        super().__init__("030", "Add zip_validation_status field to matches table")

    def up(self, db_manager) -> bool:
        """Add the zip_validation_status column and its index to the matches table.

        Idempotent: the column is only added when missing, and the index is
        always (re)ensured with CREATE INDEX IF NOT EXISTS, so a run that
        previously added the column but failed before creating the index is
        repaired by re-running this migration.

        Args:
            db_manager: Database manager exposing a SQLAlchemy ``engine``.

        Returns:
            True on success, False on any error.
        """
        try:
            with db_manager.engine.connect() as conn:
                # Check whether the column already exists (SQLite has no
                # ADD COLUMN IF NOT EXISTS).
                result = conn.execute(text("PRAGMA table_info(matches)"))
                columns = [row[1] for row in result.fetchall()]

                if 'zip_validation_status' not in columns:
                    # Add zip_validation_status column with default value 'pending'
                    conn.execute(text("""
                        ALTER TABLE matches
                        ADD COLUMN zip_validation_status VARCHAR(20) DEFAULT 'pending'
                    """))
                    logger.info("zip_validation_status column added to matches table")
                else:
                    logger.info("zip_validation_status column already exists in matches table")

                # Ensure the index exists regardless of whether the column was
                # just added — covers partially-applied earlier runs.
                conn.execute(text("""
                    CREATE INDEX IF NOT EXISTS ix_matches_zip_validation_status
                    ON matches(zip_validation_status)
                """))
                conn.commit()

            return True
        except Exception as e:
            logger.error(f"Failed to add zip_validation_status field to matches: {e}")
            return False

    def down(self, db_manager) -> bool:
        """Remove zip_validation_status column - SQLite doesn't support DROP COLUMN easily"""
        # Intentionally a no-op: dropping a column in SQLite would require a
        # full table rebuild; leaving the column in place is harmless.
        logger.warning("SQLite doesn't support DROP COLUMN - zip_validation_status column will remain")
        return True
# Registry of all migrations in order # Registry of all migrations in order
MIGRATIONS: List[DatabaseMigration] = [ MIGRATIONS: List[DatabaseMigration] = [
Migration_001_InitialSchema(), Migration_001_InitialSchema(),
...@@ -2347,6 +2391,7 @@ MIGRATIONS: List[DatabaseMigration] = [ ...@@ -2347,6 +2391,7 @@ MIGRATIONS: List[DatabaseMigration] = [
Migration_027_AddDefaultIntroTemplatesConfig(), Migration_027_AddDefaultIntroTemplatesConfig(),
Migration_028_AddFixtureRefreshIntervalConfig(), Migration_028_AddFixtureRefreshIntervalConfig(),
Migration_029_ChangeMatchNumberToUniqueWithinFixture(), Migration_029_ChangeMatchNumberToUniqueWithinFixture(),
Migration_030_AddZipValidationStatus(),
] ]
......
...@@ -472,6 +472,7 @@ class MatchModel(BaseModel): ...@@ -472,6 +472,7 @@ class MatchModel(BaseModel):
Index('ix_matches_file_sha1sum', 'file_sha1sum'), Index('ix_matches_file_sha1sum', 'file_sha1sum'),
Index('ix_matches_zip_sha1sum', 'zip_sha1sum'), Index('ix_matches_zip_sha1sum', 'zip_sha1sum'),
Index('ix_matches_zip_upload_status', 'zip_upload_status'), Index('ix_matches_zip_upload_status', 'zip_upload_status'),
Index('ix_matches_zip_validation_status', 'zip_validation_status'),
Index('ix_matches_created_by', 'created_by'), Index('ix_matches_created_by', 'created_by'),
Index('ix_matches_fixture_active_time', 'fixture_active_time'), Index('ix_matches_fixture_active_time', 'fixture_active_time'),
Index('ix_matches_composite', 'active_status', 'zip_upload_status', 'created_at'), Index('ix_matches_composite', 'active_status', 'zip_upload_status', 'created_at'),
...@@ -504,6 +505,7 @@ class MatchModel(BaseModel): ...@@ -504,6 +505,7 @@ class MatchModel(BaseModel):
zip_sha1sum = Column(String(255), comment='SHA1 checksum of ZIP file') zip_sha1sum = Column(String(255), comment='SHA1 checksum of ZIP file')
zip_upload_status = Column(String(20), default='pending', comment='Upload status: pending, uploading, completed, failed') zip_upload_status = Column(String(20), default='pending', comment='Upload status: pending, uploading, completed, failed')
zip_upload_progress = Column(Float, default=0.0, comment='Upload progress percentage (0.0-100.0)') zip_upload_progress = Column(Float, default=0.0, comment='Upload progress percentage (0.0-100.0)')
zip_validation_status = Column(String(20), default='pending', comment='Validation status: pending, validating, valid, invalid, failed')
# User tracking # User tracking
created_by = Column(Integer, ForeignKey('users.id'), comment='User who created this record') created_by = Column(Integer, ForeignKey('users.id'), comment='User who created this record')
......
...@@ -120,25 +120,28 @@ class OverlayWebChannel(QObject): ...@@ -120,25 +120,28 @@ class OverlayWebChannel(QObject):
if not data: if not data:
logger.warning("send_data_update called with null/empty data, skipping") logger.warning("send_data_update called with null/empty data, skipping")
return return
# Debug original data before cleaning # Debug original data before cleaning
logger.debug(f"OverlayWebChannel received data: {data}, type: {type(data)}") logger.debug(f"OverlayWebChannel received data: {data}, type: {type(data)}")
logger.debug(f"OverlayWebChannel data keys: {list(data.keys()) if isinstance(data, dict) else 'not dict'}")
# Clean data to remove null/undefined values before sending to JavaScript # Clean data to remove null/undefined values before sending to JavaScript
cleaned_data = self._clean_data(data) cleaned_data = self._clean_data(data)
logger.debug(f"OverlayWebChannel cleaned data: {cleaned_data}")
if not cleaned_data: if not cleaned_data:
logger.debug("All data properties were null/undefined, skipping JavaScript update") logger.debug("All data properties were null/undefined, skipping JavaScript update")
return return
# Debug what data is being sent to JavaScript # Debug what data is being sent to JavaScript
data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else [] data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else []
logger.debug(f"OverlayWebChannel sending to JavaScript: {len(cleaned_data)} items with keys: {data_keys}") logger.debug(f"OverlayWebChannel sending to JavaScript: {len(cleaned_data)} items with keys: {data_keys}")
logger.debug(f"Data type: {type(cleaned_data)}, Data is dict: {isinstance(cleaned_data, dict)}") logger.debug(f"Data type: {type(cleaned_data)}, Data is dict: {isinstance(cleaned_data, dict)}")
with QMutexLocker(self.mutex): with QMutexLocker(self.mutex):
self.overlay_data.update(cleaned_data) self.overlay_data.update(cleaned_data)
# Add additional validation just before emit # Add additional validation just before emit
if cleaned_data and isinstance(cleaned_data, dict) and any(v is not None for v in cleaned_data.values()): if cleaned_data and isinstance(cleaned_data, dict) and any(v is not None for v in cleaned_data.values()):
logger.debug(f"OverlayWebChannel emitting dataUpdated signal with: {cleaned_data}")
self.dataUpdated.emit(cleaned_data) self.dataUpdated.emit(cleaned_data)
data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else [] data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else []
logger.debug(f"Signal emitted successfully with {len(cleaned_data)} data items: {data_keys}") logger.debug(f"Signal emitted successfully with {len(cleaned_data)} data items: {data_keys}")
...@@ -148,15 +151,18 @@ class OverlayWebChannel(QObject): ...@@ -148,15 +151,18 @@ class OverlayWebChannel(QObject):
def _clean_data(self, data: Dict[str, Any]) -> Dict[str, Any]: def _clean_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
"""Clean data by removing null/undefined values before sending to JavaScript""" """Clean data by removing null/undefined values before sending to JavaScript"""
cleaned_data = {} cleaned_data = {}
for key, value in data.items(): for key, value in data.items():
# Skip null, undefined, or empty string values # Skip null, undefined, or empty string values
if value is None or value == "": if value is None or value == "":
logger.debug(f"OverlayWebChannel: Skipping null/empty property '{key}'") logger.debug(f"OverlayWebChannel: Skipping null/empty property '{key}'")
continue continue
# Keep dicts and lists as is (Qt WebChannel can handle them)
if isinstance(value, (dict, list)):
cleaned_data[key] = value
# Convert non-null values to appropriate types # Convert non-null values to appropriate types
if isinstance(value, str): elif isinstance(value, str):
cleaned_data[key] = value cleaned_data[key] = value
elif isinstance(value, bool): elif isinstance(value, bool):
cleaned_data[key] = value cleaned_data[key] = value
...@@ -165,7 +171,7 @@ class OverlayWebChannel(QObject): ...@@ -165,7 +171,7 @@ class OverlayWebChannel(QObject):
else: else:
# For other types, convert to string # For other types, convert to string
cleaned_data[key] = str(value) cleaned_data[key] = str(value)
return cleaned_data return cleaned_data
def send_position_update(self, position: float, duration: float): def send_position_update(self, position: float, duration: float):
...@@ -194,6 +200,19 @@ class OverlayWebChannel(QObject): ...@@ -194,6 +200,19 @@ class OverlayWebChannel(QObject):
logger.info(f"[JS CONSOLE.LOG] {message}") logger.info(f"[JS CONSOLE.LOG] {message}")
print(f"[JS CONSOLE.LOG] {message}") print(f"[JS CONSOLE.LOG] {message}")
@pyqtSlot(result=str)
def getCurrentData(self) -> str:
"""Provide current overlay data to JavaScript via WebChannel"""
try:
logger.debug("OverlayWebChannel: getCurrentData called")
# Return current overlay data
current_data = dict(self.overlay_data)
logger.debug(f"OverlayWebChannel: Returning current data: {current_data}")
return json.dumps(current_data)
except Exception as e:
logger.error(f"OverlayWebChannel: Failed to get current data: {e}")
return json.dumps({})
@pyqtSlot(result=str) @pyqtSlot(result=str)
def getFixtureData(self) -> str: def getFixtureData(self) -> str:
"""Provide fixture data to JavaScript via WebChannel""" """Provide fixture data to JavaScript via WebChannel"""
...@@ -258,6 +277,7 @@ class OverlayWebChannel(QObject): ...@@ -258,6 +277,7 @@ class OverlayWebChannel(QObject):
"""Provide winning outcomes data for a match to JavaScript via WebChannel""" """Provide winning outcomes data for a match to JavaScript via WebChannel"""
try: try:
logger.info(f"QtWebChannel: Getting winning outcomes for match {match_id}") logger.info(f"QtWebChannel: Getting winning outcomes for match {match_id}")
logger.debug(f"QtWebChannel: db_manager available: {self.db_manager is not None}")
# Get winning outcomes from database # Get winning outcomes from database
winning_outcomes = self._get_winning_outcomes_from_database(match_id) winning_outcomes = self._get_winning_outcomes_from_database(match_id)
...@@ -267,6 +287,8 @@ class OverlayWebChannel(QObject): ...@@ -267,6 +287,8 @@ class OverlayWebChannel(QObject):
except Exception as e: except Exception as e:
logger.error(f"QtWebChannel: Failed to get winning outcomes for match {match_id}: {e}") logger.error(f"QtWebChannel: Failed to get winning outcomes for match {match_id}: {e}")
import traceback
logger.error(f"QtWebChannel: Full traceback: {traceback.format_exc()}")
return json.dumps([]) return json.dumps([])
def _get_fixture_data_from_games_thread(self) -> Optional[List[Dict[str, Any]]]: def _get_fixture_data_from_games_thread(self) -> Optional[List[Dict[str, Any]]]:
...@@ -325,7 +347,7 @@ class OverlayWebChannel(QObject): ...@@ -325,7 +347,7 @@ class OverlayWebChannel(QObject):
MatchModel.start_time.isnot(None), MatchModel.start_time.isnot(None),
MatchModel.start_time >= datetime.combine(today, datetime.min.time()), MatchModel.start_time >= datetime.combine(today, datetime.min.time()),
MatchModel.start_time < datetime.combine(today, datetime.max.time()), MatchModel.start_time < datetime.combine(today, datetime.max.time()),
MatchModel.status.notin_(['done', 'cancelled', 'failed', 'paused']), MatchModel.status.notin_(['done', 'end', 'cancelled', 'failed', 'paused']),
MatchModel.active_status == True MatchModel.active_status == True
).order_by(MatchModel.start_time.asc()).limit(5).all() ).order_by(MatchModel.start_time.asc()).limit(5).all()
...@@ -414,14 +436,18 @@ class OverlayWebChannel(QObject): ...@@ -414,14 +436,18 @@ class OverlayWebChannel(QObject):
from ..database.models import BetDetailModel, MatchModel from ..database.models import BetDetailModel, MatchModel
from sqlalchemy import func from sqlalchemy import func
logger.debug(f"QtWebChannel: _get_winning_outcomes_from_database called for match {match_id}")
# Use the database manager passed to this channel # Use the database manager passed to this channel
if not self.db_manager: if not self.db_manager:
logger.error("Database manager not initialized") logger.error("QtWebChannel: Database manager not initialized")
return [] return []
logger.debug("QtWebChannel: Getting database session")
session = self.db_manager.get_session() session = self.db_manager.get_session()
try: try:
logger.debug(f"QtWebChannel: Executing query for match {match_id}")
# Get aggregated winning amounts by outcome for this match # Get aggregated winning amounts by outcome for this match
winning_outcomes_query = session.query( winning_outcomes_query = session.query(
BetDetailModel.outcome, BetDetailModel.outcome,
...@@ -433,6 +459,8 @@ class OverlayWebChannel(QObject): ...@@ -433,6 +459,8 @@ class OverlayWebChannel(QObject):
MatchModel.active_status == True MatchModel.active_status == True
).group_by(BetDetailModel.outcome).all() ).group_by(BetDetailModel.outcome).all()
logger.debug(f"QtWebChannel: Query returned {len(winning_outcomes_query)} results")
# Convert to dictionary format for JavaScript # Convert to dictionary format for JavaScript
outcomes_data = [] outcomes_data = []
for outcome_name, total_amount in winning_outcomes_query: for outcome_name, total_amount in winning_outcomes_query:
...@@ -442,14 +470,17 @@ class OverlayWebChannel(QObject): ...@@ -442,14 +470,17 @@ class OverlayWebChannel(QObject):
} }
outcomes_data.append(outcome_data) outcomes_data.append(outcome_data)
logger.debug(f"Retrieved {len(outcomes_data)} winning outcomes for match {match_id}") logger.debug(f"QtWebChannel: Retrieved {len(outcomes_data)} winning outcomes for match {match_id}: {outcomes_data}")
return outcomes_data return outcomes_data
finally: finally:
session.close() session.close()
logger.debug("QtWebChannel: Database session closed")
except Exception as e: except Exception as e:
logger.error(f"Failed to get winning outcomes from database: {e}") logger.error(f"QtWebChannel: Failed to get winning outcomes from database: {e}")
import traceback
logger.error(f"QtWebChannel: Full traceback: {traceback.format_exc()}")
return [] return []
...@@ -3418,7 +3449,8 @@ class QtVideoPlayer(QObject): ...@@ -3418,7 +3449,8 @@ class QtVideoPlayer(QObject):
# Validate and clean template_data before sending to overlay # Validate and clean template_data before sending to overlay
cleaned_data = self._clean_overlay_data(data_to_send) cleaned_data = self._clean_overlay_data(data_to_send)
if cleaned_data: # Only send if we have valid data after cleaning if cleaned_data: # Only send if we have valid data after cleaning
self.window._update_overlay_safe(overlay_view, cleaned_data) # Send data after a short delay to ensure page has loaded
QTimer.singleShot(500, lambda: self.window._update_overlay_safe(overlay_view, cleaned_data))
else: else:
logger.debug("Template data contained only null/undefined values, skipping update") logger.debug("Template data contained only null/undefined values, skipping update")
...@@ -3769,32 +3801,45 @@ class QtVideoPlayer(QObject): ...@@ -3769,32 +3801,45 @@ class QtVideoPlayer(QObject):
def _get_database_manager(self): def _get_database_manager(self):
"""Get database manager from message bus""" """Get database manager from message bus"""
try: try:
logger.info("QtPlayer: DEBUG - Getting database manager")
if hasattr(self, '_message_bus') and self._message_bus: if hasattr(self, '_message_bus') and self._message_bus:
logger.info("QtPlayer: DEBUG - Message bus available, trying to get db_manager from web_dashboard")
# Try to get db_manager from web_dashboard component # Try to get db_manager from web_dashboard component
try: try:
web_dashboard_queue = self._message_bus._queues.get('web_dashboard') web_dashboard_queue = self._message_bus._queues.get('web_dashboard')
logger.info(f"QtPlayer: DEBUG - Web dashboard queue: {web_dashboard_queue}")
if web_dashboard_queue and hasattr(web_dashboard_queue, 'component'): if web_dashboard_queue and hasattr(web_dashboard_queue, 'component'):
component = web_dashboard_queue.component component = web_dashboard_queue.component
logger.info(f"QtPlayer: DEBUG - Web dashboard component: {component}")
if hasattr(component, 'db_manager'): if hasattr(component, 'db_manager'):
logger.debug("QtVideoPlayer: Got db_manager from web_dashboard component") logger.info("QtPlayer: DEBUG - Got db_manager from web_dashboard component")
return component.db_manager return component.db_manager
else:
logger.info("QtPlayer: DEBUG - Web dashboard component has no db_manager attribute")
else:
logger.info("QtPlayer: DEBUG - No web dashboard queue or component")
except Exception as e: except Exception as e:
logger.debug(f"QtVideoPlayer: Could not get db_manager from message bus: {e}") logger.error(f"QtPlayer: DEBUG - Could not get db_manager from message bus: {e}")
logger.info("QtPlayer: DEBUG - Falling back to create database manager directly")
# Fallback: create database manager directly # Fallback: create database manager directly
from ..config.settings import get_user_data_dir from ..config.settings import get_user_data_dir
from ..database.manager import DatabaseManager from ..database.manager import DatabaseManager
db_path = get_user_data_dir() / "mbetterclient.db" db_path = get_user_data_dir() / "mbetterclient.db"
logger.debug(f"QtVideoPlayer: Creating database manager directly: {db_path}") logger.info(f"QtPlayer: DEBUG - Creating database manager directly: {db_path}")
db_manager = DatabaseManager(str(db_path)) db_manager = DatabaseManager(str(db_path))
logger.info("QtPlayer: DEBUG - Database manager created, initializing")
if db_manager.initialize(): if db_manager.initialize():
logger.info("QtPlayer: DEBUG - Database manager initialized successfully")
return db_manager return db_manager
else: else:
logger.warning("QtVideoPlayer: Failed to initialize database manager") logger.warning("QtPlayer: DEBUG - Failed to initialize database manager")
return None return None
except Exception as e: except Exception as e:
logger.error(f"QtVideoPlayer: Failed to get database manager: {e}") logger.error(f"QtPlayer: DEBUG - Failed to get database manager: {e}")
import traceback
logger.error(f"QtPlayer: DEBUG - Full traceback: {traceback.format_exc()}")
return None return None
def _unzip_match_zip_file(self, match_id: int): def _unzip_match_zip_file(self, match_id: int):
...@@ -4092,17 +4137,40 @@ class QtVideoPlayer(QObject): ...@@ -4092,17 +4137,40 @@ class QtVideoPlayer(QObject):
logger.info(f"Playing result video: {result}.mp4 for match {match_id}") logger.info(f"Playing result video: {result}.mp4 for match {match_id}")
# Use results overlay template # Get match details from database
match_details = self._get_match_details_for_results(match_id)
if not match_details:
logger.error(f"Could not get match details for match {match_id}")
return
# Determine under/over result if applicable
under_over_result = None
main_result = result
if result in ['UNDER', 'OVER']:
under_over_result = result
main_result = None # No separate main result
elif result not in ['UNDER', 'OVER']:
# For main results, check if there's a separate under/over from database
# This is a simplified approach - in practice, you'd need to determine this from the match outcome
pass
# Prepare overlay data for results template
overlay_data = { overlay_data = {
'outcome': main_result,
'result': main_result, # For backwards compatibility
'under_over_result': under_over_result,
'match': {
'fighter1_township': match_details.get('fighter1_township', 'Fighter 1'),
'fighter2_township': match_details.get('fighter2_township', 'Fighter 2'),
'venue': match_details.get('venue', 'Venue')
},
'match_id': match_id, 'match_id': match_id,
'fixture_id': fixture_id, 'fixture_id': fixture_id,
'result': result,
'fighter1': 'Fighter 1', # TODO: Get from database
'fighter2': 'Fighter 2', # TODO: Get from database
'venue': 'Venue', # TODO: Get from database
'is_result_video': True 'is_result_video': True
} }
logger.info(f"Sending results data to overlay: {overlay_data}")
# Play the result video with results overlay template # Play the result video with results overlay template
self.window.play_video( self.window.play_video(
video_path, video_path,
...@@ -4672,3 +4740,106 @@ class QtVideoPlayer(QObject): ...@@ -4672,3 +4740,106 @@ class QtVideoPlayer(QObject):
import traceback import traceback
logger.error(f"QtPlayer: Full traceback: {traceback.format_exc()}") logger.error(f"QtPlayer: Full traceback: {traceback.format_exc()}")
return None return None
def _get_match_details_for_results(self, match_id: int) -> Optional[Dict[str, Any]]:
"""Get match details for results overlay"""
try:
from ..database.models import MatchModel
logger.info(f"QtPlayer: DEBUG - Getting match details for match {match_id}")
# First try to get results from game thread (for matches that aren't done yet)
logger.info("QtPlayer: DEBUG - Trying to get match results from game thread")
game_thread_results = self._get_match_results_from_game_thread(match_id)
if game_thread_results:
logger.info(f"QtPlayer: DEBUG - Got match results from game thread: {game_thread_results}")
return game_thread_results
logger.info("QtPlayer: DEBUG - No results from game thread, trying database")
# Get database manager
logger.info("QtPlayer: DEBUG - Getting database manager")
db_manager = self._get_database_manager()
if not db_manager:
logger.error("QtPlayer: DEBUG - Database manager not available for match details, using defaults")
return {
'fighter1_township': 'Fighter 1',
'fighter2_township': 'Fighter 2',
'venue': 'Venue'
}
logger.info("QtPlayer: DEBUG - Database manager available, getting session")
session = db_manager.get_session()
logger.info("QtPlayer: DEBUG - Database session obtained")
try:
logger.info(f"QtPlayer: DEBUG - Executing query for match {match_id}")
# Get match details
match = session.query(MatchModel).filter_by(id=match_id).first()
logger.info(f"QtPlayer: DEBUG - Query executed, match object: {match}")
if not match:
logger.warning(f"QtPlayer: DEBUG - Match {match_id} not found in database, using defaults")
return {
'fighter1_township': 'Fighter 1',
'fighter2_township': 'Fighter 2',
'venue': 'Venue'
}
logger.info(f"QtPlayer: DEBUG - Match found, extracting details")
match_details = {
'fighter1_township': match.fighter1_township or 'Fighter 1',
'fighter2_township': match.fighter2_township or 'Fighter 2',
'venue': match.venue_kampala_township or 'Venue'
}
logger.info(f"QtPlayer: DEBUG - Retrieved match details for match {match_id}: {match_details}")
return match_details
finally:
logger.info("QtPlayer: DEBUG - Closing database session")
session.close()
except Exception as e:
logger.error(f"QtPlayer: DEBUG - Failed to get match details for results: {e}, using defaults")
import traceback
logger.error(f"QtPlayer: DEBUG - Full traceback: {traceback.format_exc()}")
return {
'fighter1_township': 'Fighter 1',
'fighter2_township': 'Fighter 2',
'venue': 'Venue'
}
def _get_match_results_from_game_thread(self, match_id: int) -> Optional[Dict[str, Any]]:
"""Get match results from game thread temporary storage"""
try:
logger.info(f"QtPlayer: DEBUG - Requesting match results from game thread for match {match_id}")
# Send message to game thread to get match results
if hasattr(self, '_message_bus') and self._message_bus:
from ..core.message_bus import Message, MessageType
request_message = Message(
type=MessageType.CUSTOM,
sender="qt_player",
recipient="games_thread",
data={
"request": "get_match_results",
"match_id": match_id
}
)
logger.info("QtPlayer: DEBUG - Sending request to game thread")
# For synchronous response, we can't easily wait, so this might not work
# The game thread would need to respond asynchronously
# For now, return None and rely on database
logger.info("QtPlayer: DEBUG - Game thread request sent (async), returning None for now")
return None
else:
logger.info("QtPlayer: DEBUG - No message bus available for game thread request")
return None
except Exception as e:
logger.error(f"QtPlayer: DEBUG - Failed to get match results from game thread: {e}")
return None
...@@ -3,6 +3,7 @@ ...@@ -3,6 +3,7 @@
<head> <head>
<meta charset="utf-8"> <meta charset="utf-8">
<title>Results Overlay</title> <title>Results Overlay</title>
<script src="qrc:///qtwebchannel/qwebchannel.js"></script>
<style> <style>
* { * {
margin: 0; margin: 0;
...@@ -53,9 +54,14 @@ ...@@ -53,9 +54,14 @@
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3); box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
backdrop-filter: blur(10px); backdrop-filter: blur(10px);
border: 2px solid rgba(255, 255, 255, 0.1); border: 2px solid rgba(255, 255, 255, 0.1);
opacity: 1; /* Always visible */ opacity: 0; /* Initially transparent */
padding-bottom: 50px; padding-bottom: 50px;
display: flex; /* Always visible, content hidden instead */ display: flex;
transition: opacity 0.5s ease-out;
}
.results-panel.visible {
opacity: 1;
} }
.results-content { .results-content {
...@@ -738,7 +744,7 @@ ...@@ -738,7 +744,7 @@
<!-- Combined Result Display --> <!-- Combined Result Display -->
<div class="combined-result-display" id="combinedResultDisplay"> <div class="combined-result-display" id="combinedResultDisplay">
<div class="combined-result-text" id="combinedResultText"> <div class="combined-result-text" id="combinedResultText">
<span id="mainResult">WIN1</span> / <span id="underOverResult">UNDER</span> <span id="mainResult">WIN1</span> - <span id="underOverResult">UNDER</span>
</div> </div>
</div> </div>
...@@ -766,6 +772,11 @@ ...@@ -766,6 +772,11 @@
let videoStartTime = null; let videoStartTime = null;
let contentDelayTimer = null; let contentDelayTimer = null;
let resultsTimer = null; let resultsTimer = null;
// Define showLoadingState for compatibility (results template doesn't use loading state)
function showLoadingState() {
console.log('DEBUG: showLoadingState called (no-op for results template)');
}
// Outcome categories for styling // Outcome categories for styling
const outcomeCategories = { const outcomeCategories = {
...@@ -786,13 +797,13 @@ ...@@ -786,13 +797,13 @@
// Function to update overlay data (called by Qt WebChannel) // Function to update overlay data (called by Qt WebChannel)
function updateOverlayData(data) { function updateOverlayData(data) {
console.log('Received overlay data:', data); console.log('DEBUG: updateOverlayData called with data:', data);
overlayData = data || {}; overlayData = data || {};
// Only update if we have valid data // Only update if we have valid data
if (data && (data.outcome || data.result)) { if (data && (data.outcome || data.result)) {
let result = data.outcome || data.result; let result = data.outcome || data.result;
console.log('Processing result:', result); console.log('DEBUG: Processing valid result:', result);
// Always treat the main result as the primary outcome // Always treat the main result as the primary outcome
currentMainResult = result; currentMainResult = result;
...@@ -800,7 +811,7 @@ ...@@ -800,7 +811,7 @@
// Check if under/over result is provided separately // Check if under/over result is provided separately
if (data.under_over_result) { if (data.under_over_result) {
currentUnderOverResult = data.under_over_result; currentUnderOverResult = data.under_over_result;
console.log('Under/over result provided separately:', currentUnderOverResult); console.log('DEBUG: Under/over result provided separately:', currentUnderOverResult);
} else { } else {
// Fallback: determine if main result is under/over // Fallback: determine if main result is under/over
if (result === 'UNDER' || result === 'OVER') { if (result === 'UNDER' || result === 'OVER') {
...@@ -813,18 +824,33 @@ ...@@ -813,18 +824,33 @@
if (data.match) { if (data.match) {
currentMatch = data.match; currentMatch = data.match;
console.log('DEBUG: Match data received:', data.match);
} }
if (data.match_id) { if (data.match_id) {
console.log('DEBUG: Match ID received:', data.match_id);
// Fetch winning outcomes for this match // Fetch winning outcomes for this match
fetchWinningOutcomes(data.match_id); fetchWinningOutcomes(data.match_id);
// Check if results have already been shown for this match (handles overlay reloads)
const resultsShownKey = 'results_shown_' + data.match_id;
const alreadyShown = sessionStorage.getItem(resultsShownKey) === 'true';
console.log('DEBUG: Results already shown for match?', alreadyShown);
if (alreadyShown) {
console.log('DEBUG: Results already shown for this match, displaying immediately');
contentVisible = true;
showResultsPanel();
showResultsContent();
return; // Don't prepare animation again
}
} }
// Prepare data but don't start animation yet - wait for video to actually start playing // Prepare data and start 5-second timer to show results
console.log('Results data received, preparing animation data'); console.log('DEBUG: Results data received, preparing animation data');
prepareResultsAnimation(); prepareResultsAnimation();
} else { } else {
console.log('DEBUG: No valid data received, showing loading state');
// No valid data, show loading state // No valid data, show loading state
showLoadingState(); showLoadingState();
} }
...@@ -835,9 +861,6 @@ ...@@ -835,9 +861,6 @@
if (animationStarted) return; if (animationStarted) return;
animationStarted = true; animationStarted = true;
// Show results panel immediately (but content hidden)
showResultsPanel();
// Update fighters display // Update fighters display
updateFightersDisplay(); updateFightersDisplay();
...@@ -847,23 +870,38 @@ ...@@ -847,23 +870,38 @@
// Update winning bets display // Update winning bets display
updateWinningBetsDisplay(); updateWinningBetsDisplay();
// Content will be shown after 5 seconds when video starts // Show results after 5 seconds from data receipt
setTimeout(() => {
if (!contentVisible) {
contentVisible = true;
console.log('Showing results after 5 seconds from data received');
// Mark results as shown in sessionStorage
if (overlayData && overlayData.match_id) {
sessionStorage.setItem('results_shown_' + overlayData.match_id, 'true');
}
showResultsPanel();
showResultsContent();
}
}, 5000);
} }
// Fetch winning outcomes for the match // Fetch winning outcomes for the match
function fetchWinningOutcomes(matchId) { function fetchWinningOutcomes(matchId) {
console.log('Fetching winning outcomes for match:', matchId); console.log('DEBUG: fetchWinningOutcomes called for match:', matchId);
// Use Qt WebChannel to request winning outcomes data // Use Qt WebChannel to request winning outcomes data
if (window.overlay && window.overlay.getWinningOutcomes) { if (window.overlay && window.overlay.getWinningOutcomes) {
console.log('DEBUG: Qt WebChannel available, requesting winning outcomes');
try { try {
const outcomesJson = window.overlay.getWinningOutcomes(matchId); const outcomesJson = window.overlay.getWinningOutcomes(matchId);
const outcomesData = JSON.parse(outcomesJson); const outcomesData = JSON.parse(outcomesJson);
console.log('Received winning outcomes:', outcomesData); console.log('DEBUG: Received winning outcomes:', outcomesData);
winningOutcomes = outcomesData || []; winningOutcomes = outcomesData || [];
updateWinningBetsDisplay(); updateWinningBetsDisplay();
} catch (error) { } catch (error) {
console.error('Failed to get winning outcomes:', error); console.error('DEBUG: Failed to get winning outcomes:', error);
// Fallback: show sample data for testing // Fallback: show sample data for testing
winningOutcomes = [ winningOutcomes = [
{ outcome: 'WIN1', amount: 125.00 }, { outcome: 'WIN1', amount: 125.00 },
...@@ -873,7 +911,7 @@ ...@@ -873,7 +911,7 @@
updateWinningBetsDisplay(); updateWinningBetsDisplay();
} }
} else { } else {
console.warn('Qt WebChannel not available for fetching winning outcomes'); console.warn('DEBUG: Qt WebChannel not available for fetching winning outcomes');
// Fallback: show sample data for testing // Fallback: show sample data for testing
winningOutcomes = [ winningOutcomes = [
{ outcome: 'WIN1', amount: 125.00 }, { outcome: 'WIN1', amount: 125.00 },
...@@ -884,19 +922,31 @@ ...@@ -884,19 +922,31 @@
} }
} }
// Show results panel (always visible now) // Show results panel with fade-in animation
function showResultsPanel() { function showResultsPanel() {
console.log('DEBUG: showResultsPanel called');
const resultsPanel = document.getElementById('resultsPanel'); const resultsPanel = document.getElementById('resultsPanel');
resultsPanel.style.display = 'flex'; if (resultsPanel) {
console.log('DEBUG: Adding visible class to results panel');
resultsPanel.classList.add('visible');
} else {
console.log('DEBUG: ERROR - resultsPanel element not found');
}
} }
// Show results content with animation after delay // Show results content with animation after delay
function showResultsContent() { function showResultsContent() {
console.log('DEBUG: showResultsContent called');
const resultsContent = document.getElementById('resultsContent'); const resultsContent = document.getElementById('resultsContent');
resultsContent.classList.add('visible'); if (resultsContent) {
console.log('DEBUG: Adding visible class to results content');
resultsContent.classList.add('visible');
} else {
console.log('DEBUG: ERROR - resultsContent element not found');
}
} }
// Handle video position changes to detect when video starts playing and reaches 5 seconds // Handle video position changes (for logging/debugging purposes)
function handlePositionChange(position, duration) { function handlePositionChange(position, duration) {
// Check if video has started playing (position > 0) // Check if video has started playing (position > 0)
if (position > 0 && !videoStarted) { if (position > 0 && !videoStarted) {
...@@ -904,22 +954,8 @@ ...@@ -904,22 +954,8 @@
console.log('Video started playing at position:', position); console.log('Video started playing at position:', position);
} }
// Check if video has been playing for at least 5 seconds // Log position for debugging
if (videoStarted && position >= 5 && !contentVisible) { console.log('Video position:', position, 'duration:', duration);
contentVisible = true;
console.log('Video has been playing for 5+ seconds, showing results content');
// Clear any existing timers
if (resultsTimer) {
clearTimeout(resultsTimer);
}
if (contentDelayTimer) {
clearTimeout(contentDelayTimer);
}
// Show results content with animation
showResultsContent();
}
} }
// Update fighters display // Update fighters display
...@@ -1040,48 +1076,105 @@ ...@@ -1040,48 +1076,105 @@
// Initialize when DOM is loaded // Initialize when DOM is loaded
document.addEventListener('DOMContentLoaded', function() { document.addEventListener('DOMContentLoaded', function() {
console.log('Results overlay initialized'); console.log('DEBUG: Results overlay DOM loaded and initialized');
console.log('DEBUG: sessionStorage available:', typeof sessionStorage !== 'undefined');
// Always show results panel with default content
showResultsPanel(); // Setup WebChannel communication
setupWebChannel();
// Timer will start when video begins playing (detected via position changes)
console.log('Waiting for video to start playing before showing results content'); // Panel and content will be shown after 5 seconds when video starts playing
console.log('DEBUG: Waiting for results data to be received');
// Fallback: show test results after 5 seconds if no data received
setTimeout(() => {
if (!contentVisible) {
console.log('DEBUG: Fallback - No data received after 5 seconds, showing test results');
// Set test data
currentMainResult = 'WIN1';
currentUnderOverResult = 'OVER';
currentMatch = { fighter1_township: 'Test Fighter 1', fighter2_township: 'Test Fighter 2' };
winningOutcomes = [
{ outcome: 'WIN1', amount: 100.00 },
{ outcome: 'OVER', amount: 50.00 }
];
contentVisible = true;
showResultsPanel();
showResultsContent();
updateFightersDisplay();
updateCombinedResultDisplay();
updateWinningBetsDisplay();
}
}, 5000);
}); });
// Qt WebChannel initialization (when available) // Setup WebChannel communication (similar to fixtures.html)
if (typeof QWebChannel !== 'undefined') { function setupWebChannel() {
new QWebChannel(qt.webChannelTransport, function(channel) { // Check if WebChannel is already set up by overlay.js
console.log('WebChannel initialized for results overlay'); if (window.overlay) {
console.log('DEBUG: WebChannel already set up by overlay.js');
// Connect to overlay object if available
if (channel.objects.overlay) { // Test WebChannel
window.overlay = channel.objects.overlay; if (window.overlay && window.overlay.log) {
window.overlay.log('TEST: WebChannel connection successful');
}
// Connect dataChanged signal // Listen for data updates from Python
window.overlay.dataChanged.connect(function(data) { if (window.overlay.dataUpdated) {
window.overlay.dataUpdated.connect(function(data) {
console.log('DEBUG: Received data update from Python:', data);
updateOverlayData(data); updateOverlayData(data);
}); });
}
// Connect positionChanged signal // Connect positionChanged signal
if (window.overlay.positionChanged) { if (window.overlay.positionChanged) {
window.overlay.positionChanged.connect(function(position, duration) { console.log('DEBUG: Connecting positionChanged signal');
if (position !== null && duration !== null) { window.overlay.positionChanged.connect(function(position, duration) {
handlePositionChange(position, duration); if (position !== null && duration !== null) {
} else { handlePositionChange(position, duration);
console.warn('positionChanged signal received null/undefined parameters, skipping'); } else {
} console.warn('DEBUG: positionChanged signal received null/undefined parameters');
}); }
} });
}
return;
}
// Get initial data // Fallback: setup WebChannel if overlay.js didn't do it
if (window.overlay.getCurrentData) { if (typeof qt !== 'undefined' && qt.webChannelTransport) {
window.overlay.getCurrentData(function(data) { try {
updateOverlayData(data); new QWebChannel(qt.webChannelTransport, function(channel) {
}); console.log('DEBUG: WebChannel connected successfully (fallback)');
}
// Connect to overlay object
window.overlay = channel.objects.overlay;
// Listen for data updates from Python
if (window.overlay && window.overlay.dataUpdated) {
window.overlay.dataUpdated.connect(function(data) {
console.log('DEBUG: Received data update from Python:', data);
updateOverlayData(data);
});
}
// Connect positionChanged signal
if (window.overlay.positionChanged) {
console.log('DEBUG: Connecting positionChanged signal');
window.overlay.positionChanged.connect(function(position, duration) {
if (position !== null && duration !== null) {
handlePositionChange(position, duration);
} else {
console.warn('DEBUG: positionChanged signal received null/undefined parameters');
}
});
}
});
} catch (e) {
console.log('DEBUG: Failed to setup WebChannel:', e);
} }
}); } else {
console.log('DEBUG: WebChannel not available');
}
} }
// Export functions for external use // Export functions for external use
...@@ -1090,17 +1183,15 @@ ...@@ -1090,17 +1183,15 @@
</script> </script>
<!-- <!--
IMPORTANT: When creating or editing custom templates, always maintain these two script tags: IMPORTANT: When creating or editing custom templates, always maintain this script tag:
1. qrc:///qtwebchannel/qwebchannel.js - Required for Qt WebChannel communication qrc:///qtwebchannel/qwebchannel.js - Required for Qt WebChannel communication
2. overlay://overlay.js - Required for overlay functionality and data updates
This script enables communication between the Qt application and the overlay template.
These scripts enable communication between the Qt application and the overlay template. The results.html template handles its own WebChannel setup and does not use overlay.js
Without them, the template will not receive data updates or function properly. to avoid conflicts with the custom overlay elements.
NOTE: When editing this template or creating new ones, never remove these script sources! NOTE: When editing this template, never remove the qwebchannel.js script source!
The overlay:// custom scheme ensures JavaScript files work for both built-in and uploaded templates.
--> -->
<script src="qrc:///qtwebchannel/qwebchannel.js"></script> <script src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script src="overlay://overlay.js"></script>
</body> </body>
</html> </html>
\ No newline at end of file
...@@ -116,6 +116,7 @@ ...@@ -116,6 +116,7 @@
<th>Fighters</th> <th>Fighters</th>
<th>Status</th> <th>Status</th>
<th>Start Time</th> <th>Start Time</th>
<th>End Time</th>
<th>Result</th> <th>Result</th>
<th>Outcomes</th> <th>Outcomes</th>
<th>Actions</th> <th>Actions</th>
...@@ -391,6 +392,7 @@ function renderMatchesTable(matches) { ...@@ -391,6 +392,7 @@ function renderMatchesTable(matches) {
matches.forEach(match => { matches.forEach(match => {
const row = document.createElement('tr'); const row = document.createElement('tr');
const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set'; const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set';
const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set';
const resultDisplay = match.result || 'Not available'; const resultDisplay = match.result || 'Not available';
const outcomesCount = match.outcome_count || 0; const outcomesCount = match.outcome_count || 0;
...@@ -403,6 +405,7 @@ function renderMatchesTable(matches) { ...@@ -403,6 +405,7 @@ function renderMatchesTable(matches) {
</td> </td>
<td>${getStatusBadge(match)}</td> <td>${getStatusBadge(match)}</td>
<td><small class="text-info">${startTimeDisplay}</small></td> <td><small class="text-info">${startTimeDisplay}</small></td>
<td><small class="text-success">${endTimeDisplay}</small></td>
<td><small class="text-muted">${resultDisplay}</small></td> <td><small class="text-muted">${resultDisplay}</small></td>
<td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td> <td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td>
<td> <td>
...@@ -436,8 +439,9 @@ function updateMatchesTable(matches) { ...@@ -436,8 +439,9 @@ function updateMatchesTable(matches) {
matches.forEach(match => { matches.forEach(match => {
processedMatches.add(match.id); processedMatches.add(match.id);
const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set'; const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set';
const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set';
const resultDisplay = match.result || 'Not available'; const resultDisplay = match.result || 'Not available';
const outcomesCount = match.outcome_count || 0; const outcomesCount = match.outcome_count || 0;
...@@ -450,6 +454,7 @@ function updateMatchesTable(matches) { ...@@ -450,6 +454,7 @@ function updateMatchesTable(matches) {
</td> </td>
<td>${getStatusBadge(match)}</td> <td>${getStatusBadge(match)}</td>
<td><small class="text-info">${startTimeDisplay}</small></td> <td><small class="text-info">${startTimeDisplay}</small></td>
<td><small class="text-success">${endTimeDisplay}</small></td>
<td><small class="text-muted">${resultDisplay}</small></td> <td><small class="text-muted">${resultDisplay}</small></td>
<td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td> <td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td>
<td> <td>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment