Almost there

parent 7fc35762
......@@ -633,6 +633,14 @@ class UpdatesResponseHandler(ResponseHandler):
if not zip_filename:
continue
# Check if ZIP has already been validated successfully
if match.zip_validation_status == 'valid':
logger.debug(f"ZIP file already validated: {zip_filename} for fixture {fixture_id}")
continue
elif match.zip_validation_status == 'validating':
logger.debug(f"ZIP file validation in progress: {zip_filename} for fixture {fixture_id}")
continue
logger.debug(f"Validating ZIP file: {zip_filename} for fixture {fixture_id}")
zip_path = self.zip_storage_dir / zip_filename
......@@ -649,6 +657,9 @@ class UpdatesResponseHandler(ResponseHandler):
fixture_valid = False
else:
logger.debug(f"ZIP file validation passed: {zip_filename}")
# Mark as validated in database
match.zip_validation_status = 'valid'
session.commit()
if not fixture_valid:
logger.warning(f"Fixture {fixture_id} has invalid/missing ZIP files: {missing_or_invalid_zips}")
......
......@@ -5,13 +5,13 @@ Games thread component for managing game-related operations
import time
import logging
import threading
from datetime import datetime
from datetime import datetime, timedelta
from typing import Optional, Dict, Any, List
from .thread_manager import ThreadedComponent
from .message_bus import MessageBus, Message, MessageType, MessageBuilder
from ..database.manager import DatabaseManager
from ..database.models import MatchModel, MatchStatus, BetDetailModel, MatchOutcomeModel, GameConfigModel
from ..database.models import MatchModel, MatchStatus, BetDetailModel, MatchOutcomeModel, GameConfigModel, ExtractionAssociationModel
logger = logging.getLogger(__name__)
......@@ -198,15 +198,11 @@ class GamesThread(ThreadedComponent):
self._send_response(message, "discarded", f"Fixture {fixture_id} is already completed")
return
# Check if all required ZIP files are available for this fixture
if not self._are_fixture_zips_available(fixture_id):
logger.info(f"Fixture {fixture_id} has missing or invalid ZIP files - discarding START_GAME message")
self._send_response(message, "waiting_for_downloads", f"Fixture {fixture_id} is waiting for ZIP file downloads")
return
# Fixture is not terminal and has all ZIPs, activate it
# Fixture is not terminal, activate it (ZIP validation happens asynchronously)
logger.info(f"Activating provided fixture: {fixture_id}")
self._activate_fixture(fixture_id, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(fixture_id)
return
# No fixture_id provided - check today's fixtures
......@@ -224,27 +220,19 @@ class GamesThread(ThreadedComponent):
# Step 3: Check if there are active fixtures with today's date
active_fixture = self._find_active_today_fixture()
if active_fixture:
# Check if all required ZIP files are available for this fixture
if not self._are_fixture_zips_available(active_fixture):
logger.info(f"Active fixture {active_fixture} has missing or invalid ZIP files - discarding START_GAME message")
self._send_response(message, "waiting_for_downloads", f"Fixture {active_fixture} is waiting for ZIP file downloads")
return
logger.info(f"Found active fixture for today: {active_fixture}")
self._activate_fixture(active_fixture, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(active_fixture)
return
# Step 4: No active fixtures found - initialize new fixture
logger.info("No active fixtures found - initializing new fixture")
new_fixture_id = self._initialize_new_fixture()
if new_fixture_id:
# Check if all required ZIP files are available for the new fixture
if not self._are_fixture_zips_available(new_fixture_id):
logger.info(f"New fixture {new_fixture_id} has missing or invalid ZIP files - discarding START_GAME message")
self._send_response(message, "waiting_for_downloads", f"Fixture {new_fixture_id} is waiting for ZIP file downloads")
return
self._activate_fixture(new_fixture_id, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(new_fixture_id)
else:
logger.warning("Could not initialize new fixture")
self._send_response(message, "error", "Could not initialize new fixture")
......@@ -547,7 +535,7 @@ class GamesThread(ThreadedComponent):
logger.info(f"All matches completed for fixture {self.current_fixture_id} - creating new matches from old completed ones")
# Instead of stopping the game, create 5 new matches from old completed matches
old_matches = self._select_random_completed_matches(5, session)
old_matches = self._select_random_completed_matches_with_fallback(5, self.current_fixture_id, session)
if old_matches:
self._create_matches_from_old_matches(self.current_fixture_id, old_matches, session)
logger.info(f"Created 5 new matches in fixture {self.current_fixture_id} from old completed matches")
......@@ -797,9 +785,27 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to check if only non-terminal fixture: {e}")
return False
def _are_fixture_zips_available(self, fixture_id: str) -> bool:
"""Check if all required ZIP files are available for a fixture"""
def _start_async_zip_validation(self, fixture_id: str):
"""Start asynchronous ZIP validation for a fixture without blocking"""
try:
logger.info(f"Starting asynchronous ZIP validation for fixture {fixture_id}")
# Start validation in a background thread
validation_thread = threading.Thread(
target=self._validate_fixture_zips_async,
args=(fixture_id,),
daemon=True
)
validation_thread.start()
except Exception as e:
logger.error(f"Failed to start async ZIP validation for fixture {fixture_id}: {e}")
def _validate_fixture_zips_async(self, fixture_id: str):
"""Validate ZIP files for a fixture asynchronously"""
try:
logger.info(f"Async ZIP validation started for fixture {fixture_id}")
session = self.db_manager.get_session()
try:
# Get all active matches for this fixture that have ZIP files
......@@ -811,48 +817,148 @@ class GamesThread(ThreadedComponent):
if not matches_with_zips:
logger.debug(f"Fixture {fixture_id} has no matches requiring ZIP files")
return True # No ZIP files required, so it's "available"
return
# Check if all required ZIP files exist and are valid
from ..config.settings import get_user_data_dir
from pathlib import Path
logger.info(f"Validating {len(matches_with_zips)} ZIP files for fixture {fixture_id}")
user_data_dir = get_user_data_dir()
missing_or_invalid_zips = []
# Reset any stale 'validating' statuses (older than 5 minutes)
stale_threshold = datetime.utcnow() - timedelta(minutes=5)
stale_count = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id,
MatchModel.zip_validation_status == 'validating',
MatchModel.updated_at < stale_threshold
).update({'zip_validation_status': 'pending'})
if stale_count > 0:
logger.info(f"Reset {stale_count} stale 'validating' statuses to 'pending'")
session.commit()
for match in matches_with_zips:
# Check if already validated
if match.zip_validation_status == 'valid':
logger.debug(f"Match {match.match_number} ZIP already validated, skipping")
continue
elif match.zip_validation_status == 'validating':
logger.debug(f"Match {match.match_number} ZIP validation in progress, skipping")
continue
# Start validation for this match
self._validate_single_zip_async(match.id, session)
finally:
session.close()
logger.info(f"Async ZIP validation completed for fixture {fixture_id}")
except Exception as e:
logger.error(f"Async ZIP validation failed for fixture {fixture_id}: {e}")
def _validate_single_zip_async(self, match_id: int, session):
"""Validate a single ZIP file asynchronously"""
try:
match = session.query(MatchModel).filter(MatchModel.id == match_id).first()
if not match:
logger.warning(f"Match {match_id} not found for ZIP validation")
return
# Update status to validating
match.zip_validation_status = 'validating'
session.commit()
# Start validation in separate thread
validation_thread = threading.Thread(
target=self._perform_zip_validation,
args=(match_id,),
daemon=True
)
validation_thread.start()
except Exception as e:
logger.error(f"Failed to start ZIP validation for match {match_id}: {e}")
def _perform_zip_validation(self, match_id: int):
"""Perform actual ZIP validation"""
try:
session = self.db_manager.get_session()
try:
match = session.query(MatchModel).filter(MatchModel.id == match_id).first()
if not match:
logger.warning(f"Match {match_id} not found during ZIP validation")
return
zip_filename = match.zip_filename
if not zip_filename:
continue
logger.warning(f"Match {match_id} has no ZIP filename")
return
from ..config.settings import get_user_data_dir
from pathlib import Path
import zipfile
user_data_dir = get_user_data_dir()
zip_path = user_data_dir / "zip_files" / zip_filename
logger.info(f"Validating ZIP file: {zip_path}")
# Check if file exists
if not zip_path.exists():
missing_or_invalid_zips.append(zip_filename)
logger.debug(f"ZIP file missing for match {match.match_number}: {zip_path}")
else:
# Quick validation - check file size > 0
try:
logger.error(f"ZIP file missing: {zip_path}")
match.zip_validation_status = 'invalid'
session.commit()
return
# Check file size
if zip_path.stat().st_size == 0:
missing_or_invalid_zips.append(zip_filename)
logger.debug(f"ZIP file empty for match {match.match_number}: {zip_path}")
except OSError as e:
missing_or_invalid_zips.append(zip_filename)
logger.debug(f"Cannot access ZIP file for match {match.match_number}: {e}")
if missing_or_invalid_zips:
logger.info(f"Fixture {fixture_id} has missing/invalid ZIP files: {missing_or_invalid_zips}")
return False
else:
logger.debug(f"All required ZIP files available for fixture {fixture_id}")
return True
logger.error(f"ZIP file empty: {zip_path}")
match.zip_validation_status = 'invalid'
session.commit()
return
# Try to open and validate ZIP structure
try:
with zipfile.ZipFile(str(zip_path), 'r') as zip_ref:
# Check for required video files (WIN1.mp4, WIN2.mp4, etc.)
file_list = zip_ref.namelist()
required_videos = ['WIN1.mp4', 'WIN2.mp4', 'DRAW.mp4'] # Basic requirements
found_videos = [f for f in file_list if f.endswith('.mp4')]
if not found_videos:
logger.error(f"ZIP file contains no MP4 files: {zip_path}")
match.zip_validation_status = 'invalid'
session.commit()
return
logger.info(f"ZIP file valid - contains {len(found_videos)} video files: {zip_path}")
except zipfile.BadZipFile as e:
logger.error(f"Invalid ZIP file: {zip_path} - {e}")
match.zip_validation_status = 'invalid'
session.commit()
return
except Exception as e:
logger.error(f"Error validating ZIP file: {zip_path} - {e}")
match.zip_validation_status = 'invalid'
session.commit()
return
# Validation successful
match.zip_validation_status = 'valid'
session.commit()
logger.info(f"ZIP validation successful for match {match_id}: {zip_filename}")
finally:
session.close()
except Exception as e:
logger.error(f"Failed to check ZIP availability for fixture {fixture_id}: {e}")
return False
logger.error(f"ZIP validation failed for match {match_id}: {e}")
try:
session = self.db_manager.get_session()
match = session.query(MatchModel).filter(MatchModel.id == match_id).first()
if match:
match.zip_validation_status = 'invalid'
session.commit()
session.close()
except Exception as update_e:
logger.error(f"Failed to update validation status after error: {update_e}")
def _find_active_today_fixture(self) -> Optional[str]:
"""Find an active fixture with today's date"""
......@@ -917,7 +1023,7 @@ class GamesThread(ThreadedComponent):
# No fixtures with no start_time found - create a new fixture from old completed matches
logger.info("No fixtures with no start_time found - creating new fixture from old completed matches")
old_matches = self._select_random_completed_matches(5, session)
old_matches = self._select_random_completed_matches_with_fallback(5, None, session)
if old_matches:
fixture_id = self._create_new_fixture_from_old_matches(old_matches, session)
if fixture_id:
......@@ -1816,6 +1922,10 @@ class GamesThread(ThreadedComponent):
logger.info(f"DEBUG _set_match_status_and_result: Found match {match_id}, current status='{match.status}', current result='{match.result}'")
match.status = status
match.result = result
# Set end_time when match is completed
if status == 'done':
match.end_time = datetime.utcnow()
logger.info(f"DEBUG _set_match_status_and_result: Set end_time for match {match_id}")
session.commit()
logger.info(f"Updated match {match_id} status to {status} and result to {result}")
......@@ -1956,6 +2066,13 @@ class GamesThread(ThreadedComponent):
selected_result = self._weighted_result_selection(eligible_payouts, session, match_id)
logger.info(f"🎯 [EXTRACTION DEBUG] Selected result: {selected_result}")
# Step 7.1: Log the winning outcomes configured for this extraction result
winning_outcomes = session.query(ExtractionAssociationModel.outcome_name).filter(
ExtractionAssociationModel.extraction_result == selected_result
).distinct().all()
winning_outcome_names = [outcome.outcome_name for outcome in winning_outcomes]
logger.info(f"🏆 [EXTRACTION DEBUG] Winning outcomes for result '{selected_result}': {winning_outcome_names}")
# Step 8: Update bet results
logger.info(f"💾 [EXTRACTION DEBUG] Step 8: Updating bet results for match {match_id}")
self._update_bet_results(match_id, selected_result, session)
......@@ -2061,6 +2178,11 @@ class GamesThread(ThreadedComponent):
under_over_outcome = 'UNDER' if selected_result == 'UNDER' else 'OVER' if selected_result == 'OVER' else None
logger.info(f"DEBUG _update_bet_results: under_over_outcome = '{under_over_outcome}'")
# DEBUG: Log the current match result before updating
match = session.query(MatchModel).filter_by(id=match_id).first()
if match:
logger.info(f"DEBUG _update_bet_results: Current match.result before formatting = '{match.result}'")
if under_over_outcome:
# UNDER/OVER bet wins
under_over_bets = session.query(BetDetailModel).filter(
......@@ -2116,12 +2238,44 @@ class GamesThread(ThreadedComponent):
).update({'result': 'lost'})
logger.info(f"DEBUG _update_bet_results: Set {losing_count} other bets to lost")
# Update the match result in the matches table
# Update the match result in the matches table with winning outcomes in parentheses
match = session.query(MatchModel).filter_by(id=match_id).first()
if match:
logger.info(f"DEBUG _update_bet_results: Before update - match.result = '{match.result}'")
match.result = selected_result
logger.info(f"Updated match {match_id} result to {selected_result}")
# Get winning outcomes for the selected result
winning_outcomes = session.query(ExtractionAssociationModel.outcome_name).filter(
ExtractionAssociationModel.extraction_result == selected_result
).distinct().all()
winning_outcome_names = [outcome.outcome_name for outcome in winning_outcomes]
logger.info(f"DEBUG _update_bet_results: Found {len(winning_outcomes)} winning outcomes for '{selected_result}': {winning_outcome_names}")
# Include UNDER/OVER if applicable
under_over_result = None
if under_over_outcome:
under_over_result = under_over_outcome
logger.info(f"DEBUG _update_bet_results: UNDER/OVER result detected: '{under_over_result}'")
# Format result to include winning outcomes and UNDER/OVER
result_parts = []
if selected_result not in ['UNDER', 'OVER']:
result_parts.append(selected_result)
logger.info(f"DEBUG _update_bet_results: Added main result '{selected_result}' to result_parts")
if under_over_result:
result_parts.append(under_over_result)
logger.info(f"DEBUG _update_bet_results: Added UNDER/OVER result '{under_over_result}' to result_parts")
if winning_outcome_names:
# Add winning outcomes that are not already included
additional_outcomes = [outcome for outcome in winning_outcome_names if outcome not in result_parts]
if additional_outcomes:
result_parts.extend(additional_outcomes)
logger.info(f"DEBUG _update_bet_results: Added additional outcomes {additional_outcomes} to result_parts")
# Join with " + " separator
formatted_result = " + ".join(result_parts) if result_parts else selected_result
logger.info(f"DEBUG _update_bet_results: Final result_parts = {result_parts}, formatted_result = '{formatted_result}'")
match.result = formatted_result
logger.info(f"Updated match {match_id} result to {formatted_result}")
else:
logger.error(f"DEBUG _update_bet_results: Match {match_id} not found for result update!")
......@@ -2447,7 +2601,7 @@ class GamesThread(ThreadedComponent):
# Get all completed matches (status = 'done', 'cancelled', or 'failed')
# Exclude matches from fixtures that contain "_recycle_" in the fixture name
completed_matches = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'cancelled', 'failed']),
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%')
).all()
......@@ -2497,6 +2651,7 @@ class GamesThread(ThreadedComponent):
zip_filename=old_match.zip_filename,
zip_sha1sum=old_match.zip_sha1sum,
zip_upload_status='completed', # Assume ZIP is already available
zip_validation_status='valid', # ZIP already validated from old match
fixture_active_time=int(now.timestamp()),
result=None, # Reset result for new match
end_time=None, # Reset end time for new match
......@@ -2553,6 +2708,7 @@ class GamesThread(ThreadedComponent):
zip_filename=old_match.zip_filename,
zip_sha1sum=old_match.zip_sha1sum,
zip_upload_status='completed', # Assume ZIP is already available
zip_validation_status='valid', # ZIP already validated from old match
fixture_active_time=int(now.timestamp()),
result=None, # Reset result for new match
end_time=None, # Reset end time for new match
......@@ -2607,38 +2763,8 @@ class GamesThread(ThreadedComponent):
).all()
if active_matches:
# Check if all required ZIP files are available for these active matches
# Only consider fixtures "active" if all their ZIP files are downloaded
fixtures_with_missing_zips = set()
for match in active_matches:
if match.zip_filename and match.active_status:
# Check if ZIP file exists and is valid
from ..config.settings import get_user_data_dir
from pathlib import Path
import os
user_data_dir = get_user_data_dir()
zip_path = user_data_dir / "zip_files" / match.zip_filename
if not zip_path.exists():
fixtures_with_missing_zips.add(match.fixture_id)
logger.debug(f"ZIP file missing for match {match.match_number} in fixture {match.fixture_id}: {zip_path}")
else:
# Quick validation - check file size > 0
try:
if zip_path.stat().st_size == 0:
fixtures_with_missing_zips.add(match.fixture_id)
logger.debug(f"ZIP file empty for match {match.match_number} in fixture {match.fixture_id}: {zip_path}")
except OSError as e:
fixtures_with_missing_zips.add(match.fixture_id)
logger.debug(f"Cannot access ZIP file for match {match.match_number} in fixture {match.fixture_id}: {e}")
if fixtures_with_missing_zips:
logger.info(f"Active fixtures found but missing ZIP files for fixtures: {fixtures_with_missing_zips} - waiting for downloads")
return "waiting_for_downloads" # New status indicating waiting for ZIP downloads
else:
logger.debug("All active fixtures have required ZIP files available")
# Active matches found - return "already_active" since validation happens asynchronously
logger.debug("Active matches found - game can be activated")
return "already_active"
# Check if all today's fixtures are in terminal states
......@@ -2696,9 +2822,9 @@ class GamesThread(ThreadedComponent):
last_played_match_id = self._get_last_played_match_id(fixture_id, session)
logger.info(f"🎯 Last played match ID: {last_played_match_id}")
# Select random completed matches, excluding the last played one
old_matches = self._select_random_completed_matches_excluding_last(
minimum_required, last_played_match_id, session
# Select random completed matches using progressive fallback (excludes last 3 matches)
old_matches = self._select_random_completed_matches_with_fallback(
minimum_required, fixture_id, session
)
if old_matches:
......@@ -2720,7 +2846,7 @@ class GamesThread(ThreadedComponent):
# Find the most recently completed match in this fixture
last_match = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id,
MatchModel.status.in_(['done', 'cancelled', 'failed']),
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True
).order_by(MatchModel.updated_at.desc()).first()
......@@ -2741,7 +2867,7 @@ class GamesThread(ThreadedComponent):
# Build query for completed matches
# Exclude matches from fixtures that contain "_recycle_" in the fixture name
query = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'cancelled', 'failed']),
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%')
)
......@@ -2775,6 +2901,117 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to select random completed matches excluding same fighters: {e}")
return []
def _select_random_completed_matches_with_fallback(self, count: int, fixture_id: Optional[str], session, max_attempts: int = 5) -> List[MatchModel]:
"""Select random matches with progressive fallback - try up to 5 times with relaxed criteria"""
import random
for attempt in range(max_attempts):
try:
if attempt == 0:
# Attempt 1: Exclude last 3 matches (fighters + venue)
exclusion_count = 3
fighters_only = False
logger.info(f"🎯 Attempt {attempt + 1}: Excluding last {exclusion_count} matches (fighters + venue)")
elif attempt == 1:
# Attempt 2: Exclude last 2 matches (fighters + venue)
exclusion_count = 2
fighters_only = False
logger.info(f"🎯 Attempt {attempt + 1}: Excluding last {exclusion_count} matches (fighters + venue)")
elif attempt == 2:
# Attempt 3: Exclude last 1 match (fighters + venue)
exclusion_count = 1
fighters_only = False
logger.info(f"🎯 Attempt {attempt + 1}: Excluding last {exclusion_count} match (fighters + venue)")
elif attempt == 3:
# Attempt 4: Exclude last 1 match (fighters only, ignore venue)
exclusion_count = 1
fighters_only = True
logger.info(f"🎯 Attempt {attempt + 1}: Excluding last {exclusion_count} match (fighters only)")
else:
# Attempt 5: No exclusions
exclusion_count = 0
fighters_only = False
logger.info(f"🎯 Attempt {attempt + 1}: No exclusions (final fallback)")
# Get available matches with current exclusion criteria
available_matches = self._get_available_matches_excluding_recent(
fixture_id, exclusion_count, fighters_only, session
)
if len(available_matches) >= count:
selected = random.sample(available_matches, count)
logger.info(f"✅ Success on attempt {attempt + 1}: selected {len(selected)} matches from {len(available_matches)} available")
return selected
else:
logger.warning(f"⚠️ Attempt {attempt + 1} failed: only {len(available_matches)} matches available, need {count}")
continue
except Exception as e:
logger.error(f"❌ Attempt {attempt + 1} failed with error: {e}")
continue
# Final fallback: return whatever matches are available
logger.warning(f"🚨 All {max_attempts} attempts failed - returning all available matches")
all_matches = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%')
).all()
result = all_matches[:count] if len(all_matches) >= count else all_matches
logger.info(f"🔄 Final fallback: returning {len(result)} matches from {len(all_matches)} total available")
return result
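# Reviewer note: the five attempts above relax the exclusion criteria step by step; the schedule
# reads more clearly as data. Illustrative restatement only, not part of the changeset:
FALLBACK_SCHEDULE = [  # (exclude_last_n, fighters_only)
    (3, False),  # attempt 1: exclude last 3 matches, fighters + venue
    (2, False),  # attempt 2: exclude last 2 matches, fighters + venue
    (1, False),  # attempt 3: exclude last 1 match, fighters + venue
    (1, True),   # attempt 4: exclude last 1 match, fighters only
    (0, False),  # attempt 5: no exclusions (final fallback)
]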
def _get_available_matches_excluding_recent(self, fixture_id: Optional[str], exclude_last_n: int, fighters_only: bool, session) -> List[MatchModel]:
"""Get available matches excluding the last N recent matches in the fixture"""
try:
# If no fixture_id provided (creating new fixture), don't exclude any recent matches
if fixture_id is None:
recent_matches = []
else:
# Get the last N matches in the fixture (by match_number, regardless of completion status)
recent_matches = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id,
MatchModel.active_status == True
).order_by(MatchModel.match_number.desc()).limit(exclude_last_n).all()
logger.debug(f"Found {len(recent_matches)} recent matches to exclude: {[f'#{m.match_number}: {m.fighter1_township} vs {m.fighter2_township}' for m in recent_matches]}")
# Build exclusion filters
exclusion_filters = []
for recent_match in recent_matches:
if fighters_only:
# Exclude matches with same fighters only (both directions)
exclusion_filters.append(
~((MatchModel.fighter1_township == recent_match.fighter1_township) &
(MatchModel.fighter2_township == recent_match.fighter2_township)) &
~((MatchModel.fighter1_township == recent_match.fighter2_township) &
(MatchModel.fighter2_township == recent_match.fighter1_township))
)
else:
# Exclude matches with same fighters AND venue
exclusion_filters.append(
~((MatchModel.fighter1_township == recent_match.fighter1_township) &
(MatchModel.fighter2_township == recent_match.fighter2_township) &
(MatchModel.venue_kampala_township == recent_match.venue_kampala_township))
)
# Query available matches with exclusions
query = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%'),
*exclusion_filters
)
available_matches = query.all()
logger.debug(f"Found {len(available_matches)} matches available after exclusions")
return available_matches
except Exception as e:
logger.error(f"Failed to get available matches excluding recent: {e}")
return []
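# Reviewer note: the SQLAlchemy exclusion filters above encode two rules. In plain Python, a
# candidate match is excluded relative to a recent match roughly as follows (illustrative
# predicate, not part of the changeset):
def is_excluded(candidate, recent, fighters_only: bool) -> bool:
    same_pairing = (
        (candidate.fighter1_township == recent.fighter1_township and
         candidate.fighter2_township == recent.fighter2_township) or
        (candidate.fighter1_township == recent.fighter2_township and
         candidate.fighter2_township == recent.fighter1_township)
    )
    if fighters_only:
        return same_pairing  # same two fighters in either order, venue ignored
    # fighters + venue: only the same ordering at the same venue is excluded
    return (candidate.fighter1_township == recent.fighter1_township and
            candidate.fighter2_township == recent.fighter2_township and
            candidate.venue_kampala_township == recent.venue_kampala_township)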
def _determine_new_match_status(self, fixture_id: str, session) -> str:
"""Determine the status for new matches based on system state"""
try:
......@@ -2805,6 +3042,7 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to determine new match status: {e}")
return 'scheduled' # Default fallback
def _cleanup_previous_match_extractions(self):
"""Clean up all previous unzipped match directories from temporary location"""
try:
......
......@@ -526,7 +526,7 @@ class MatchTimerComponent(ThreadedComponent):
# Find the most recently completed match in this fixture
last_match = session.query(MatchModel).filter(
MatchModel.fixture_id == fixture_id,
MatchModel.status.in_(['done', 'cancelled', 'failed']),
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True
).order_by(MatchModel.updated_at.desc()).first()
......@@ -547,7 +547,7 @@ class MatchTimerComponent(ThreadedComponent):
# Build query for completed matches
# Exclude matches from fixtures that contain "_recycle_" in the fixture name
query = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'cancelled', 'failed']),
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True,
~MatchModel.fixture_id.like('%_recycle_%')
)
......
......@@ -2316,6 +2316,50 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
logger.error(f"Failed to revert match_number constraint: {e}")
return False
class Migration_030_AddZipValidationStatus(DatabaseMigration):
"""Add zip_validation_status field to matches table"""
def __init__(self):
super().__init__("030", "Add zip_validation_status field to matches table")
def up(self, db_manager) -> bool:
"""Add zip_validation_status column to matches table"""
try:
with db_manager.engine.connect() as conn:
# Check if zip_validation_status column already exists
result = conn.execute(text("PRAGMA table_info(matches)"))
columns = [row[1] for row in result.fetchall()]
if 'zip_validation_status' not in columns:
# Add zip_validation_status column with default value 'pending'
conn.execute(text("""
ALTER TABLE matches
ADD COLUMN zip_validation_status VARCHAR(20) DEFAULT 'pending'
"""))
# Add index for zip_validation_status column
conn.execute(text("""
CREATE INDEX IF NOT EXISTS ix_matches_zip_validation_status
ON matches(zip_validation_status)
"""))
conn.commit()
logger.info("zip_validation_status column added to matches table")
else:
logger.info("zip_validation_status column already exists in matches table")
return True
except Exception as e:
logger.error(f"Failed to add zip_validation_status field to matches: {e}")
return False
def down(self, db_manager) -> bool:
"""Remove zip_validation_status column - SQLite doesn't support DROP COLUMN easily"""
logger.warning("SQLite doesn't support DROP COLUMN - zip_validation_status column will remain")
return True
# Registry of all migrations in order
MIGRATIONS: List[DatabaseMigration] = [
Migration_001_InitialSchema(),
......@@ -2347,6 +2391,7 @@ MIGRATIONS: List[DatabaseMigration] = [
Migration_027_AddDefaultIntroTemplatesConfig(),
Migration_028_AddFixtureRefreshIntervalConfig(),
Migration_029_ChangeMatchNumberToUniqueWithinFixture(),
Migration_030_AddZipValidationStatus(),
]
......
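Reviewer note: a quick way to confirm Migration_030 applied is to inspect the SQLite schema
directly, using the same PRAGMA the migration uses for its idempotency check. A minimal sketch;
the database path is an assumption based on the player code elsewhere in this MR:

import sqlite3

def zip_validation_column_present(db_path: str) -> bool:
    with sqlite3.connect(db_path) as conn:
        columns = [row[1] for row in conn.execute("PRAGMA table_info(matches)")]
    return 'zip_validation_status' in columns

# e.g. zip_validation_column_present(str(get_user_data_dir() / "mbetterclient.db"))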
......@@ -472,6 +472,7 @@ class MatchModel(BaseModel):
Index('ix_matches_file_sha1sum', 'file_sha1sum'),
Index('ix_matches_zip_sha1sum', 'zip_sha1sum'),
Index('ix_matches_zip_upload_status', 'zip_upload_status'),
Index('ix_matches_zip_validation_status', 'zip_validation_status'),
Index('ix_matches_created_by', 'created_by'),
Index('ix_matches_fixture_active_time', 'fixture_active_time'),
Index('ix_matches_composite', 'active_status', 'zip_upload_status', 'created_at'),
......@@ -504,6 +505,7 @@ class MatchModel(BaseModel):
zip_sha1sum = Column(String(255), comment='SHA1 checksum of ZIP file')
zip_upload_status = Column(String(20), default='pending', comment='Upload status: pending, uploading, completed, failed')
zip_upload_progress = Column(Float, default=0.0, comment='Upload progress percentage (0.0-100.0)')
zip_validation_status = Column(String(20), default='pending', comment='Validation status: pending, validating, valid, invalid, failed')
# User tracking
created_by = Column(Integer, ForeignKey('users.id'), comment='User who created this record')
......
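Reviewer note: with the new column and its index in place, callers can cheaply summarise
validation state per fixture. A minimal sketch, assuming a session obtained from DatabaseManager
and the MatchModel import used elsewhere in this diff (the helper name is illustrative):

from sqlalchemy import func

def zip_validation_summary(session, fixture_id: str) -> dict:
    rows = (
        session.query(MatchModel.zip_validation_status, func.count(MatchModel.id))
        .filter(MatchModel.fixture_id == fixture_id, MatchModel.active_status == True)
        .group_by(MatchModel.zip_validation_status)
        .all()
    )
    return dict(rows)  # e.g. {'valid': 3, 'pending': 2}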
......@@ -123,9 +123,11 @@ class OverlayWebChannel(QObject):
# Debug original data before cleaning
logger.debug(f"OverlayWebChannel received data: {data}, type: {type(data)}")
logger.debug(f"OverlayWebChannel data keys: {list(data.keys()) if isinstance(data, dict) else 'not dict'}")
# Clean data to remove null/undefined values before sending to JavaScript
cleaned_data = self._clean_data(data)
logger.debug(f"OverlayWebChannel cleaned data: {cleaned_data}")
if not cleaned_data:
logger.debug("All data properties were null/undefined, skipping JavaScript update")
return
......@@ -139,6 +141,7 @@ class OverlayWebChannel(QObject):
self.overlay_data.update(cleaned_data)
# Add additional validation just before emit
if cleaned_data and isinstance(cleaned_data, dict) and any(v is not None for v in cleaned_data.values()):
logger.debug(f"OverlayWebChannel emitting dataUpdated signal with: {cleaned_data}")
self.dataUpdated.emit(cleaned_data)
data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else []
logger.debug(f"Signal emitted successfully with {len(cleaned_data)} data items: {data_keys}")
......@@ -155,8 +158,11 @@ class OverlayWebChannel(QObject):
logger.debug(f"OverlayWebChannel: Skipping null/empty property '{key}'")
continue
# Keep dicts and lists as is (Qt WebChannel can handle them)
if isinstance(value, (dict, list)):
cleaned_data[key] = value
# Convert non-null values to appropriate types
if isinstance(value, str):
elif isinstance(value, str):
cleaned_data[key] = value
elif isinstance(value, bool):
cleaned_data[key] = value
......@@ -194,6 +200,19 @@ class OverlayWebChannel(QObject):
logger.info(f"[JS CONSOLE.LOG] {message}")
print(f"[JS CONSOLE.LOG] {message}")
@pyqtSlot(result=str)
def getCurrentData(self) -> str:
"""Provide current overlay data to JavaScript via WebChannel"""
try:
logger.debug("OverlayWebChannel: getCurrentData called")
# Return current overlay data
current_data = dict(self.overlay_data)
logger.debug(f"OverlayWebChannel: Returning current data: {current_data}")
return json.dumps(current_data)
except Exception as e:
logger.error(f"OverlayWebChannel: Failed to get current data: {e}")
return json.dumps({})
@pyqtSlot(result=str)
def getFixtureData(self) -> str:
"""Provide fixture data to JavaScript via WebChannel"""
......@@ -258,6 +277,7 @@ class OverlayWebChannel(QObject):
"""Provide winning outcomes data for a match to JavaScript via WebChannel"""
try:
logger.info(f"QtWebChannel: Getting winning outcomes for match {match_id}")
logger.debug(f"QtWebChannel: db_manager available: {self.db_manager is not None}")
# Get winning outcomes from database
winning_outcomes = self._get_winning_outcomes_from_database(match_id)
......@@ -267,6 +287,8 @@ class OverlayWebChannel(QObject):
except Exception as e:
logger.error(f"QtWebChannel: Failed to get winning outcomes for match {match_id}: {e}")
import traceback
logger.error(f"QtWebChannel: Full traceback: {traceback.format_exc()}")
return json.dumps([])
def _get_fixture_data_from_games_thread(self) -> Optional[List[Dict[str, Any]]]:
......@@ -325,7 +347,7 @@ class OverlayWebChannel(QObject):
MatchModel.start_time.isnot(None),
MatchModel.start_time >= datetime.combine(today, datetime.min.time()),
MatchModel.start_time < datetime.combine(today, datetime.max.time()),
MatchModel.status.notin_(['done', 'cancelled', 'failed', 'paused']),
MatchModel.status.notin_(['done', 'end', 'cancelled', 'failed', 'paused']),
MatchModel.active_status == True
).order_by(MatchModel.start_time.asc()).limit(5).all()
......@@ -414,14 +436,18 @@ class OverlayWebChannel(QObject):
from ..database.models import BetDetailModel, MatchModel
from sqlalchemy import func
logger.debug(f"QtWebChannel: _get_winning_outcomes_from_database called for match {match_id}")
# Use the database manager passed to this channel
if not self.db_manager:
logger.error("Database manager not initialized")
logger.error("QtWebChannel: Database manager not initialized")
return []
logger.debug("QtWebChannel: Getting database session")
session = self.db_manager.get_session()
try:
logger.debug(f"QtWebChannel: Executing query for match {match_id}")
# Get aggregated winning amounts by outcome for this match
winning_outcomes_query = session.query(
BetDetailModel.outcome,
......@@ -433,6 +459,8 @@ class OverlayWebChannel(QObject):
MatchModel.active_status == True
).group_by(BetDetailModel.outcome).all()
logger.debug(f"QtWebChannel: Query returned {len(winning_outcomes_query)} results")
# Convert to dictionary format for JavaScript
outcomes_data = []
for outcome_name, total_amount in winning_outcomes_query:
......@@ -442,14 +470,17 @@ class OverlayWebChannel(QObject):
}
outcomes_data.append(outcome_data)
logger.debug(f"Retrieved {len(outcomes_data)} winning outcomes for match {match_id}")
logger.debug(f"QtWebChannel: Retrieved {len(outcomes_data)} winning outcomes for match {match_id}: {outcomes_data}")
return outcomes_data
finally:
session.close()
logger.debug("QtWebChannel: Database session closed")
except Exception as e:
logger.error(f"Failed to get winning outcomes from database: {e}")
logger.error(f"QtWebChannel: Failed to get winning outcomes from database: {e}")
import traceback
logger.error(f"QtWebChannel: Full traceback: {traceback.format_exc()}")
return []
......@@ -3418,7 +3449,8 @@ class QtVideoPlayer(QObject):
# Validate and clean template_data before sending to overlay
cleaned_data = self._clean_overlay_data(data_to_send)
if cleaned_data: # Only send if we have valid data after cleaning
self.window._update_overlay_safe(overlay_view, cleaned_data)
# Send data after a short delay to ensure page has loaded
QTimer.singleShot(500, lambda: self.window._update_overlay_safe(overlay_view, cleaned_data))
else:
logger.debug("Template data contained only null/undefined values, skipping update")
......@@ -3769,32 +3801,45 @@ class QtVideoPlayer(QObject):
def _get_database_manager(self):
"""Get database manager from message bus"""
try:
logger.info("QtPlayer: DEBUG - Getting database manager")
if hasattr(self, '_message_bus') and self._message_bus:
logger.info("QtPlayer: DEBUG - Message bus available, trying to get db_manager from web_dashboard")
# Try to get db_manager from web_dashboard component
try:
web_dashboard_queue = self._message_bus._queues.get('web_dashboard')
logger.info(f"QtPlayer: DEBUG - Web dashboard queue: {web_dashboard_queue}")
if web_dashboard_queue and hasattr(web_dashboard_queue, 'component'):
component = web_dashboard_queue.component
logger.info(f"QtPlayer: DEBUG - Web dashboard component: {component}")
if hasattr(component, 'db_manager'):
logger.debug("QtVideoPlayer: Got db_manager from web_dashboard component")
logger.info("QtPlayer: DEBUG - Got db_manager from web_dashboard component")
return component.db_manager
else:
logger.info("QtPlayer: DEBUG - Web dashboard component has no db_manager attribute")
else:
logger.info("QtPlayer: DEBUG - No web dashboard queue or component")
except Exception as e:
logger.debug(f"QtVideoPlayer: Could not get db_manager from message bus: {e}")
logger.error(f"QtPlayer: DEBUG - Could not get db_manager from message bus: {e}")
logger.info("QtPlayer: DEBUG - Falling back to create database manager directly")
# Fallback: create database manager directly
from ..config.settings import get_user_data_dir
from ..database.manager import DatabaseManager
db_path = get_user_data_dir() / "mbetterclient.db"
logger.debug(f"QtVideoPlayer: Creating database manager directly: {db_path}")
logger.info(f"QtPlayer: DEBUG - Creating database manager directly: {db_path}")
db_manager = DatabaseManager(str(db_path))
logger.info("QtPlayer: DEBUG - Database manager created, initializing")
if db_manager.initialize():
logger.info("QtPlayer: DEBUG - Database manager initialized successfully")
return db_manager
else:
logger.warning("QtVideoPlayer: Failed to initialize database manager")
logger.warning("QtPlayer: DEBUG - Failed to initialize database manager")
return None
except Exception as e:
logger.error(f"QtVideoPlayer: Failed to get database manager: {e}")
logger.error(f"QtPlayer: DEBUG - Failed to get database manager: {e}")
import traceback
logger.error(f"QtPlayer: DEBUG - Full traceback: {traceback.format_exc()}")
return None
def _unzip_match_zip_file(self, match_id: int):
......@@ -4092,17 +4137,40 @@ class QtVideoPlayer(QObject):
logger.info(f"Playing result video: {result}.mp4 for match {match_id}")
# Use results overlay template
# Get match details from database
match_details = self._get_match_details_for_results(match_id)
if not match_details:
logger.error(f"Could not get match details for match {match_id}")
return
# Determine under/over result if applicable
under_over_result = None
main_result = result
if result in ['UNDER', 'OVER']:
under_over_result = result
main_result = None # No separate main result
elif result not in ['UNDER', 'OVER']:
# For main results, check if there's a separate under/over from database
# This is a simplified approach - in practice, you'd need to determine this from the match outcome
pass
# Prepare overlay data for results template
overlay_data = {
'outcome': main_result,
'result': main_result, # For backwards compatibility
'under_over_result': under_over_result,
'match': {
'fighter1_township': match_details.get('fighter1_township', 'Fighter 1'),
'fighter2_township': match_details.get('fighter2_township', 'Fighter 2'),
'venue': match_details.get('venue', 'Venue')
},
'match_id': match_id,
'fixture_id': fixture_id,
'result': result,
'fighter1': 'Fighter 1', # TODO: Get from database
'fighter2': 'Fighter 2', # TODO: Get from database
'venue': 'Venue', # TODO: Get from database
'is_result_video': True
}
logger.info(f"Sending results data to overlay: {overlay_data}")
# Play the result video with results overlay template
self.window.play_video(
video_path,
......@@ -4672,3 +4740,106 @@ class QtVideoPlayer(QObject):
import traceback
logger.error(f"QtPlayer: Full traceback: {traceback.format_exc()}")
return None
def _get_match_details_for_results(self, match_id: int) -> Optional[Dict[str, Any]]:
"""Get match details for results overlay"""
try:
from ..database.models import MatchModel
logger.info(f"QtPlayer: DEBUG - Getting match details for match {match_id}")
# First try to get results from game thread (for matches that aren't done yet)
logger.info("QtPlayer: DEBUG - Trying to get match results from game thread")
game_thread_results = self._get_match_results_from_game_thread(match_id)
if game_thread_results:
logger.info(f"QtPlayer: DEBUG - Got match results from game thread: {game_thread_results}")
return game_thread_results
logger.info("QtPlayer: DEBUG - No results from game thread, trying database")
# Get database manager
logger.info("QtPlayer: DEBUG - Getting database manager")
db_manager = self._get_database_manager()
if not db_manager:
logger.error("QtPlayer: DEBUG - Database manager not available for match details, using defaults")
return {
'fighter1_township': 'Fighter 1',
'fighter2_township': 'Fighter 2',
'venue': 'Venue'
}
logger.info("QtPlayer: DEBUG - Database manager available, getting session")
session = db_manager.get_session()
logger.info("QtPlayer: DEBUG - Database session obtained")
try:
logger.info(f"QtPlayer: DEBUG - Executing query for match {match_id}")
# Get match details
match = session.query(MatchModel).filter_by(id=match_id).first()
logger.info(f"QtPlayer: DEBUG - Query executed, match object: {match}")
if not match:
logger.warning(f"QtPlayer: DEBUG - Match {match_id} not found in database, using defaults")
return {
'fighter1_township': 'Fighter 1',
'fighter2_township': 'Fighter 2',
'venue': 'Venue'
}
logger.info(f"QtPlayer: DEBUG - Match found, extracting details")
match_details = {
'fighter1_township': match.fighter1_township or 'Fighter 1',
'fighter2_township': match.fighter2_township or 'Fighter 2',
'venue': match.venue_kampala_township or 'Venue'
}
logger.info(f"QtPlayer: DEBUG - Retrieved match details for match {match_id}: {match_details}")
return match_details
finally:
logger.info("QtPlayer: DEBUG - Closing database session")
session.close()
except Exception as e:
logger.error(f"QtPlayer: DEBUG - Failed to get match details for results: {e}, using defaults")
import traceback
logger.error(f"QtPlayer: DEBUG - Full traceback: {traceback.format_exc()}")
return {
'fighter1_township': 'Fighter 1',
'fighter2_township': 'Fighter 2',
'venue': 'Venue'
}
def _get_match_results_from_game_thread(self, match_id: int) -> Optional[Dict[str, Any]]:
"""Get match results from game thread temporary storage"""
try:
logger.info(f"QtPlayer: DEBUG - Requesting match results from game thread for match {match_id}")
# Send message to game thread to get match results
if hasattr(self, '_message_bus') and self._message_bus:
from ..core.message_bus import Message, MessageType
request_message = Message(
type=MessageType.CUSTOM,
sender="qt_player",
recipient="games_thread",
data={
"request": "get_match_results",
"match_id": match_id
}
)
logger.info("QtPlayer: DEBUG - Sending request to game thread")
# For synchronous response, we can't easily wait, so this might not work
# The game thread would need to respond asynchronously
# For now, return None and rely on database
logger.info("QtPlayer: DEBUG - Game thread request sent (async), returning None for now")
return None
else:
logger.info("QtPlayer: DEBUG - No message bus available for game thread request")
return None
except Exception as e:
logger.error(f"QtPlayer: DEBUG - Failed to get match results from game thread: {e}")
return None
......@@ -3,6 +3,7 @@
<head>
<meta charset="utf-8">
<title>Results Overlay</title>
<script src="qrc:///qtwebchannel/qwebchannel.js"></script>
<style>
* {
margin: 0;
......@@ -53,9 +54,14 @@
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
backdrop-filter: blur(10px);
border: 2px solid rgba(255, 255, 255, 0.1);
opacity: 1; /* Always visible */
opacity: 0; /* Initially transparent */
padding-bottom: 50px;
display: flex; /* Always visible, content hidden instead */
display: flex;
transition: opacity 0.5s ease-out;
}
.results-panel.visible {
opacity: 1;
}
.results-content {
......@@ -738,7 +744,7 @@
<!-- Combined Result Display -->
<div class="combined-result-display" id="combinedResultDisplay">
<div class="combined-result-text" id="combinedResultText">
<span id="mainResult">WIN1</span> / <span id="underOverResult">UNDER</span>
<span id="mainResult">WIN1</span> - <span id="underOverResult">UNDER</span>
</div>
</div>
......@@ -767,6 +773,11 @@
let contentDelayTimer = null;
let resultsTimer = null;
// Define showLoadingState for compatibility (results template doesn't use loading state)
function showLoadingState() {
console.log('DEBUG: showLoadingState called (no-op for results template)');
}
// Outcome categories for styling
const outcomeCategories = {
'WIN1': 'win1',
......@@ -786,13 +797,13 @@
// Function to update overlay data (called by Qt WebChannel)
function updateOverlayData(data) {
console.log('Received overlay data:', data);
console.log('DEBUG: updateOverlayData called with data:', data);
overlayData = data || {};
// Only update if we have valid data
if (data && (data.outcome || data.result)) {
let result = data.outcome || data.result;
console.log('Processing result:', result);
console.log('DEBUG: Processing valid result:', result);
// Always treat the main result as the primary outcome
currentMainResult = result;
......@@ -800,7 +811,7 @@
// Check if under/over result is provided separately
if (data.under_over_result) {
currentUnderOverResult = data.under_over_result;
console.log('Under/over result provided separately:', currentUnderOverResult);
console.log('DEBUG: Under/over result provided separately:', currentUnderOverResult);
} else {
// Fallback: determine if main result is under/over
if (result === 'UNDER' || result === 'OVER') {
......@@ -813,18 +824,33 @@
if (data.match) {
currentMatch = data.match;
console.log('DEBUG: Match data received:', data.match);
}
if (data.match_id) {
console.log('DEBUG: Match ID received:', data.match_id);
// Fetch winning outcomes for this match
fetchWinningOutcomes(data.match_id);
// Check if results have already been shown for this match (handles overlay reloads)
const resultsShownKey = 'results_shown_' + data.match_id;
const alreadyShown = sessionStorage.getItem(resultsShownKey) === 'true';
console.log('DEBUG: Results already shown for match?', alreadyShown);
if (alreadyShown) {
console.log('DEBUG: Results already shown for this match, displaying immediately');
contentVisible = true;
showResultsPanel();
showResultsContent();
return; // Don't prepare animation again
}
}
// Prepare data but don't start animation yet - wait for video to actually start playing
console.log('Results data received, preparing animation data');
// Prepare data and start 5-second timer to show results
console.log('DEBUG: Results data received, preparing animation data');
prepareResultsAnimation();
} else {
console.log('DEBUG: No valid data received, showing loading state');
// No valid data, show loading state
showLoadingState();
}
......@@ -835,9 +861,6 @@
if (animationStarted) return;
animationStarted = true;
// Show results panel immediately (but content hidden)
showResultsPanel();
// Update fighters display
updateFightersDisplay();
......@@ -847,23 +870,38 @@
// Update winning bets display
updateWinningBetsDisplay();
// Content will be shown after 5 seconds when video starts
// Show results after 5 seconds from data receipt
setTimeout(() => {
if (!contentVisible) {
contentVisible = true;
console.log('Showing results after 5 seconds from data received');
// Mark results as shown in sessionStorage
if (overlayData && overlayData.match_id) {
sessionStorage.setItem('results_shown_' + overlayData.match_id, 'true');
}
showResultsPanel();
showResultsContent();
}
}, 5000);
}
// Fetch winning outcomes for the match
function fetchWinningOutcomes(matchId) {
console.log('Fetching winning outcomes for match:', matchId);
console.log('DEBUG: fetchWinningOutcomes called for match:', matchId);
// Use Qt WebChannel to request winning outcomes data
if (window.overlay && window.overlay.getWinningOutcomes) {
console.log('DEBUG: Qt WebChannel available, requesting winning outcomes');
try {
const outcomesJson = window.overlay.getWinningOutcomes(matchId);
const outcomesData = JSON.parse(outcomesJson);
console.log('Received winning outcomes:', outcomesData);
console.log('DEBUG: Received winning outcomes:', outcomesData);
winningOutcomes = outcomesData || [];
updateWinningBetsDisplay();
} catch (error) {
console.error('Failed to get winning outcomes:', error);
console.error('DEBUG: Failed to get winning outcomes:', error);
// Fallback: show sample data for testing
winningOutcomes = [
{ outcome: 'WIN1', amount: 125.00 },
......@@ -873,7 +911,7 @@
updateWinningBetsDisplay();
}
} else {
console.warn('Qt WebChannel not available for fetching winning outcomes');
console.warn('DEBUG: Qt WebChannel not available for fetching winning outcomes');
// Fallback: show sample data for testing
winningOutcomes = [
{ outcome: 'WIN1', amount: 125.00 },
......@@ -884,19 +922,31 @@
}
}
// Show results panel (always visible now)
// Show results panel with fade-in animation
function showResultsPanel() {
console.log('DEBUG: showResultsPanel called');
const resultsPanel = document.getElementById('resultsPanel');
resultsPanel.style.display = 'flex';
if (resultsPanel) {
console.log('DEBUG: Adding visible class to results panel');
resultsPanel.classList.add('visible');
} else {
console.log('DEBUG: ERROR - resultsPanel element not found');
}
}
// Show results content with animation after delay
function showResultsContent() {
console.log('DEBUG: showResultsContent called');
const resultsContent = document.getElementById('resultsContent');
if (resultsContent) {
console.log('DEBUG: Adding visible class to results content');
resultsContent.classList.add('visible');
} else {
console.log('DEBUG: ERROR - resultsContent element not found');
}
}
// Handle video position changes to detect when video starts playing and reaches 5 seconds
// Handle video position changes (for logging/debugging purposes)
function handlePositionChange(position, duration) {
// Check if video has started playing (position > 0)
if (position > 0 && !videoStarted) {
......@@ -904,22 +954,8 @@
console.log('Video started playing at position:', position);
}
// Check if video has been playing for at least 5 seconds
if (videoStarted && position >= 5 && !contentVisible) {
contentVisible = true;
console.log('Video has been playing for 5+ seconds, showing results content');
// Clear any existing timers
if (resultsTimer) {
clearTimeout(resultsTimer);
}
if (contentDelayTimer) {
clearTimeout(contentDelayTimer);
}
// Show results content with animation
showResultsContent();
}
// Log position for debugging
console.log('Video position:', position, 'duration:', duration);
}
// Update fighters display
......@@ -1040,48 +1076,105 @@
// Initialize when DOM is loaded
document.addEventListener('DOMContentLoaded', function() {
console.log('Results overlay initialized');
console.log('DEBUG: Results overlay DOM loaded and initialized');
console.log('DEBUG: sessionStorage available:', typeof sessionStorage !== 'undefined');
// Always show results panel with default content
showResultsPanel();
// Setup WebChannel communication
setupWebChannel();
// Timer will start when video begins playing (detected via position changes)
console.log('Waiting for video to start playing before showing results content');
// Panel and content will be shown after 5 seconds when video starts playing
console.log('DEBUG: Waiting for results data to be received');
// Fallback: show test results after 5 seconds if no data received
setTimeout(() => {
if (!contentVisible) {
console.log('DEBUG: Fallback - No data received after 5 seconds, showing test results');
// Set test data
currentMainResult = 'WIN1';
currentUnderOverResult = 'OVER';
currentMatch = { fighter1_township: 'Test Fighter 1', fighter2_township: 'Test Fighter 2' };
winningOutcomes = [
{ outcome: 'WIN1', amount: 100.00 },
{ outcome: 'OVER', amount: 50.00 }
];
contentVisible = true;
showResultsPanel();
showResultsContent();
updateFightersDisplay();
updateCombinedResultDisplay();
updateWinningBetsDisplay();
}
}, 5000);
});
// Qt WebChannel initialization (when available)
if (typeof QWebChannel !== 'undefined') {
new QWebChannel(qt.webChannelTransport, function(channel) {
console.log('WebChannel initialized for results overlay');
// Setup WebChannel communication (similar to fixtures.html)
function setupWebChannel() {
// Check if WebChannel is already set up by overlay.js
if (window.overlay) {
console.log('DEBUG: WebChannel already set up by overlay.js');
// Connect to overlay object if available
if (channel.objects.overlay) {
window.overlay = channel.objects.overlay;
// Test WebChannel
if (window.overlay && window.overlay.log) {
window.overlay.log('TEST: WebChannel connection successful');
}
// Connect dataChanged signal
window.overlay.dataChanged.connect(function(data) {
// Listen for data updates from Python
if (window.overlay.dataUpdated) {
window.overlay.dataUpdated.connect(function(data) {
console.log('DEBUG: Received data update from Python:', data);
updateOverlayData(data);
});
}
// Connect positionChanged signal
if (window.overlay.positionChanged) {
console.log('DEBUG: Connecting positionChanged signal');
window.overlay.positionChanged.connect(function(position, duration) {
if (position !== null && duration !== null) {
handlePositionChange(position, duration);
} else {
console.warn('positionChanged signal received null/undefined parameters, skipping');
console.warn('DEBUG: positionChanged signal received null/undefined parameters');
}
});
}
return;
}
// Get initial data
if (window.overlay.getCurrentData) {
window.overlay.getCurrentData(function(data) {
// Fallback: setup WebChannel if overlay.js didn't do it
if (typeof qt !== 'undefined' && qt.webChannelTransport) {
try {
new QWebChannel(qt.webChannelTransport, function(channel) {
console.log('DEBUG: WebChannel connected successfully (fallback)');
// Connect to overlay object
window.overlay = channel.objects.overlay;
// Listen for data updates from Python
if (window.overlay && window.overlay.dataUpdated) {
window.overlay.dataUpdated.connect(function(data) {
console.log('DEBUG: Received data update from Python:', data);
updateOverlayData(data);
});
}
// Connect positionChanged signal
if (window.overlay.positionChanged) {
console.log('DEBUG: Connecting positionChanged signal');
window.overlay.positionChanged.connect(function(position, duration) {
if (position !== null && duration !== null) {
handlePositionChange(position, duration);
} else {
console.warn('DEBUG: positionChanged signal received null/undefined parameters');
}
});
}
});
} catch (e) {
console.log('DEBUG: Failed to setup WebChannel:', e);
}
} else {
console.log('DEBUG: WebChannel not available');
}
}
// Export functions for external use
......@@ -1090,17 +1183,15 @@
</script>
<!--
IMPORTANT: When creating or editing custom templates, always maintain these two script tags:
1. qrc:///qtwebchannel/qwebchannel.js - Required for Qt WebChannel communication
2. overlay://overlay.js - Required for overlay functionality and data updates
IMPORTANT: When creating or editing custom templates, always maintain this script tag:
qrc:///qtwebchannel/qwebchannel.js - Required for Qt WebChannel communication
These scripts enable communication between the Qt application and the overlay template.
Without them, the template will not receive data updates or function properly.
This script enables communication between the Qt application and the overlay template.
The results.html template handles its own WebChannel setup and does not use overlay.js
to avoid conflicts with the custom overlay elements.
NOTE: When editing this template or creating new ones, never remove these script sources!
The overlay:// custom scheme ensures JavaScript files work for both built-in and uploaded templates.
NOTE: When editing this template, never remove the qwebchannel.js script source!
-->
<script src="qrc:///qtwebchannel/qwebchannel.js"></script>
<script src="overlay://overlay.js"></script>
</body>
</html>
\ No newline at end of file
......@@ -116,6 +116,7 @@
<th>Fighters</th>
<th>Status</th>
<th>Start Time</th>
<th>End Time</th>
<th>Result</th>
<th>Outcomes</th>
<th>Actions</th>
......@@ -391,6 +392,7 @@ function renderMatchesTable(matches) {
matches.forEach(match => {
const row = document.createElement('tr');
const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set';
const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set';
const resultDisplay = match.result || 'Not available';
const outcomesCount = match.outcome_count || 0;
......@@ -403,6 +405,7 @@ function renderMatchesTable(matches) {
</td>
<td>${getStatusBadge(match)}</td>
<td><small class="text-info">${startTimeDisplay}</small></td>
<td><small class="text-success">${endTimeDisplay}</small></td>
<td><small class="text-muted">${resultDisplay}</small></td>
<td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td>
<td>
......@@ -438,6 +441,7 @@ function updateMatchesTable(matches) {
processedMatches.add(match.id);
const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set';
const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set';
const resultDisplay = match.result || 'Not available';
const outcomesCount = match.outcome_count || 0;
......@@ -450,6 +454,7 @@ function updateMatchesTable(matches) {
</td>
<td>${getStatusBadge(match)}</td>
<td><small class="text-info">${startTimeDisplay}</small></td>
<td><small class="text-success">${endTimeDisplay}</small></td>
<td><small class="text-muted">${resultDisplay}</small></td>
<td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td>
<td>
......