Almost there...

parent 398d40ba
...@@ -753,18 +753,17 @@ class MbetterClientApplication: ...@@ -753,18 +753,17 @@ class MbetterClientApplication:
logger.error(f"Failed to handle log entry: {e}") logger.error(f"Failed to handle log entry: {e}")
def _handle_start_game_message(self, message: Message): def _handle_start_game_message(self, message: Message):
"""Handle START_GAME message - only cancel the start-timer as its job is done""" """Handle START_GAME message - timer will be cancelled when game actually starts successfully"""
try: try:
# The core should only cancel its start-timer when START_GAME is received # The timer will be cancelled when the game status indicates successful start
# The actual START_GAME processing is done by games_thread # The actual START_GAME processing is done by games_thread
logger.info("START_GAME message received - cancelling command line start-timer as it has completed its job") logger.info("START_GAME message received - timer will be managed based on game start outcome")
self._cancel_game_timer()
except Exception as e: except Exception as e:
logger.error(f"Failed to handle START_GAME message: {e}") logger.error(f"Failed to handle START_GAME message: {e}")
def _handle_game_status_response(self, message: Message): def _handle_game_status_response(self, message: Message):
"""Handle GAME_STATUS responses, particularly for timer-initiated START_GAME failures""" """Handle GAME_STATUS responses, managing timer based on game start outcome"""
try: try:
status = message.data.get("status", "unknown") status = message.data.get("status", "unknown")
sender = message.sender sender = message.sender
...@@ -776,7 +775,7 @@ class MbetterClientApplication: ...@@ -776,7 +775,7 @@ class MbetterClientApplication:
return return
# Check if this is a failure response that should trigger timer restart # Check if this is a failure response that should trigger timer restart
failure_statuses = ["waiting_for_downloads", "discarded", "error", "no_matches"] failure_statuses = ["waiting_for_downloads", "discarded", "error", "no_matches", "no_fixtures_available"]
if status in failure_statuses: if status in failure_statuses:
logger.info(f"START_GAME failed with status '{status}' from {sender} - restarting timer") logger.info(f"START_GAME failed with status '{status}' from {sender} - restarting timer")
...@@ -791,8 +790,9 @@ class MbetterClientApplication: ...@@ -791,8 +790,9 @@ class MbetterClientApplication:
else: else:
logger.warning("No original timer interval available for restart") logger.warning("No original timer interval available for restart")
elif status == "started": elif status == "started":
logger.info(f"START_GAME succeeded with status '{status}' from {sender} - timer job completed") logger.info(f"START_GAME succeeded with status '{status}' from {sender} - cancelling timer")
# Game started successfully, clear timer state # Game started successfully, cancel the timer and clear timer state
self._cancel_game_timer()
self._original_timer_interval = None self._original_timer_interval = None
except Exception as e: except Exception as e:
......
...@@ -3,6 +3,7 @@ Games thread component for managing game-related operations ...@@ -3,6 +3,7 @@ Games thread component for managing game-related operations
""" """
import time import time
import json
import logging import logging
import threading import threading
from datetime import datetime, timedelta from datetime import datetime, timedelta
...@@ -26,6 +27,7 @@ class GamesThread(ThreadedComponent): ...@@ -26,6 +27,7 @@ class GamesThread(ThreadedComponent):
self.game_active = False self.game_active = False
self._shutdown_event = threading.Event() self._shutdown_event = threading.Event()
self.message_queue = None self.message_queue = None
self.waiting_for_validation_fixture: Optional[str] = None
def _cleanup_stale_ingame_matches(self): def _cleanup_stale_ingame_matches(self):
"""Clean up any stale 'ingame' matches from previous crashed sessions and old 'bet' fixtures""" """Clean up any stale 'ingame' matches from previous crashed sessions and old 'bet' fixtures"""
...@@ -189,6 +191,18 @@ class GamesThread(ThreadedComponent): ...@@ -189,6 +191,18 @@ class GamesThread(ThreadedComponent):
try: try:
logger.info(f"Processing START_GAME message from {message.sender}") logger.info(f"Processing START_GAME message from {message.sender}")
# If any fixture is currently being downloaded, wait for all ZIP validation to complete
if self.waiting_for_validation_fixture is not None:
logger.info(f"Fixture {self.waiting_for_validation_fixture} is currently downloading - waiting for all ZIP files to be validated")
self._send_response(message, "waiting_for_downloads", f"Waiting for fixture {self.waiting_for_validation_fixture} downloads to complete")
return
# Check if any ZIP files are currently being validated system-wide
if self._are_any_zips_being_validated():
logger.info("ZIP files are currently being validated system-wide - waiting for all downloads to complete")
self._send_response(message, "waiting_for_downloads", "Waiting for all ZIP file downloads and validation to complete")
return
fixture_id = message.data.get("fixture_id") fixture_id = message.data.get("fixture_id")
if fixture_id: if fixture_id:
...@@ -198,8 +212,15 @@ class GamesThread(ThreadedComponent): ...@@ -198,8 +212,15 @@ class GamesThread(ThreadedComponent):
self._send_response(message, "discarded", f"Fixture {fixture_id} is already completed") self._send_response(message, "discarded", f"Fixture {fixture_id} is already completed")
return return
# Fixture is not terminal, activate it (ZIP validation happens asynchronously) # Check if this fixture is currently being downloaded
logger.info(f"Activating provided fixture: {fixture_id}") if fixture_id == self.waiting_for_validation_fixture:
# This fixture is being downloaded, wait for it
logger.info(f"Fixture {fixture_id} is currently being downloaded - waiting")
self._send_response(message, "waiting_for_downloads", f"Waiting for fixture {fixture_id} to finish downloading")
return
else:
# Start game with this fixture as usual
logger.info(f"Starting game with provided fixture {fixture_id}")
self._activate_fixture(fixture_id, message) self._activate_fixture(fixture_id, message)
# Start ZIP validation asynchronously in background # Start ZIP validation asynchronously in background
self._start_async_zip_validation(fixture_id) self._start_async_zip_validation(fixture_id)
...@@ -221,6 +242,21 @@ class GamesThread(ThreadedComponent): ...@@ -221,6 +242,21 @@ class GamesThread(ThreadedComponent):
active_fixture = self._find_active_today_fixture() active_fixture = self._find_active_today_fixture()
if active_fixture: if active_fixture:
logger.info(f"Found active fixture for today: {active_fixture}") logger.info(f"Found active fixture for today: {active_fixture}")
# Check if this fixture is currently being downloaded
if active_fixture == self.waiting_for_validation_fixture:
# This fixture is being downloaded, skip it and initialize new fixture
logger.info(f"Active fixture {active_fixture} is currently being downloaded - initializing new fixture")
new_fixture_id = self._initialize_new_fixture()
if new_fixture_id:
self._activate_fixture(new_fixture_id, message)
# Start ZIP validation asynchronously in background
self._start_async_zip_validation(new_fixture_id)
else:
logger.warning("Could not initialize new fixture")
self._send_response(message, "error", "Could not initialize new fixture")
else:
# Start game with this active fixture as usual
logger.info(f"Starting game with active fixture {active_fixture}")
self._activate_fixture(active_fixture, message) self._activate_fixture(active_fixture, message)
# Start ZIP validation asynchronously in background # Start ZIP validation asynchronously in background
self._start_async_zip_validation(active_fixture) self._start_async_zip_validation(active_fixture)
...@@ -230,12 +266,20 @@ class GamesThread(ThreadedComponent): ...@@ -230,12 +266,20 @@ class GamesThread(ThreadedComponent):
logger.info("No active fixtures found - initializing new fixture") logger.info("No active fixtures found - initializing new fixture")
new_fixture_id = self._initialize_new_fixture() new_fixture_id = self._initialize_new_fixture()
if new_fixture_id: if new_fixture_id:
# Check if all ZIPs are validated before activating
if self._are_all_zips_validated_for_fixture(new_fixture_id):
# All ZIPs validated, activate fixture
logger.info(f"All ZIPs validated for new fixture {new_fixture_id} - activating")
self._activate_fixture(new_fixture_id, message) self._activate_fixture(new_fixture_id, message)
# Start ZIP validation asynchronously in background else:
# Not all ZIPs validated, start validation and wait
logger.info(f"Not all ZIPs validated for new fixture {new_fixture_id} - starting validation and waiting")
self.waiting_for_validation_fixture = new_fixture_id
self._start_async_zip_validation(new_fixture_id) self._start_async_zip_validation(new_fixture_id)
self._send_response(message, "waiting_for_downloads", f"Waiting for ZIP files to be validated for fixture {new_fixture_id}")
else: else:
logger.warning("Could not initialize new fixture") logger.warning("Could not initialize new fixture - no fixtures available")
self._send_response(message, "error", "Could not initialize new fixture") self._send_response(message, "no_fixtures_available", "No fixtures available to start game")
except Exception as e: except Exception as e:
logger.error(f"Failed to handle START_GAME message: {e}") logger.error(f"Failed to handle START_GAME message: {e}")
...@@ -581,6 +625,17 @@ class GamesThread(ThreadedComponent): ...@@ -581,6 +625,17 @@ class GamesThread(ThreadedComponent):
) )
self.message_bus.publish(response) self.message_bus.publish(response)
# For timer-related failures, also send to core so it can handle timer reset
if status in ["waiting_for_downloads", "discarded", "error", "no_matches", "no_fixtures_available"]:
core_response = Message(
type=MessageType.GAME_STATUS,
sender=self.name,
recipient="core",
data=response_data,
correlation_id=original_message.correlation_id
)
self.message_bus.publish(core_response)
except Exception as e: except Exception as e:
logger.error(f"Failed to send response: {e}") logger.error(f"Failed to send response: {e}")
...@@ -609,6 +664,94 @@ class GamesThread(ThreadedComponent): ...@@ -609,6 +664,94 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to check if fixture {fixture_id} is terminal: {e}") logger.error(f"Failed to check if fixture {fixture_id} is terminal: {e}")
return False return False
def _are_all_zips_validated_for_fixture(self, fixture_id: str) -> bool:
    """Return True when every active match in *fixture_id* that carries a
    ZIP file has reached the 'valid' validation status.

    A fixture whose matches have no ZIP files attached is trivially
    considered validated.  Database errors are logged and reported as
    False so callers treat the fixture as not-yet-ready.
    """
    try:
        session = self.db_manager.get_session()
        try:
            zip_matches = (
                session.query(MatchModel)
                .filter(
                    MatchModel.fixture_id == fixture_id,
                    MatchModel.active_status == True,
                    MatchModel.zip_filename.isnot(None),
                )
                .all()
            )
            # No ZIP-bearing matches -> nothing can be pending.
            if not zip_matches:
                return True
            # Every ZIP must have passed validation.
            for row in zip_matches:
                if row.zip_validation_status != 'valid':
                    return False
            return True
        finally:
            session.close()
    except Exception as e:
        logger.error(f"Failed to check ZIP validation status for fixture {fixture_id}: {e}")
        return False
def _are_any_zips_being_validated(self) -> bool:
    """Return True if any active match's ZIP file is currently being validated.

    Only counts rows whose validation is actively in progress
    (status 'validating'), not merely 'pending', and only matches that
    actually have a ZIP file attached.  Database errors are logged and
    reported as False (i.e. "nothing in flight") so callers do not block
    forever on a broken query.
    """
    # NOTE: this method previously appeared twice verbatim in the class;
    # the duplicate definition silently shadowed the first and has been
    # removed.
    try:
        session = self.db_manager.get_session()
        try:
            validating_count = session.query(MatchModel).filter(
                MatchModel.zip_validation_status == 'validating',
                MatchModel.active_status == True,
                MatchModel.zip_filename.isnot(None)
            ).count()
            return validating_count > 0
        finally:
            session.close()
    except Exception as e:
        logger.error(f"Failed to check if any ZIPs are being validated: {e}")
        return False
def _mark_all_zips_as_validated(self):
    """Force every active, ZIP-bearing match to 'valid' validation status.

    Invoked after a fixture update finishes successfully, on the premise
    that a completed update implies the downloaded archives are usable.
    Errors are logged and swallowed (best-effort operation).
    """
    try:
        session = self.db_manager.get_session()
        try:
            # Bulk-update only the rows that are not already 'valid'.
            pending = session.query(MatchModel).filter(
                MatchModel.zip_filename.isnot(None),
                MatchModel.active_status == True,
                MatchModel.zip_validation_status != 'valid',
            )
            changed = pending.update({'zip_validation_status': 'valid'})
            session.commit()
            logger.info(f"Marked {changed} ZIP files as validated")
        finally:
            session.close()
    except Exception as e:
        logger.error(f"Failed to mark ZIPs as validated: {e}")
def _has_today_fixtures_all_terminal(self) -> bool: def _has_today_fixtures_all_terminal(self) -> bool:
"""Check if all fixtures with today's matches are in terminal states""" """Check if all fixtures with today's matches are in terminal states"""
try: try:
...@@ -849,6 +992,24 @@ class GamesThread(ThreadedComponent): ...@@ -849,6 +992,24 @@ class GamesThread(ThreadedComponent):
logger.info(f"Async ZIP validation completed for fixture {fixture_id}") logger.info(f"Async ZIP validation completed for fixture {fixture_id}")
# Check if we were waiting for this fixture
if self.waiting_for_validation_fixture == fixture_id:
# Check if all are now validated
if self._are_all_zips_validated_for_fixture(fixture_id):
logger.info(f"All ZIPs now validated for waiting fixture {fixture_id} - activating")
# Create a dummy message for activation
dummy_message = Message(
type=MessageType.START_GAME,
sender=self.name,
recipient=self.name,
data={"fixture_id": fixture_id, "timestamp": time.time()},
correlation_id=None
)
self._activate_fixture(fixture_id, dummy_message)
self.waiting_for_validation_fixture = None
else:
logger.warning(f"ZIP validation completed for fixture {fixture_id} but not all ZIPs are valid - not activating")
except Exception as e: except Exception as e:
logger.error(f"Async ZIP validation failed for fixture {fixture_id}: {e}") logger.error(f"Async ZIP validation failed for fixture {fixture_id}: {e}")
...@@ -1405,10 +1566,24 @@ class GamesThread(ThreadedComponent): ...@@ -1405,10 +1566,24 @@ class GamesThread(ThreadedComponent):
# Extract the ZIP file # Extract the ZIP file
logger.info(f"DEBUG: Starting ZIP extraction...") logger.info(f"DEBUG: Starting ZIP extraction...")
try:
with zipfile.ZipFile(str(zip_file_path), 'r') as zip_ref: with zipfile.ZipFile(str(zip_file_path), 'r') as zip_ref:
file_list = zip_ref.namelist() file_list = zip_ref.namelist()
logger.info(f"DEBUG: ZIP contains {len(file_list)} files: {file_list}") logger.info(f"DEBUG: ZIP contains {len(file_list)} files: {file_list}")
zip_ref.extractall(str(temp_dir)) zip_ref.extractall(str(temp_dir))
except zipfile.BadZipFile as e:
logger.error(f"DEBUG: Invalid or corrupted ZIP file for match {match_id}: {e}")
# When ZIP extraction fails, act as if PLAY_VIDEO_RESULTS_DONE has been received
# Send PLAY_VIDEO_RESULTS with a fallback result
logger.info(f"DEBUG: ZIP extraction failed, sending PLAY_VIDEO_RESULTS_DONE simulation for match {match_id}")
self._handle_zip_extraction_failure(match_id, match.fixture_id)
return
except Exception as e:
logger.error(f"DEBUG: Error during ZIP extraction for match {match_id}: {e}")
# When ZIP extraction fails, act as if PLAY_VIDEO_RESULTS_DONE has been received
logger.info(f"DEBUG: ZIP extraction failed, sending PLAY_VIDEO_RESULTS_DONE simulation for match {match_id}")
self._handle_zip_extraction_failure(match_id, match.fixture_id)
return
# Log extraction results # Log extraction results
extracted_files = list(temp_dir.rglob("*")) extracted_files = list(temp_dir.rglob("*"))
...@@ -1837,6 +2012,16 @@ class GamesThread(ThreadedComponent): ...@@ -1837,6 +2012,16 @@ class GamesThread(ThreadedComponent):
logger.info(f"Fixture update completed: {synchronized_matches} matches synchronized, {downloaded_zips} ZIPs downloaded") logger.info(f"Fixture update completed: {synchronized_matches} matches synchronized, {downloaded_zips} ZIPs downloaded")
# Mark all downloaded ZIPs as validated since the update completed successfully
if downloaded_zips > 0:
self._mark_all_zips_as_validated()
logger.info(f"Marked {downloaded_zips} ZIP files as validated after fixture update")
# Clear any waiting fixture since downloads are now complete
if self.waiting_for_validation_fixture:
logger.info(f"Clearing waiting fixture {self.waiting_for_validation_fixture} since fixture update completed")
self.waiting_for_validation_fixture = None
# Check if we should start a game now that fixtures are available # Check if we should start a game now that fixtures are available
if synchronized_matches > 0 and not self.game_active: if synchronized_matches > 0 and not self.game_active:
logger.info("New fixtures available and no game is active - attempting to start game") logger.info("New fixtures available and no game is active - attempting to start game")
...@@ -2234,7 +2419,7 @@ class GamesThread(ThreadedComponent): ...@@ -2234,7 +2419,7 @@ class GamesThread(ThreadedComponent):
).update({'result': 'lost'}) ).update({'result': 'lost'})
logger.info(f"DEBUG _update_bet_results: Set {losing_count} other bets to lost") logger.info(f"DEBUG _update_bet_results: Set {losing_count} other bets to lost")
# Update the match result in the matches table with winning outcomes in parentheses # Update the match result in the matches table with winning outcomes in separate fields
match = session.query(MatchModel).filter_by(id=match_id).first() match = session.query(MatchModel).filter_by(id=match_id).first()
if match: if match:
logger.info(f"DEBUG _update_bet_results: Before update - match.result = '{match.result}'") logger.info(f"DEBUG _update_bet_results: Before update - match.result = '{match.result}'")
...@@ -2246,32 +2431,27 @@ class GamesThread(ThreadedComponent): ...@@ -2246,32 +2431,27 @@ class GamesThread(ThreadedComponent):
winning_outcome_names = [outcome.outcome_name for outcome in winning_outcomes] winning_outcome_names = [outcome.outcome_name for outcome in winning_outcomes]
logger.info(f"DEBUG _update_bet_results: Found {len(winning_outcomes)} winning outcomes for '{selected_result}': {winning_outcome_names}") logger.info(f"DEBUG _update_bet_results: Found {len(winning_outcomes)} winning outcomes for '{selected_result}': {winning_outcome_names}")
# Include UNDER/OVER if applicable # Set the main result (selected_result)
under_over_result = None match.result = selected_result
if under_over_outcome: logger.info(f"DEBUG _update_bet_results: Set match.result to '{selected_result}'")
under_over_result = under_over_outcome
logger.info(f"DEBUG _update_bet_results: UNDER/OVER result detected: '{under_over_result}'")
# Format result to include winning outcomes and UNDER/OVER # Set winning outcomes as JSON array in separate field
result_parts = []
if selected_result not in ['UNDER', 'OVER']:
result_parts.append(selected_result)
logger.info(f"DEBUG _update_bet_results: Added main result '{selected_result}' to result_parts")
if under_over_result:
result_parts.append(under_over_result)
logger.info(f"DEBUG _update_bet_results: Added UNDER/OVER result '{under_over_result}' to result_parts")
if winning_outcome_names: if winning_outcome_names:
# Add winning outcomes that are not already included match.winning_outcomes = json.dumps(winning_outcome_names)
additional_outcomes = [outcome for outcome in winning_outcome_names if outcome not in result_parts] logger.info(f"DEBUG _update_bet_results: Set match.winning_outcomes to {winning_outcome_names}")
if additional_outcomes: else:
result_parts.extend(additional_outcomes) match.winning_outcomes = None
logger.info(f"DEBUG _update_bet_results: Added additional outcomes {additional_outcomes} to result_parts") logger.info(f"DEBUG _update_bet_results: No winning outcomes, set match.winning_outcomes to None")
# Join with " + " separator # Set under_over_result in separate field
formatted_result = " + ".join(result_parts) if result_parts else selected_result if under_over_outcome:
logger.info(f"DEBUG _update_bet_results: Final result_parts = {result_parts}, formatted_result = '{formatted_result}'") match.under_over_result = under_over_outcome
match.result = formatted_result logger.info(f"DEBUG _update_bet_results: Set match.under_over_result to '{under_over_outcome}'")
logger.info(f"Updated match {match_id} result to {formatted_result}") else:
match.under_over_result = None
logger.info(f"DEBUG _update_bet_results: No UNDER/OVER result, set match.under_over_result to None")
logger.info(f"Updated match {match_id} with result='{selected_result}', winning_outcomes={winning_outcome_names}, under_over_result='{under_over_outcome}'")
else: else:
logger.error(f"DEBUG _update_bet_results: Match {match_id} not found for result update!") logger.error(f"DEBUG _update_bet_results: Match {match_id} not found for result update!")
...@@ -2739,6 +2919,10 @@ class GamesThread(ThreadedComponent): ...@@ -2739,6 +2919,10 @@ class GamesThread(ThreadedComponent):
def _determine_game_status(self) -> str: def _determine_game_status(self) -> str:
"""Determine the current game status for status requests""" """Determine the current game status for status requests"""
try: try:
# If waiting for validation, return waiting status
if self.waiting_for_validation_fixture:
return "waiting_for_downloads"
# If a game is currently active, return "started" # If a game is currently active, return "started"
if self.game_active and self.current_fixture_id: if self.game_active and self.current_fixture_id:
return "started" return "started"
...@@ -2954,9 +3138,24 @@ class GamesThread(ThreadedComponent): ...@@ -2954,9 +3138,24 @@ class GamesThread(ThreadedComponent):
~MatchModel.fixture_id.like('%_recycle_%') ~MatchModel.fixture_id.like('%_recycle_%')
).all() ).all()
if all_matches:
result = all_matches[:count] if len(all_matches) >= count else all_matches result = all_matches[:count] if len(all_matches) >= count else all_matches
logger.info(f"🔄 Final fallback: returning {len(result)} matches from {len(all_matches)} total available") logger.info(f"🔄 Final fallback: returning {len(result)} matches from {len(all_matches)} total available")
return result return result
else:
# If no matches found with exclusions, recycle the oldest match from database
logger.warning("🚨 No matches available after exclusions - recycling the oldest match from database")
oldest_match = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True
).order_by(MatchModel.created_at.asc()).first()
if oldest_match:
logger.info(f"♻️ Recycled oldest match: {oldest_match.match_number} ({oldest_match.fighter1_township} vs {oldest_match.fighter2_township})")
return [oldest_match]
else:
logger.error("🚨 No completed matches found in database at all")
return []
def _get_available_matches_excluding_recent(self, fixture_id: Optional[str], exclude_last_n: int, fighters_only: bool, session) -> List[MatchModel]: def _get_available_matches_excluding_recent(self, fixture_id: Optional[str], exclude_last_n: int, fighters_only: bool, session) -> List[MatchModel]:
"""Get available matches excluding the last N recent matches in the fixture""" """Get available matches excluding the last N recent matches in the fixture"""
...@@ -3071,6 +3270,29 @@ class GamesThread(ThreadedComponent): ...@@ -3071,6 +3270,29 @@ class GamesThread(ThreadedComponent):
except Exception as e: except Exception as e:
logger.error(f"DEBUG: Failed to cleanup previous match extractions: {e}") logger.error(f"DEBUG: Failed to cleanup previous match extractions: {e}")
def _handle_zip_extraction_failure(self, match_id: int, fixture_id: str):
    """Recover from a failed ZIP extraction by completing the match as if
    PLAY_VIDEO_RESULTS_DONE had been received.

    Picks a fallback result, marks the match 'done' with that result,
    emits MATCH_DONE, then advances the flow with NEXT_MATCH.  Errors are
    logged and swallowed so a broken archive cannot wedge the game loop.
    """
    try:
        logger.info(f"DEBUG: Handling ZIP extraction failure for match {match_id}, fixture {fixture_id}")

        # No video assets available -> choose a result without them.
        fallback_result = self._fallback_result_selection()
        logger.info(f"DEBUG: Using fallback result '{fallback_result}' for failed ZIP extraction")

        # Mirror the normal completion path (_handle_play_video_result_done).
        self._set_match_status_and_result(match_id, 'done', fallback_result)
        self._send_match_done(fixture_id, match_id, fallback_result)

        # Keep the game moving to the next match.
        self._send_next_match(fixture_id, match_id)

        logger.info(f"DEBUG: ZIP extraction failure handled - match {match_id} completed with fallback result '{fallback_result}'")
    except Exception as e:
        logger.error(f"DEBUG: Failed to handle ZIP extraction failure for match {match_id}: {e}")
def _cleanup(self): def _cleanup(self):
"""Perform cleanup operations""" """Perform cleanup operations"""
try: try:
......
...@@ -574,14 +574,30 @@ class MatchTimerComponent(ThreadedComponent): ...@@ -574,14 +574,30 @@ class MatchTimerComponent(ThreadedComponent):
completed_matches = query.all() completed_matches = query.all()
if len(completed_matches) < count: if len(completed_matches) >= count:
logger.warning(f"Only {len(completed_matches)} completed matches available (excluding same fighters), requested {count}")
return completed_matches
# Select random matches # Select random matches
selected_matches = random.sample(completed_matches, count) selected_matches = random.sample(completed_matches, count)
logger.info(f"Selected {len(selected_matches)} random completed matches (excluding same fighters as last match)") logger.info(f"Selected {len(selected_matches)} random completed matches (excluding same fighters as last match)")
return selected_matches return selected_matches
else:
# Not enough matches with exclusions
if completed_matches:
logger.warning(f"Only {len(completed_matches)} completed matches available (excluding same fighters), requested {count} - returning available")
return completed_matches
else:
# No matches found with exclusions, recycle the oldest match from database
logger.warning("🚨 No matches available after exclusions - recycling the oldest match from database")
oldest_match = session.query(MatchModel).filter(
MatchModel.status.in_(['done', 'end', 'cancelled', 'failed']),
MatchModel.active_status == True
).order_by(MatchModel.created_at.asc()).first()
if oldest_match:
logger.info(f"♻️ Recycled oldest match: {oldest_match.match_number} ({oldest_match.fighter1_township} vs {oldest_match.fighter2_township})")
return [oldest_match]
else:
logger.error("🚨 No completed matches found in database at all")
return []
except Exception as e: except Exception as e:
logger.error(f"Failed to select random completed matches excluding same fighters: {e}") logger.error(f"Failed to select random completed matches excluding same fighters: {e}")
......
...@@ -86,10 +86,92 @@ class DatabaseManager: ...@@ -86,10 +86,92 @@ class DatabaseManager:
logger.info("Database manager initialized successfully") logger.info("Database manager initialized successfully")
return True return True
except SQLAlchemyError as e:
logger.warning(f"Failed to initialize database at {self.db_path}: {e}")
# Try fallback locations if the primary location fails
return self._initialize_with_fallback()
except Exception as e: except Exception as e:
logger.error(f"Failed to initialize database manager: {e}") logger.error(f"Failed to initialize database manager: {e}")
return False return False
def _initialize_with_fallback(self) -> bool:
    """Try to initialize the database in a sequence of fallback locations.

    Attempted in order: the current working directory, the user's home
    directory (hidden file), and finally the system temp directory.
    The first location that yields a working engine, schema, migrations,
    and default data wins; ``self.db_path`` is updated to point at it.

    Returns:
        True on success at any fallback location; False when every
        location fails, or when migrations fail at an otherwise-working
        location (in that case ``self._initialized`` is reset to False).
    """
    import tempfile  # local import: only needed on this fallback path

    fallback_paths = [
        # Try current working directory
        Path.cwd() / "mbetterclient.db",
        # Try user's home directory
        Path.home() / ".mbetterclient.db",
        # Try the platform temp directory as last resort (portable,
        # unlike a hard-coded /tmp which does not exist on Windows)
        Path(tempfile.gettempdir()) / "mbetterclient.db",
    ]

    for fallback_path in fallback_paths:
        try:
            logger.info(f"Trying fallback database location: {fallback_path}")

            # Ensure fallback directory exists
            fallback_path.parent.mkdir(parents=True, exist_ok=True)

            # Create engine with proper SQLite configuration
            db_url = f"sqlite:///{fallback_path}"
            self.engine = create_engine(
                db_url,
                echo=False,
                pool_pre_ping=True,
                connect_args={
                    'check_same_thread': False,
                    'timeout': 30
                }
            )

            # Configure SQLite for better performance and reliability
            with self.engine.connect() as conn:
                conn.execute(text("PRAGMA journal_mode=WAL"))
                conn.execute(text("PRAGMA synchronous=NORMAL"))
                conn.execute(text("PRAGMA cache_size=10000"))
                conn.execute(text("PRAGMA temp_store=MEMORY"))
                conn.execute(text("PRAGMA mmap_size=268435456"))  # 256MB
                conn.commit()

            # Create session factory
            self.session_factory = sessionmaker(bind=self.engine)
            self.Session = scoped_session(self.session_factory)

            # Create all tables
            Base.metadata.create_all(self.engine)

            # Mark as initialized so migrations can use get_session()
            self._initialized = True

            # Update the db_path to the working fallback path
            self.db_path = fallback_path
            logger.warning(f"Using fallback database location: {fallback_path}")

            # Run database migrations; a migration failure at a working
            # location is fatal rather than a reason to try the next path.
            if not run_migrations(self):
                logger.error("Database migrations failed")
                self._initialized = False
                return False

            # Create default admin user if none exists
            self._create_default_admin()

            # Initialize default templates
            self._initialize_default_templates()

            logger.info("Database manager initialized successfully with fallback location")
            return True

        except Exception as e:
            logger.warning(f"Fallback location {fallback_path} also failed: {e}")
            # Drop any half-constructed engine so the next attempt (or the
            # caller) does not see stale connection state.
            engine = getattr(self, "engine", None)
            if engine is not None:
                try:
                    engine.dispose()
                except Exception:
                    pass
            continue

    logger.error("All database initialization attempts failed")
    return False
def get_session(self): def get_session(self):
"""Get database session""" """Get database session"""
if not self._initialized: if not self._initialized:
......
...@@ -682,6 +682,8 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration): ...@@ -682,6 +682,8 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration):
start_time DATETIME NULL, start_time DATETIME NULL,
end_time DATETIME NULL, end_time DATETIME NULL,
result VARCHAR(255) NULL, result VARCHAR(255) NULL,
winning_outcomes TEXT NULL,
under_over_result VARCHAR(50) NULL,
done BOOLEAN DEFAULT FALSE NOT NULL, done BOOLEAN DEFAULT FALSE NOT NULL,
running BOOLEAN DEFAULT FALSE NOT NULL, running BOOLEAN DEFAULT FALSE NOT NULL,
status VARCHAR(20) DEFAULT 'pending' NOT NULL, status VARCHAR(20) DEFAULT 'pending' NOT NULL,
...@@ -696,6 +698,7 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration): ...@@ -696,6 +698,7 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration):
zip_sha1sum VARCHAR(255) NULL, zip_sha1sum VARCHAR(255) NULL,
zip_upload_status VARCHAR(20) DEFAULT 'pending', zip_upload_status VARCHAR(20) DEFAULT 'pending',
zip_upload_progress REAL DEFAULT 0.0, zip_upload_progress REAL DEFAULT 0.0,
zip_validation_status VARCHAR(20) DEFAULT 'pending',
created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL, created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP, created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
...@@ -770,8 +773,11 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration): ...@@ -770,8 +773,11 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration):
start_time DATETIME NULL, start_time DATETIME NULL,
end_time DATETIME NULL, end_time DATETIME NULL,
result VARCHAR(255) NULL, result VARCHAR(255) NULL,
winning_outcomes TEXT NULL,
under_over_result VARCHAR(50) NULL,
done BOOLEAN DEFAULT FALSE NOT NULL, done BOOLEAN DEFAULT FALSE NOT NULL,
running BOOLEAN DEFAULT FALSE NOT NULL, running BOOLEAN DEFAULT FALSE NOT NULL,
status VARCHAR(20) DEFAULT 'pending' NOT NULL,
fixture_active_time INTEGER NULL, fixture_active_time INTEGER NULL,
filename VARCHAR(1024) NOT NULL, filename VARCHAR(1024) NOT NULL,
...@@ -783,6 +789,7 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration): ...@@ -783,6 +789,7 @@ class Migration_012_RemoveFixtureIdUniqueConstraint(DatabaseMigration):
zip_sha1sum VARCHAR(255) NULL, zip_sha1sum VARCHAR(255) NULL,
zip_upload_status VARCHAR(20) DEFAULT 'pending', zip_upload_status VARCHAR(20) DEFAULT 'pending',
zip_upload_progress REAL DEFAULT 0.0, zip_upload_progress REAL DEFAULT 0.0,
zip_validation_status VARCHAR(20) DEFAULT 'pending',
created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL, created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP, created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
...@@ -2166,6 +2173,8 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration): ...@@ -2166,6 +2173,8 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
start_time DATETIME NULL, start_time DATETIME NULL,
end_time DATETIME NULL, end_time DATETIME NULL,
result VARCHAR(255) NULL, result VARCHAR(255) NULL,
winning_outcomes TEXT NULL,
under_over_result VARCHAR(50) NULL,
done BOOLEAN DEFAULT FALSE NOT NULL, done BOOLEAN DEFAULT FALSE NOT NULL,
running BOOLEAN DEFAULT FALSE NOT NULL, running BOOLEAN DEFAULT FALSE NOT NULL,
status VARCHAR(20) DEFAULT 'pending' NOT NULL, status VARCHAR(20) DEFAULT 'pending' NOT NULL,
...@@ -2180,6 +2189,7 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration): ...@@ -2180,6 +2189,7 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
zip_sha1sum VARCHAR(255) NULL, zip_sha1sum VARCHAR(255) NULL,
zip_upload_status VARCHAR(20) DEFAULT 'pending', zip_upload_status VARCHAR(20) DEFAULT 'pending',
zip_upload_progress REAL DEFAULT 0.0, zip_upload_progress REAL DEFAULT 0.0,
zip_validation_status VARCHAR(20) DEFAULT 'pending',
created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL, created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP, created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
...@@ -2258,6 +2268,8 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration): ...@@ -2258,6 +2268,8 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
start_time DATETIME NULL, start_time DATETIME NULL,
end_time DATETIME NULL, end_time DATETIME NULL,
result VARCHAR(255) NULL, result VARCHAR(255) NULL,
winning_outcomes TEXT NULL,
under_over_result VARCHAR(50) NULL,
done BOOLEAN DEFAULT FALSE NOT NULL, done BOOLEAN DEFAULT FALSE NOT NULL,
running BOOLEAN DEFAULT FALSE NOT NULL, running BOOLEAN DEFAULT FALSE NOT NULL,
status VARCHAR(20) DEFAULT 'pending' NOT NULL, status VARCHAR(20) DEFAULT 'pending' NOT NULL,
...@@ -2272,6 +2284,7 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration): ...@@ -2272,6 +2284,7 @@ class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
zip_sha1sum VARCHAR(255) NULL, zip_sha1sum VARCHAR(255) NULL,
zip_upload_status VARCHAR(20) DEFAULT 'pending', zip_upload_status VARCHAR(20) DEFAULT 'pending',
zip_upload_progress REAL DEFAULT 0.0, zip_upload_progress REAL DEFAULT 0.0,
zip_validation_status VARCHAR(20) DEFAULT 'pending',
created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL, created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP, created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
...@@ -2360,6 +2373,55 @@ class Migration_030_AddZipValidationStatus(DatabaseMigration): ...@@ -2360,6 +2373,55 @@ class Migration_030_AddZipValidationStatus(DatabaseMigration):
logger.warning("SQLite doesn't support DROP COLUMN - zip_validation_status column will remain") logger.warning("SQLite doesn't support DROP COLUMN - zip_validation_status column will remain")
return True return True
class Migration_031_AddWinningOutcomesFields(DatabaseMigration):
    """Add winning_outcomes and under_over_result fields to the matches table.

    winning_outcomes stores a JSON-encoded array of winning outcome codes
    (e.g. ["WIN1", "X1", "12"]) produced by the extraction associations;
    under_over_result stores the UNDER/OVER result string when applicable.
    """

    def __init__(self):
        super().__init__("031", "Add winning_outcomes and under_over_result fields to matches table")

    def up(self, db_manager) -> bool:
        """Add the two new columns to the matches table, skipping existing ones.

        Args:
            db_manager: Database manager exposing a SQLAlchemy ``engine``.

        Returns:
            bool: True on success (including when the columns already exist),
            False if the schema change failed.
        """
        # Column name -> SQL type; iterated so both columns share one code path
        # instead of two near-identical check/ALTER blocks.
        new_columns = {
            "winning_outcomes": "TEXT",           # JSON array of outcome codes
            "under_over_result": "VARCHAR(50)",   # plain string result
        }
        try:
            with db_manager.engine.connect() as conn:
                # PRAGMA table_info yields one row per column; index 1 is the name.
                result = conn.execute(text("PRAGMA table_info(matches)"))
                existing = {row[1] for row in result.fetchall()}

                for column, sql_type in new_columns.items():
                    if column in existing:
                        logger.info(f"{column} column already exists in matches table")
                        continue
                    conn.execute(text(f"ALTER TABLE matches ADD COLUMN {column} {sql_type}"))
                    logger.info(f"{column} column added to matches table")

                conn.commit()
            return True
        except Exception as e:
            logger.error(f"Failed to add winning outcomes fields to matches: {e}")
            return False

    def down(self, db_manager) -> bool:
        """No-op rollback: SQLite has no easy DROP COLUMN, so the columns remain."""
        logger.warning("SQLite doesn't support DROP COLUMN - winning_outcomes and under_over_result columns will remain")
        return True
# Registry of all migrations in order # Registry of all migrations in order
MIGRATIONS: List[DatabaseMigration] = [ MIGRATIONS: List[DatabaseMigration] = [
Migration_001_InitialSchema(), Migration_001_InitialSchema(),
...@@ -2392,6 +2454,7 @@ MIGRATIONS: List[DatabaseMigration] = [ ...@@ -2392,6 +2454,7 @@ MIGRATIONS: List[DatabaseMigration] = [
Migration_028_AddFixtureRefreshIntervalConfig(), Migration_028_AddFixtureRefreshIntervalConfig(),
Migration_029_ChangeMatchNumberToUniqueWithinFixture(), Migration_029_ChangeMatchNumberToUniqueWithinFixture(),
Migration_030_AddZipValidationStatus(), Migration_030_AddZipValidationStatus(),
Migration_031_AddWinningOutcomesFields(),
] ]
......
...@@ -488,7 +488,9 @@ class MatchModel(BaseModel): ...@@ -488,7 +488,9 @@ class MatchModel(BaseModel):
# Match timing and results # Match timing and results
start_time = Column(DateTime, comment='Match start time') start_time = Column(DateTime, comment='Match start time')
end_time = Column(DateTime, comment='Match end time') end_time = Column(DateTime, comment='Match end time')
result = Column(String(255), comment='Match result/outcome') result = Column(String(255), comment='Match result/outcome (main result only, e.g. RET2)')
winning_outcomes = Column(JSON, comment='Array of winning outcomes from extraction associations (e.g., ["WIN1", "X1", "12"])')
under_over_result = Column(String(50), comment='UNDER/OVER result if applicable')
done = Column(Boolean, default=False, nullable=False, comment='Match completion flag (0=pending, 1=done)') done = Column(Boolean, default=False, nullable=False, comment='Match completion flag (0=pending, 1=done)')
running = Column(Boolean, default=False, nullable=False, comment='Match running flag (0=not running, 1=running)') running = Column(Boolean, default=False, nullable=False, comment='Match running flag (0=not running, 1=running)')
status = Column(Enum('pending', 'scheduled', 'bet', 'ingame', 'done', 'cancelled', 'failed', 'paused'), default='pending', nullable=False, comment='Match status enum') status = Column(Enum('pending', 'scheduled', 'bet', 'ingame', 'done', 'cancelled', 'failed', 'paused'), default='pending', nullable=False, comment='Match status enum')
......
...@@ -118,41 +118,23 @@ class OverlayWebChannel(QObject): ...@@ -118,41 +118,23 @@ class OverlayWebChannel(QObject):
"""Send data update to JavaScript (thread-safe)""" """Send data update to JavaScript (thread-safe)"""
# Validate data before sending to prevent null emissions # Validate data before sending to prevent null emissions
if not data: if not data:
logger.warning("RESULTS DEBUG: send_data_update called with null/empty data, skipping") logger.warning("send_data_update called with null/empty data, skipping")
return return
# Debug original data before cleaning
logger.info(f"RESULTS DEBUG: OverlayWebChannel received data: {data}, type: {type(data)}")
logger.info(f"RESULTS DEBUG: OverlayWebChannel data keys: {list(data.keys()) if isinstance(data, dict) else 'not dict'}")
# Check if this data contains results information # Check if this data contains results information
has_results_data = any(key in data for key in ['outcome', 'result', 'match', 'match_id', 'fixture_id']) has_results_data = any(key in data for key in ['outcome', 'result', 'match', 'match_id', 'fixture_id'])
logger.info(f"RESULTS DEBUG: Data contains results info: {has_results_data}")
# Clean data to remove null/undefined values before sending to JavaScript # Clean data to remove null/undefined values before sending to JavaScript
cleaned_data = self._clean_data(data) cleaned_data = self._clean_data(data)
logger.info(f"RESULTS DEBUG: OverlayWebChannel cleaned data: {cleaned_data}")
if not cleaned_data: if not cleaned_data:
logger.info("RESULTS DEBUG: All data properties were null/undefined, skipping JavaScript update")
return return
# Debug what data is being sent to JavaScript
data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else []
logger.info(f"RESULTS DEBUG: OverlayWebChannel sending to JavaScript: {len(cleaned_data)} items with keys: {data_keys}")
logger.info(f"RESULTS DEBUG: Data type: {type(cleaned_data)}, Data is dict: {isinstance(cleaned_data, dict)}")
with QMutexLocker(self.mutex): with QMutexLocker(self.mutex):
self.overlay_data.update(cleaned_data) self.overlay_data.update(cleaned_data)
logger.info(f"RESULTS DEBUG: Updated overlay_data, now contains: {list(self.overlay_data.keys())}")
# Add additional validation just before emit # Add additional validation just before emit
if cleaned_data and isinstance(cleaned_data, dict) and any(v is not None for v in cleaned_data.values()): if cleaned_data and isinstance(cleaned_data, dict) and any(v is not None for v in cleaned_data.values()):
logger.info(f"RESULTS DEBUG: OverlayWebChannel emitting dataUpdated signal with: {cleaned_data}")
self.dataUpdated.emit(cleaned_data) self.dataUpdated.emit(cleaned_data)
data_keys = list(cleaned_data.keys()) if isinstance(cleaned_data, dict) else []
logger.info(f"RESULTS DEBUG: Signal emitted successfully with {len(cleaned_data)} data items: {data_keys}")
else:
logger.warning(f"RESULTS DEBUG: Prevented emission of invalid data: {cleaned_data}")
def _clean_data(self, data: Dict[str, Any]) -> Dict[str, Any]: def _clean_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
"""Clean data by removing null/undefined values before sending to JavaScript""" """Clean data by removing null/undefined values before sending to JavaScript"""
...@@ -210,19 +192,14 @@ class OverlayWebChannel(QObject): ...@@ -210,19 +192,14 @@ class OverlayWebChannel(QObject):
def getCurrentData(self) -> str: def getCurrentData(self) -> str:
"""Provide current overlay data to JavaScript via WebChannel""" """Provide current overlay data to JavaScript via WebChannel"""
try: try:
logger.info("RESULTS DEBUG: OverlayWebChannel getCurrentData called")
# Return current overlay data # Return current overlay data
current_data = dict(self.overlay_data) current_data = dict(self.overlay_data)
logger.info(f"RESULTS DEBUG: Current overlay_data keys: {list(current_data.keys())}")
logger.info(f"RESULTS DEBUG: Current overlay_data: {current_data}")
logger.info(f"RESULTS DEBUG: Returning current data to JavaScript: {current_data}")
json_result = json.dumps(current_data) json_result = json.dumps(current_data)
logger.info(f"RESULTS DEBUG: JSON result length: {len(json_result)}")
return json_result return json_result
except Exception as e: except Exception as e:
logger.error(f"RESULTS DEBUG: OverlayWebChannel Failed to get current data: {e}") logger.error(f"Failed to get current data: {e}")
import traceback import traceback
logger.error(f"RESULTS DEBUG: Full traceback: {traceback.format_exc()}") logger.error(f"Full traceback: {traceback.format_exc()}")
return json.dumps({}) return json.dumps({})
@pyqtSlot(result=str) @pyqtSlot(result=str)
...@@ -416,8 +393,7 @@ class OverlayWebChannel(QObject): ...@@ -416,8 +393,7 @@ class OverlayWebChannel(QObject):
# Get winning bets for this match # Get winning bets for this match
winning_bets = session.query(BetDetailModel).filter( winning_bets = session.query(BetDetailModel).filter(
BetDetailModel.match_id == match_id, BetDetailModel.match_id == match_id,
BetDetailModel.result == 'win', BetDetailModel.result == 'win'
BetDetailModel.active_status == True
).all() ).all()
# Convert to dictionary format for JavaScript # Convert to dictionary format for JavaScript
...@@ -467,7 +443,6 @@ class OverlayWebChannel(QObject): ...@@ -467,7 +443,6 @@ class OverlayWebChannel(QObject):
).join(MatchModel).filter( ).join(MatchModel).filter(
BetDetailModel.match_id == match_id, BetDetailModel.match_id == match_id,
BetDetailModel.result == 'win', BetDetailModel.result == 'win',
BetDetailModel.active_status == True,
MatchModel.active_status == True MatchModel.active_status == True
).group_by(BetDetailModel.outcome).all() ).group_by(BetDetailModel.outcome).all()
...@@ -3598,7 +3573,11 @@ class QtVideoPlayer(QObject): ...@@ -3598,7 +3573,11 @@ class QtVideoPlayer(QObject):
# Ensure the match ZIP file is extracted before trying to find the video # Ensure the match ZIP file is extracted before trying to find the video
logger.info(f"Ensuring ZIP file is extracted for match {match_id} before finding video") logger.info(f"Ensuring ZIP file is extracted for match {match_id} before finding video")
self._unzip_match_zip_file(match_id) unzip_success = self._unzip_match_zip_file(match_id)
if not unzip_success:
logger.error(f"ZIP extraction failed for match {match_id}, marking as failed and skipping video playback")
return
# Find the match video file from the ZIP # Find the match video file from the ZIP
logger.info(f"Looking for match video: {video_filename} for match {match_id}") logger.info(f"Looking for match video: {video_filename} for match {match_id}")
...@@ -3847,8 +3826,12 @@ class QtVideoPlayer(QObject): ...@@ -3847,8 +3826,12 @@ class QtVideoPlayer(QObject):
logger.error(f"QtPlayer: DEBUG - Full traceback: {traceback.format_exc()}") logger.error(f"QtPlayer: DEBUG - Full traceback: {traceback.format_exc()}")
return None return None
def _unzip_match_zip_file(self, match_id: int): def _unzip_match_zip_file(self, match_id: int) -> bool:
"""Unzip the ZIP file associated with a match to a temporary directory""" """Unzip the ZIP file associated with a match to a temporary directory
Returns:
bool: True if extraction was successful, False if it failed
"""
try: try:
import zipfile import zipfile
import tempfile import tempfile
...@@ -3861,7 +3844,7 @@ class QtVideoPlayer(QObject): ...@@ -3861,7 +3844,7 @@ class QtVideoPlayer(QObject):
db_manager = self._get_database_manager() db_manager = self._get_database_manager()
if not db_manager: if not db_manager:
logger.error("DEBUG: No database manager available for ZIP extraction") logger.error("DEBUG: No database manager available for ZIP extraction")
return return False
session = db_manager.get_session() session = db_manager.get_session()
try: try:
...@@ -3871,13 +3854,13 @@ class QtVideoPlayer(QObject): ...@@ -3871,13 +3854,13 @@ class QtVideoPlayer(QObject):
if not match: if not match:
logger.warning(f"DEBUG: Match {match_id} not found in database, skipping ZIP extraction") logger.warning(f"DEBUG: Match {match_id} not found in database, skipping ZIP extraction")
return return False
logger.info(f"DEBUG: Found match {match_id}, zip_filename: {match.zip_filename}") logger.info(f"DEBUG: Found match {match_id}, zip_filename: {match.zip_filename}")
if not match.zip_filename: if not match.zip_filename:
logger.info(f"DEBUG: Match {match_id} has no associated ZIP file, skipping extraction") logger.info(f"DEBUG: Match {match_id} has no associated ZIP file, skipping extraction")
return return False
# Determine ZIP file location (ZIP files are stored in the zip_files directory) # Determine ZIP file location (ZIP files are stored in the zip_files directory)
from ..config.settings import get_user_data_dir from ..config.settings import get_user_data_dir
...@@ -3889,7 +3872,11 @@ class QtVideoPlayer(QObject): ...@@ -3889,7 +3872,11 @@ class QtVideoPlayer(QObject):
if not zip_file_path.exists(): if not zip_file_path.exists():
logger.warning(f"DEBUG: ZIP file not found: {zip_file_path}") logger.warning(f"DEBUG: ZIP file not found: {zip_file_path}")
return # Mark match as failed
match.status = 'failed'
session.commit()
logger.info(f"DEBUG: Marked match {match_id} as failed due to missing ZIP file")
return False
logger.info(f"DEBUG: ZIP file size: {zip_file_path.stat().st_size} bytes") logger.info(f"DEBUG: ZIP file size: {zip_file_path.stat().st_size} bytes")
...@@ -3897,12 +3884,27 @@ class QtVideoPlayer(QObject): ...@@ -3897,12 +3884,27 @@ class QtVideoPlayer(QObject):
temp_dir = Path(tempfile.mkdtemp(prefix=f"match_{match_id}_")) temp_dir = Path(tempfile.mkdtemp(prefix=f"match_{match_id}_"))
logger.info(f"DEBUG: Created temp directory: {temp_dir}") logger.info(f"DEBUG: Created temp directory: {temp_dir}")
# Extract the ZIP file # Extract the ZIP file with error handling
logger.info(f"DEBUG: Starting ZIP extraction...") logger.info(f"DEBUG: Starting ZIP extraction...")
try:
with zipfile.ZipFile(str(zip_file_path), 'r') as zip_ref: with zipfile.ZipFile(str(zip_file_path), 'r') as zip_ref:
file_list = zip_ref.namelist() file_list = zip_ref.namelist()
logger.info(f"DEBUG: ZIP contains {len(file_list)} files: {file_list}") logger.info(f"DEBUG: ZIP contains {len(file_list)} files: {file_list}")
zip_ref.extractall(str(temp_dir)) zip_ref.extractall(str(temp_dir))
except zipfile.BadZipFile as e:
logger.error(f"DEBUG: Invalid or corrupted ZIP file for match {match_id}: {e}")
# Mark match as failed
match.status = 'failed'
session.commit()
logger.info(f"DEBUG: Marked match {match_id} as failed due to invalid ZIP file")
return False
except Exception as e:
logger.error(f"DEBUG: Error during ZIP extraction for match {match_id}: {e}")
# Mark match as failed
match.status = 'failed'
session.commit()
logger.info(f"DEBUG: Marked match {match_id} as failed due to ZIP extraction error")
return False
# Log extraction results # Log extraction results
extracted_files = list(temp_dir.rglob("*")) extracted_files = list(temp_dir.rglob("*"))
...@@ -3919,6 +3921,7 @@ class QtVideoPlayer(QObject): ...@@ -3919,6 +3921,7 @@ class QtVideoPlayer(QObject):
session.commit() session.commit()
logger.info(f"DEBUG: ZIP extraction completed for match {match_id}") logger.info(f"DEBUG: ZIP extraction completed for match {match_id}")
return True
finally: finally:
session.close() session.close()
...@@ -3927,6 +3930,7 @@ class QtVideoPlayer(QObject): ...@@ -3927,6 +3930,7 @@ class QtVideoPlayer(QObject):
logger.error(f"DEBUG: Failed to unzip ZIP file for match {match_id}: {e}") logger.error(f"DEBUG: Failed to unzip ZIP file for match {match_id}: {e}")
import traceback import traceback
logger.error(f"DEBUG: Full traceback: {traceback.format_exc()}") logger.error(f"DEBUG: Full traceback: {traceback.format_exc()}")
return False
def _find_match_video_file(self, match_id: int, video_filename: str) -> Optional[Path]: def _find_match_video_file(self, match_id: int, video_filename: str) -> Optional[Path]:
"""Find the match video file from the unzipped ZIP""" """Find the match video file from the unzipped ZIP"""
......
...@@ -214,7 +214,7 @@ ...@@ -214,7 +214,7 @@
<div class="overlay-container"> <div class="overlay-container">
<div class="message-panel" id="messagePanel"> <div class="message-panel" id="messagePanel">
<div class="message-icon" id="messageIcon">📢</div> <div class="message-icon" id="messageIcon">📢</div>
<div class="message-title" id="messageTitle">Mbetter Game</div> <div class="message-title" id="messageTitle">Townships Combat League</div>
<div class="message-content" id="messageContent">Waiting for game to start....</div> <div class="message-content" id="messageContent">Waiting for game to start....</div>
</div> </div>
</div> </div>
...@@ -222,7 +222,7 @@ ...@@ -222,7 +222,7 @@
<script> <script>
// Global variables for overlay data handling // Global variables for overlay data handling
let overlayData = {}; let overlayData = {};
let currentTitle = 'Mbetter system:'; let currentTitle = 'Townships Combat League:';
let currentMessage = 'Waiting for game to start...'; let currentMessage = 'Waiting for game to start...';
let currentIcon = '🥊'; let currentIcon = '🥊';
......
...@@ -1210,7 +1210,6 @@ ...@@ -1210,7 +1210,6 @@
betItem.innerHTML = ` betItem.innerHTML = `
<div class="bet-outcome">${outcome.outcome || 'Unknown'}</div> <div class="bet-outcome">${outcome.outcome || 'Unknown'}</div>
<div class="bet-amount">$${outcome.amount ? outcome.amount.toFixed(2) : '0.00'}</div>
`; `;
betsList.appendChild(betItem); betsList.appendChild(betItem);
......
...@@ -80,6 +80,8 @@ ...@@ -80,6 +80,8 @@
<th><i class="fas fa-target me-1"></i>Outcome</th> <th><i class="fas fa-target me-1"></i>Outcome</th>
<th><i class="fas fa-euro-sign me-1"></i>Amount</th> <th><i class="fas fa-euro-sign me-1"></i>Amount</th>
<th><i class="fas fa-flag me-1"></i>Result</th> <th><i class="fas fa-flag me-1"></i>Result</th>
<th><i class="fas fa-trophy me-1"></i>Winning Outcomes</th>
<th><i class="fas fa-balance-scale me-1"></i>Under/Over</th>
<th><i class="fas fa-cogs me-1"></i>Actions</th> <th><i class="fas fa-cogs me-1"></i>Actions</th>
</tr> </tr>
</thead> </thead>
...@@ -109,6 +111,22 @@ ...@@ -109,6 +111,22 @@
<span class="badge bg-secondary">Cancelled</span> <span class="badge bg-secondary">Cancelled</span>
{% endif %} {% endif %}
</td> </td>
<td>
{% if detail.match.winning_outcomes %}
{% for outcome in detail.match.winning_outcomes %}
<span class="badge bg-success">{{ outcome }}</span>
{% endfor %}
{% else %}
<span class="text-muted">Not available</span>
{% endif %}
</td>
<td>
{% if detail.match.under_over_result %}
<span class="badge bg-info">{{ detail.match.under_over_result }}</span>
{% else %}
<span class="text-muted">Not available</span>
{% endif %}
</td>
<td> <td>
{% if detail.result == 'pending' %} {% if detail.result == 'pending' %}
<button class="btn btn-sm btn-outline-danger btn-delete-detail" <button class="btn btn-sm btn-outline-danger btn-delete-detail"
......
...@@ -80,6 +80,8 @@ ...@@ -80,6 +80,8 @@
<th><i class="fas fa-target me-1"></i>Outcome</th> <th><i class="fas fa-target me-1"></i>Outcome</th>
<th><i class="fas fa-euro-sign me-1"></i>Amount</th> <th><i class="fas fa-euro-sign me-1"></i>Amount</th>
<th><i class="fas fa-flag me-1"></i>Result</th> <th><i class="fas fa-flag me-1"></i>Result</th>
<th><i class="fas fa-trophy me-1"></i>Winning Outcomes</th>
<th><i class="fas fa-balance-scale me-1"></i>Under/Over</th>
<th><i class="fas fa-cogs me-1"></i>Actions</th> <th><i class="fas fa-cogs me-1"></i>Actions</th>
</tr> </tr>
</thead> </thead>
...@@ -109,6 +111,22 @@ ...@@ -109,6 +111,22 @@
<span class="badge bg-secondary">Cancelled</span> <span class="badge bg-secondary">Cancelled</span>
{% endif %} {% endif %}
</td> </td>
<td>
{% if detail.match.winning_outcomes %}
{% for outcome in detail.match.winning_outcomes %}
<span class="badge bg-success">{{ outcome }}</span>
{% endfor %}
{% else %}
<span class="text-muted">Not available</span>
{% endif %}
</td>
<td>
{% if detail.match.under_over_result %}
<span class="badge bg-info">{{ detail.match.under_over_result }}</span>
{% else %}
<span class="text-muted">Not available</span>
{% endif %}
</td>
<td> <td>
{% if detail.result == 'pending' %} {% if detail.result == 'pending' %}
<button class="btn btn-sm btn-outline-danger btn-delete-detail" <button class="btn btn-sm btn-outline-danger btn-delete-detail"
......
...@@ -118,6 +118,8 @@ ...@@ -118,6 +118,8 @@
<th>Start Time</th> <th>Start Time</th>
<th>End Time</th> <th>End Time</th>
<th>Result</th> <th>Result</th>
<th>Winning Outcomes</th>
<th>Under/Over</th>
<th>Outcomes</th> <th>Outcomes</th>
<th>Actions</th> <th>Actions</th>
</tr> </tr>
...@@ -304,6 +306,20 @@ function showError(message) { ...@@ -304,6 +306,20 @@ function showError(message) {
document.getElementById('error-message').style.display = 'block'; document.getElementById('error-message').style.display = 'block';
} }
function formatWinningOutcomes(winningOutcomes) {
    // Format winning outcomes for display, tolerating unexpected shapes.
    // Missing or empty values render as 'Not available'.
    //
    // Bug fix: an empty array is truthy, so the original fell through to the
    // generic object branch and displayed the literal string "[]".
    if (Array.isArray(winningOutcomes)) {
        return winningOutcomes.length > 0 ? winningOutcomes.join(', ') : 'Not available';
    }
    if (typeof winningOutcomes === 'string' && winningOutcomes) {
        return winningOutcomes;
    }
    if (winningOutcomes && typeof winningOutcomes === 'object') {
        // Unexpected object shape (e.g. raw JSON payload) - show it verbatim.
        return JSON.stringify(winningOutcomes);
    }
    return 'Not available';
}
function renderFixtureDetails(fixture, matches) { function renderFixtureDetails(fixture, matches) {
// Basic fixture information // Basic fixture information
document.getElementById('fixture-id').textContent = fixture.fixture_id; document.getElementById('fixture-id').textContent = fixture.fixture_id;
...@@ -394,6 +410,8 @@ function renderMatchesTable(matches) { ...@@ -394,6 +410,8 @@ function renderMatchesTable(matches) {
const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set'; const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set';
const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set'; const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set';
const resultDisplay = match.result || 'Not available'; const resultDisplay = match.result || 'Not available';
const winningOutcomesDisplay = formatWinningOutcomes(match.winning_outcomes);
const underOverDisplay = match.under_over_result || 'Not available';
const outcomesCount = match.outcome_count || 0; const outcomesCount = match.outcome_count || 0;
row.innerHTML = ` row.innerHTML = `
...@@ -407,6 +425,8 @@ function renderMatchesTable(matches) { ...@@ -407,6 +425,8 @@ function renderMatchesTable(matches) {
<td><small class="text-info">${startTimeDisplay}</small></td> <td><small class="text-info">${startTimeDisplay}</small></td>
<td><small class="text-success">${endTimeDisplay}</small></td> <td><small class="text-success">${endTimeDisplay}</small></td>
<td><small class="text-muted">${resultDisplay}</small></td> <td><small class="text-muted">${resultDisplay}</small></td>
<td><small class="text-primary">${winningOutcomesDisplay}</small></td>
<td><small class="text-warning">${underOverDisplay}</small></td>
<td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td> <td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td>
<td> <td>
<a href="/matches/${match.id}/${fixtureId}" class="btn btn-sm btn-outline-primary"> <a href="/matches/${match.id}/${fixtureId}" class="btn btn-sm btn-outline-primary">
...@@ -443,6 +463,8 @@ function updateMatchesTable(matches) { ...@@ -443,6 +463,8 @@ function updateMatchesTable(matches) {
const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set'; const startTimeDisplay = match.start_time ? new Date(match.start_time).toLocaleString() : 'Not set';
const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set'; const endTimeDisplay = match.end_time ? new Date(match.end_time).toLocaleString() : 'Not set';
const resultDisplay = match.result || 'Not available'; const resultDisplay = match.result || 'Not available';
const winningOutcomesDisplay = formatWinningOutcomes(match.winning_outcomes);
const underOverDisplay = match.under_over_result || 'Not available';
const outcomesCount = match.outcome_count || 0; const outcomesCount = match.outcome_count || 0;
const newRowHTML = ` const newRowHTML = `
...@@ -456,6 +478,8 @@ function updateMatchesTable(matches) { ...@@ -456,6 +478,8 @@ function updateMatchesTable(matches) {
<td><small class="text-info">${startTimeDisplay}</small></td> <td><small class="text-info">${startTimeDisplay}</small></td>
<td><small class="text-success">${endTimeDisplay}</small></td> <td><small class="text-success">${endTimeDisplay}</small></td>
<td><small class="text-muted">${resultDisplay}</small></td> <td><small class="text-muted">${resultDisplay}</small></td>
<td><small class="text-primary">${winningOutcomesDisplay}</small></td>
<td><small class="text-warning">${underOverDisplay}</small></td>
<td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td> <td><span class="badge bg-light text-dark">${outcomesCount} outcomes</span></td>
<td> <td>
<a href="/matches/${match.id}/${fixtureId}" class="btn btn-sm btn-outline-primary"> <a href="/matches/${match.id}/${fixtureId}" class="btn btn-sm btn-outline-primary">
......
...@@ -84,6 +84,14 @@ ...@@ -84,6 +84,14 @@
<td><strong>Result:</strong></td> <td><strong>Result:</strong></td>
<td><span id="result" class="text-muted">Not available</span></td> <td><span id="result" class="text-muted">Not available</span></td>
</tr> </tr>
<tr id="winning-outcomes-row" style="display: none;">
<td><strong>Winning Outcomes:</strong></td>
<td><span id="winning-outcomes" class="badge bg-success"></span></td>
</tr>
<tr id="under-over-row" style="display: none;">
<td><strong>Under/Over:</strong></td>
<td><span id="under-over-result" class="badge bg-info"></span></td>
</tr>
<tr> <tr>
<td><strong>Fixture ID:</strong></td> <td><strong>Fixture ID:</strong></td>
<td><small class="text-muted" id="fixture-id"></small></td> <td><small class="text-muted" id="fixture-id"></small></td>
...@@ -283,6 +291,20 @@ function showError(message) { ...@@ -283,6 +291,20 @@ function showError(message) {
document.getElementById('error-message').style.display = 'block'; document.getElementById('error-message').style.display = 'block';
} }
function formatWinningOutcomes(winningOutcomes) {
    // Format winning outcomes for display, tolerating several input shapes:
    // - array  -> comma-separated string ('' for an empty array)
    // - string -> returned as-is
    // - object -> JSON-stringified as a readable fallback
    // - anything else (null, undefined, numbers, '') -> ''
    if (Array.isArray(winningOutcomes)) {
        // Handle arrays before the generic object branch: an empty array is
        // truthy with typeof 'object', and previously fell through to
        // JSON.stringify, rendering a literal "[]" in the UI.
        return winningOutcomes.join(', ');
    }
    if (typeof winningOutcomes === 'string') {
        // Empty string naturally returns '' here, matching the fallback.
        return winningOutcomes;
    }
    if (winningOutcomes && typeof winningOutcomes === 'object') {
        // Unexpected object payload — stringify so the data is still visible.
        return JSON.stringify(winningOutcomes);
    }
    return '';
}
function updateMatchDetails(match) { function updateMatchDetails(match) {
// Update status badge with highlighting // Update status badge with highlighting
const statusBadge = document.getElementById('match-status-badge'); const statusBadge = document.getElementById('match-status-badge');
...@@ -336,6 +358,42 @@ function updateMatchDetails(match) { ...@@ -336,6 +358,42 @@ function updateMatchDetails(match) {
} }
} }
// Update winning outcomes if changed
const winningOutcomesRow = document.getElementById('winning-outcomes-row');
const winningOutcomesEl = document.getElementById('winning-outcomes');
const currentWinningOutcomes = formatWinningOutcomes(match.winning_outcomes);
if (currentWinningOutcomes !== winningOutcomesEl.textContent) {
if (currentWinningOutcomes) {
winningOutcomesEl.textContent = currentWinningOutcomes;
winningOutcomesRow.style.display = 'table-row';
winningOutcomesEl.style.backgroundColor = '#d1ecf1';
setTimeout(() => {
winningOutcomesEl.style.backgroundColor = '';
}, 1000);
} else {
winningOutcomesRow.style.display = 'none';
}
}
// Update under/over result if changed
const underOverRow = document.getElementById('under-over-row');
const underOverEl = document.getElementById('under-over-result');
const currentUnderOver = match.under_over_result || '';
if (currentUnderOver !== underOverEl.textContent) {
if (currentUnderOver) {
underOverEl.textContent = currentUnderOver;
underOverRow.style.display = 'table-row';
underOverEl.style.backgroundColor = '#d1ecf1';
setTimeout(() => {
underOverEl.style.backgroundColor = '';
}, 1000);
} else {
underOverRow.style.display = 'none';
}
}
// Update upload status // Update upload status
const uploadStatusEl = document.getElementById('upload-status'); const uploadStatusEl = document.getElementById('upload-status');
const newUploadStatus = getUploadStatusBadge(match); const newUploadStatus = getUploadStatusBadge(match);
...@@ -388,6 +446,19 @@ function renderMatchDetails(match) { ...@@ -388,6 +446,19 @@ function renderMatchDetails(match) {
document.getElementById('result').classList.remove('text-muted'); document.getElementById('result').classList.remove('text-muted');
} }
// Winning outcomes
const winningOutcomesText = formatWinningOutcomes(match.winning_outcomes);
if (winningOutcomesText) {
document.getElementById('winning-outcomes').textContent = winningOutcomesText;
document.getElementById('winning-outcomes-row').style.display = 'table-row';
}
// Under/Over result
if (match.under_over_result) {
document.getElementById('under-over-result').textContent = match.under_over_result;
document.getElementById('under-over-row').style.display = 'table-row';
}
// File information // File information
document.getElementById('filename').textContent = match.filename; document.getElementById('filename').textContent = match.filename;
document.getElementById('file-sha1sum').textContent = match.file_sha1sum; document.getElementById('file-sha1sum').textContent = match.file_sha1sum;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment