Match creation and cleanup of old matches now working correctly

parent 4e06f4e7
......@@ -28,14 +28,14 @@ class GamesThread(ThreadedComponent):
self.message_queue = None
def _cleanup_stale_ingame_matches(self):
"""Clean up any stale 'ingame' matches from previous crashed sessions"""
"""Clean up any stale 'ingame' matches from previous crashed sessions and old 'bet' fixtures"""
try:
session = self.db_manager.get_session()
try:
# Get today's date
today = datetime.now().date()
-# Find all ingame matches from today that might be stale
+# PART 1: Clean up stale 'ingame' matches from today (existing logic)
stale_matches = session.query(MatchModel).filter(
MatchModel.start_time.isnot(None),
MatchModel.start_time >= datetime.combine(today, datetime.min.time()),
......@@ -44,26 +44,63 @@ class GamesThread(ThreadedComponent):
MatchModel.active_status == True
).all()
-if not stale_matches:
-    logger.info("No stale ingame matches found")
-    return
-logger.info(f"Found {len(stale_matches)} stale ingame matches - cleaning up")
-# Change status to pending and set active_status to False
-for match in stale_matches:
-    logger.info(f"Cleaning up stale match {match.match_number}: {match.fighter1_township} vs {match.fighter2_township}")
-    match.status = 'pending'
-    match.active_status = False
-session.commit()
-logger.info(f"Cleaned up {len(stale_matches)} stale ingame matches")
+if stale_matches:
+    logger.info(f"Found {len(stale_matches)} stale ingame matches - cleaning up")
+    for match in stale_matches:
+        logger.info(f"Cleaning up stale match {match.match_number}: {match.fighter1_township} vs {match.fighter2_township}")
+        match.status = 'pending'
+        match.active_status = False
+    session.commit()
+    logger.info(f"Cleaned up {len(stale_matches)} stale ingame matches")
+else:
+    logger.info("No stale ingame matches found")
+# PART 2: Clean up ALL old 'bet' fixtures (new logic)
+old_bet_matches = session.query(MatchModel).filter(
+    MatchModel.status == 'bet',
+    MatchModel.active_status == True,
+    # Exclude today's matches to avoid interfering with active games
+    ~MatchModel.start_time.between(
+        datetime.combine(today, datetime.min.time()),
+        datetime.combine(today, datetime.max.time())
+    )
+).all()
+if old_bet_matches:
+    logger.info(f"Found {len(old_bet_matches)} old 'bet' matches - cancelling them")
+    for match in old_bet_matches:
+        logger.info(f"Cancelling old bet match {match.match_number}: {match.fighter1_township} vs {match.fighter2_township}")
+        match.status = 'cancelled'
+        # Cancel/refund associated bets
+        self._cancel_match_bets(match.id, session)
+    session.commit()
+    logger.info(f"Cancelled {len(old_bet_matches)} old bet matches")
+else:
+    logger.info("No old bet matches found to cancel")
finally:
session.close()
except Exception as e:
logger.error(f"Failed to cleanup stale ingame matches: {e}")
logger.error(f"Failed to cleanup stale matches: {e}")
def _cancel_match_bets(self, match_id: int, session):
"""Cancel all pending bets for a match"""
try:
# Update all pending bets for this match to 'cancelled'
cancelled_count = session.query(BetDetailModel).filter(
BetDetailModel.match_id == match_id,
BetDetailModel.result == 'pending'
).update({'result': 'cancelled'})
if cancelled_count > 0:
logger.info(f"Cancelled {cancelled_count} pending bets for match {match_id}")
except Exception as e:
logger.error(f"Failed to cancel bets for match {match_id}: {e}")
def initialize(self) -> bool:
"""Initialize the games thread"""
......@@ -836,6 +873,14 @@ class GamesThread(ThreadedComponent):
logger.info(f"🎬 Dispatching START_INTRO message for fixture {fixture_id}")
self._dispatch_start_intro(fixture_id)
# Broadcast GAME_STARTED message to notify all components that game has started with this fixture
game_started_message = MessageBuilder.game_started(
sender=self.name,
fixture_id=fixture_id
)
self.message_bus.publish(game_started_message, broadcast=True)
logger.info(f"🎯 Broadcast GAME_STARTED message for fixture {fixture_id}")
# Refresh dashboard statuses
self._refresh_dashboard_statuses()
......@@ -1881,11 +1926,11 @@ class GamesThread(ThreadedComponent):
logger.error(f"Failed to send NEXT_MATCH: {e}")
def _select_random_completed_matches(self, count: int, session) -> List[MatchModel]:
"""Select random completed matches from the database"""
"""Select random completed matches from the database (including cancelled and failed)"""
try:
# Get all completed matches (status = 'done')
# Get all completed matches (status = 'done', 'cancelled', or 'failed')
completed_matches = session.query(MatchModel).filter(
MatchModel.status == 'done',
MatchModel.status.in_(['done', 'cancelled', 'failed']),
MatchModel.active_status == True
).all()
......@@ -1907,7 +1952,13 @@ class GamesThread(ThreadedComponent):
"""Create new matches in the fixture by copying from old completed matches"""
try:
now = datetime.utcnow()
-match_number = 1
+# Find the maximum match_number in the fixture and increment from there
+max_match_number = session.query(MatchModel.match_number).filter(
+    MatchModel.fixture_id == fixture_id
+).order_by(MatchModel.match_number.desc()).first()
+match_number = (max_match_number[0] + 1) if max_match_number else 1
for old_match in old_matches:
# Create a new match based on the old one
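An equivalent way to derive the per-fixture numbering, shown as a self-contained sketch against an in-memory SQLite database with a trimmed-down model; it uses func.max rather than the order_by()/first() form in the commit, and all names below are illustrative.

from sqlalchemy import create_engine, Column, Integer, String, func
from sqlalchemy.orm import declarative_base, Session

Base = declarative_base()

class Match(Base):  # trimmed-down stand-in for MatchModel
    __tablename__ = "matches"
    id = Column(Integer, primary_key=True)
    fixture_id = Column(String, nullable=False)
    match_number = Column(Integer, nullable=False)

def next_match_number(session, fixture_id):
    """Next match_number within a fixture: current maximum + 1, or 1 for an empty fixture."""
    current_max = session.query(func.max(Match.match_number)).filter(
        Match.fixture_id == fixture_id
    ).scalar()
    return (current_max or 0) + 1

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add_all([Match(fixture_id="f1", match_number=1), Match(fixture_id="f1", match_number=2)])
    session.commit()
    print(next_match_number(session, "f1"))  # -> 3
    print(next_match_number(session, "f2"))  # -> 1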
......@@ -1959,6 +2010,7 @@ class GamesThread(ThreadedComponent):
fixture_id = f"recycle_{uuid.uuid4().hex[:8]}"
now = datetime.utcnow()
# For a new fixture, start match_number from 1
match_number = 1
for old_match in old_matches:
# Create a new match based on the old one
......
......@@ -40,7 +40,7 @@ class MatchTimerComponent(ThreadedComponent):
self.message_bus.register_component(self.name)
# Register message handlers
-self.message_bus.subscribe(self.name, MessageType.START_GAME, self._handle_start_game)
+self.message_bus.subscribe(self.name, MessageType.GAME_STARTED, self._handle_game_started)
self.message_bus.subscribe(self.name, MessageType.SCHEDULE_GAMES, self._handle_schedule_games)
self.message_bus.subscribe(self.name, MessageType.CUSTOM, self._handle_custom_message)
self.message_bus.subscribe(self.name, MessageType.NEXT_MATCH, self._handle_next_match)
......@@ -106,8 +106,8 @@ class MatchTimerComponent(ThreadedComponent):
logger.debug(f"MatchTimer processing message: {message}")
# Handle messages directly since some messages don't trigger subscription handlers
-if message.type == MessageType.START_GAME:
-    self._handle_start_game(message)
+if message.type == MessageType.GAME_STARTED:
+    self._handle_game_started(message)
elif message.type == MessageType.SCHEDULE_GAMES:
self._handle_schedule_games(message)
elif message.type == MessageType.CUSTOM:
......@@ -157,12 +157,12 @@ class MatchTimerComponent(ThreadedComponent):
"elapsed_seconds": int(elapsed)
}
-def _handle_start_game(self, message: Message):
-    """Handle START_GAME message"""
+def _handle_game_started(self, message: Message):
+    """Handle GAME_STARTED message"""
    try:
        fixture_id = message.data.get("fixture_id")
-        logger.info(f"Received START_GAME message for fixture: {fixture_id}")
+        logger.info(f"Received GAME_STARTED message for fixture: {fixture_id}")
# Get match interval from configuration
match_interval = self._get_match_interval()
......@@ -171,7 +171,7 @@ class MatchTimerComponent(ThreadedComponent):
self._start_timer(match_interval * 60, fixture_id)
except Exception as e:
logger.error(f"Failed to handle START_GAME message: {e}")
logger.error(f"Failed to handle GAME_STARTED message: {e}")
def _handle_schedule_games(self, message: Message):
"""Handle SCHEDULE_GAMES message"""
......@@ -435,9 +435,8 @@ class MatchTimerComponent(ThreadedComponent):
}
)
-# Send to web dashboard for broadcasting to clients
-update_message.recipient = "web_dashboard"
-self.message_bus.publish(update_message)
+# Broadcast to all components including qt_player and web_dashboard
+self.message_bus.publish(update_message, broadcast=True)
except Exception as e:
logger.error(f"Failed to send timer update: {e}")
\ No newline at end of file
......@@ -62,6 +62,7 @@ class MessageType(Enum):
# Game messages
START_GAME = "START_GAME"
GAME_STARTED = "GAME_STARTED"
SCHEDULE_GAMES = "SCHEDULE_GAMES"
START_GAME_DELAYED = "START_GAME_DELAYED"
START_INTRO = "START_INTRO"
......@@ -572,6 +573,17 @@ class MessageBuilder:
}
)
@staticmethod
def game_started(sender: str, fixture_id: str) -> Message:
"""Create GAME_STARTED message"""
return Message(
type=MessageType.GAME_STARTED,
sender=sender,
data={
"fixture_id": fixture_id
}
)
@staticmethod
def schedule_games(sender: str, fixture_id: Optional[str] = None) -> Message:
"""Create SCHEDULE_GAMES message"""
......
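To make the publish/subscribe shape concrete, here is a toy, self-contained stand-in for the bus (not the project's real MessageBus) showing why the new GAME_STARTED broadcast reaches both the timer and the dashboard; the component names and fixture id are examples.

from collections import defaultdict

class ToyBus:
    """Illustrative bus: targeted delivery by default, broadcast delivery on request."""
    def __init__(self):
        self._handlers = defaultdict(list)  # message type -> list of (component, handler)

    def subscribe(self, component, msg_type, handler):
        self._handlers[msg_type].append((component, handler))

    def publish(self, msg_type, data, broadcast=False, recipient=None):
        # broadcast=True delivers to every subscriber, which is what GAME_STARTED
        # and the timer updates rely on after this commit.
        for component, handler in self._handlers[msg_type]:
            if broadcast or component == recipient:
                handler(data)

bus = ToyBus()
bus.subscribe("match_timer", "GAME_STARTED", lambda d: print("timer starts for", d["fixture_id"]))
bus.subscribe("web_dashboard", "GAME_STARTED", lambda d: print("dashboard notified for", d["fixture_id"]))
bus.publish("GAME_STARTED", {"fixture_id": "recycle_1a2b3c4d"}, broadcast=True)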
......@@ -2140,6 +2140,182 @@ class Migration_028_AddFixtureRefreshIntervalConfig(DatabaseMigration):
logger.error(f"Failed to remove fixture refresh interval configuration: {e}")
return False
class Migration_029_ChangeMatchNumberToUniqueWithinFixture(DatabaseMigration):
"""Change match_number from globally unique to unique within fixture"""
def __init__(self):
super().__init__("029", "Change match_number from globally unique to unique within fixture")
def up(self, db_manager) -> bool:
"""Change match_number constraint from global uniqueness to unique within fixture"""
try:
with db_manager.engine.connect() as conn:
# SQLite doesn't support ALTER TABLE DROP CONSTRAINT directly
# We need to recreate the table with the new constraint
# Step 1: Create new table with correct constraint
conn.execute(text("""
CREATE TABLE IF NOT EXISTS matches_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
match_number INTEGER NOT NULL,
fighter1_township VARCHAR(255) NOT NULL,
fighter2_township VARCHAR(255) NOT NULL,
venue_kampala_township VARCHAR(255) NOT NULL,
start_time DATETIME NULL,
end_time DATETIME NULL,
result VARCHAR(255) NULL,
done BOOLEAN DEFAULT FALSE NOT NULL,
running BOOLEAN DEFAULT FALSE NOT NULL,
status VARCHAR(20) DEFAULT 'pending' NOT NULL,
fixture_active_time INTEGER NULL,
filename VARCHAR(1024) NOT NULL,
file_sha1sum VARCHAR(255) NOT NULL,
fixture_id VARCHAR(255) NOT NULL,
active_status BOOLEAN DEFAULT FALSE,
zip_filename VARCHAR(1024) NULL,
zip_sha1sum VARCHAR(255) NULL,
zip_upload_status VARCHAR(20) DEFAULT 'pending',
zip_upload_progress REAL DEFAULT 0.0,
created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
UNIQUE(fixture_id, match_number)
)
"""))
# Step 2: Copy data from old table to new table
conn.execute(text("""
INSERT INTO matches_new
SELECT * FROM matches
"""))
# Step 3: Drop old table
conn.execute(text("DROP TABLE matches"))
# Step 4: Rename new table to original name
conn.execute(text("ALTER TABLE matches_new RENAME TO matches"))
# Step 5: Recreate indexes (without the old global unique constraint)
indexes = [
"CREATE INDEX IF NOT EXISTS ix_matches_match_number ON matches(match_number)",
"CREATE INDEX IF NOT EXISTS ix_matches_fixture_id ON matches(fixture_id)",
"CREATE INDEX IF NOT EXISTS ix_matches_active_status ON matches(active_status)",
"CREATE INDEX IF NOT EXISTS ix_matches_file_sha1sum ON matches(file_sha1sum)",
"CREATE INDEX IF NOT EXISTS ix_matches_zip_sha1sum ON matches(zip_sha1sum)",
"CREATE INDEX IF NOT EXISTS ix_matches_zip_upload_status ON matches(zip_upload_status)",
"CREATE INDEX IF NOT EXISTS ix_matches_created_by ON matches(created_by)",
"CREATE INDEX IF NOT EXISTS ix_matches_fixture_active_time ON matches(fixture_active_time)",
"CREATE INDEX IF NOT EXISTS ix_matches_done ON matches(done)",
"CREATE INDEX IF NOT EXISTS ix_matches_running ON matches(running)",
"CREATE INDEX IF NOT EXISTS ix_matches_status ON matches(status)",
"CREATE INDEX IF NOT EXISTS ix_matches_composite ON matches(active_status, zip_upload_status, created_at)",
]
for index_sql in indexes:
conn.execute(text(index_sql))
conn.commit()
logger.info("Changed match_number constraint from globally unique to unique within fixture")
return True
except Exception as e:
logger.error(f"Failed to change match_number constraint: {e}")
return False
def down(self, db_manager) -> bool:
"""Revert match_number constraint back to globally unique"""
try:
with db_manager.engine.connect() as conn:
# Check if any match_number is reused across fixtures, which would
# prevent adding back the global unique constraint
result = conn.execute(text("""
SELECT match_number, COUNT(*) as count
FROM matches
GROUP BY match_number
HAVING COUNT(*) > 1
"""))
duplicates = result.fetchall()
if duplicates:
logger.error(f"Cannot revert to global unique constraint - duplicate match_numbers found across fixtures: {[row[0] for row in duplicates]}")
return False
# Recreate table with global unique constraint on match_number
conn.execute(text("""
CREATE TABLE IF NOT EXISTS matches_new (
id INTEGER PRIMARY KEY AUTOINCREMENT,
match_number INTEGER NOT NULL UNIQUE,
fighter1_township VARCHAR(255) NOT NULL,
fighter2_township VARCHAR(255) NOT NULL,
venue_kampala_township VARCHAR(255) NOT NULL,
start_time DATETIME NULL,
end_time DATETIME NULL,
result VARCHAR(255) NULL,
done BOOLEAN DEFAULT FALSE NOT NULL,
running BOOLEAN DEFAULT FALSE NOT NULL,
status VARCHAR(20) DEFAULT 'pending' NOT NULL,
fixture_active_time INTEGER NULL,
filename VARCHAR(1024) NOT NULL,
file_sha1sum VARCHAR(255) NOT NULL,
fixture_id VARCHAR(255) NOT NULL,
active_status BOOLEAN DEFAULT FALSE,
zip_filename VARCHAR(1024) NULL,
zip_sha1sum VARCHAR(255) NULL,
zip_upload_status VARCHAR(20) DEFAULT 'pending',
zip_upload_progress REAL DEFAULT 0.0,
created_by INTEGER NULL REFERENCES users(id) ON DELETE SET NULL,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
)
"""))
# Copy data from old table to new table
conn.execute(text("""
INSERT INTO matches_new
SELECT * FROM matches
"""))
# Drop old table and rename new table
conn.execute(text("DROP TABLE matches"))
conn.execute(text("ALTER TABLE matches_new RENAME TO matches"))
# Recreate indexes
indexes = [
"CREATE INDEX IF NOT EXISTS ix_matches_match_number ON matches(match_number)",
"CREATE INDEX IF NOT EXISTS ix_matches_fixture_id ON matches(fixture_id)",
"CREATE INDEX IF NOT EXISTS ix_matches_active_status ON matches(active_status)",
"CREATE INDEX IF NOT EXISTS ix_matches_file_sha1sum ON matches(file_sha1sum)",
"CREATE INDEX IF NOT EXISTS ix_matches_zip_sha1sum ON matches(zip_sha1sum)",
"CREATE INDEX IF NOT EXISTS ix_matches_zip_upload_status ON matches(zip_upload_status)",
"CREATE INDEX IF NOT EXISTS ix_matches_created_by ON matches(created_by)",
"CREATE INDEX IF NOT EXISTS ix_matches_fixture_active_time ON matches(fixture_active_time)",
"CREATE INDEX IF NOT EXISTS ix_matches_done ON matches(done)",
"CREATE INDEX IF NOT EXISTS ix_matches_running ON matches(running)",
"CREATE INDEX IF NOT EXISTS ix_matches_status ON matches(status)",
"CREATE INDEX IF NOT EXISTS ix_matches_composite ON matches(active_status, zip_upload_status, created_at)",
]
for index_sql in indexes:
conn.execute(text(index_sql))
conn.commit()
logger.info("Reverted match_number constraint back to globally unique")
return True
except Exception as e:
logger.error(f"Failed to revert match_number constraint: {e}")
return False
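A standalone sanity check of the new constraint semantics (illustrative only, not part of the migration), run against an in-memory SQLite database: the same match_number may now appear in different fixtures, but not twice within one fixture.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("""
    CREATE TABLE matches (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        fixture_id VARCHAR(255) NOT NULL,
        match_number INTEGER NOT NULL,
        UNIQUE(fixture_id, match_number)
    )
""")
conn.execute("INSERT INTO matches (fixture_id, match_number) VALUES ('fixture_a', 1)")
conn.execute("INSERT INTO matches (fixture_id, match_number) VALUES ('fixture_b', 1)")  # allowed now
try:
    conn.execute("INSERT INTO matches (fixture_id, match_number) VALUES ('fixture_a', 1)")
except sqlite3.IntegrityError as exc:
    print("rejected duplicate within a fixture:", exc)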
# Registry of all migrations in order
MIGRATIONS: List[DatabaseMigration] = [
Migration_001_InitialSchema(),
......@@ -2170,6 +2346,7 @@ MIGRATIONS: List[DatabaseMigration] = [
Migration_026_AddExtractionStatsTable(),
Migration_027_AddDefaultIntroTemplatesConfig(),
Migration_028_AddFixtureRefreshIntervalConfig(),
Migration_029_ChangeMatchNumberToUniqueWithinFixture(),
]
......
......@@ -475,11 +475,11 @@ class MatchModel(BaseModel):
Index('ix_matches_created_by', 'created_by'),
Index('ix_matches_fixture_active_time', 'fixture_active_time'),
Index('ix_matches_composite', 'active_status', 'zip_upload_status', 'created_at'),
-UniqueConstraint('match_number', name='uq_matches_match_number'),
+UniqueConstraint('fixture_id', 'match_number', name='uq_matches_fixture_match'),
)
# Core match data from fixture file
-match_number = Column(Integer, nullable=False, unique=True, comment='Match # from fixture file')
+match_number = Column(Integer, nullable=False, comment='Match # from fixture file')
fighter1_township = Column(String(255), nullable=False, comment='Fighter1 (Township)')
fighter2_township = Column(String(255), nullable=False, comment='Fighter2 (Township)')
venue_kampala_township = Column(String(255), nullable=False, comment='Venue (Kampala Township)')
......
......@@ -55,11 +55,18 @@ class OverlayWebChannel(QObject):
# Signal to receive console messages from JavaScript
consoleMessage = pyqtSignal(str, str, int, str) # level, message, line, source
-def __init__(self, db_manager=None):
+def __init__(self, db_manager=None, message_bus=None):
super().__init__()
self.mutex = QMutex()
self.overlay_data = {}
self.db_manager = db_manager
self.message_bus = message_bus
self.timer_state = {"running": False, "remaining_seconds": 0}
# Subscribe to timer updates if message bus is available
if self.message_bus:
self.message_bus.subscribe("qt_player", MessageType.CUSTOM, self._handle_timer_update)
logger.info("OverlayWebChannel initialized")
@pyqtSlot(str)
......@@ -203,6 +210,33 @@ class OverlayWebChannel(QObject):
logger.error(f"Failed to get fixture data: {e}")
return json.dumps([])
def _handle_timer_update(self, message: Message):
"""Handle timer update messages"""
try:
logger.debug(f"OverlayWebChannel received message: {message.type} from {message.sender}")
logger.debug(f"Message data: {message.data}")
if message.data.get("timer_update"):
timer_update = message.data["timer_update"]
with QMutexLocker(self.mutex):
self.timer_state = timer_update
logger.debug(f"Timer state updated: {timer_update}")
else:
logger.debug("Message does not contain timer_update")
except Exception as e:
logger.error(f"Failed to handle timer update: {e}")
@pyqtSlot(result=str)
def getTimerState(self) -> str:
"""Provide current cached timer state to JavaScript via WebChannel"""
try:
with QMutexLocker(self.mutex):
timer_state = self.timer_state.copy()
logger.debug(f"Providing cached timer state to JavaScript: {timer_state}")
return json.dumps(timer_state)
except Exception as e:
logger.error(f"Failed to get timer state: {e}")
return json.dumps({"running": False, "remaining_seconds": 0})
def _get_fixture_data_from_games_thread(self) -> Optional[List[Dict[str, Any]]]:
"""Get fixture data from the games thread"""
try:
......@@ -302,6 +336,7 @@ class OverlayWebChannel(QObject):
class VideoProcessingWorker(QRunnable):
"""Background worker for video processing tasks"""
......@@ -427,7 +462,11 @@ class OverlayWebView(QWebEngineView):
# Setup WebChannel
self.web_channel = QWebChannel()
-self.overlay_channel = OverlayWebChannel(db_manager=self.db_manager)
+# Get message bus from parent window
+message_bus = None
+if hasattr(self.parent(), '_message_bus'):
+    message_bus = self.parent()._message_bus
+self.overlay_channel = OverlayWebChannel(db_manager=self.db_manager, message_bus=message_bus)
self.web_channel.registerObject("overlay", self.overlay_channel)
page.setWebChannel(self.web_channel)
......@@ -659,15 +698,16 @@ class OverlayWebView(QWebEngineView):
from PyQt6.QtCore import QTimer
QTimer.singleShot(100, lambda: self._ensure_overlay_visibility_post_load(was_visible))
-# If fixtures template was loaded, the template handles its own data fetching via JavaScript
-if template_name == "fixtures.html" or template_name == "fixtures":
-    logger.info("Fixtures template loaded - template handles its own data fetching via JavaScript API calls")
-    # Send webServerBaseUrl to the fixtures template for API calls
-    logger.info(f"Sending webServerBaseUrl to fixtures template: {self.web_server_url}")
+# If fixtures or match template was loaded, the template handles its own data fetching via WebChannel
+if template_name == "fixtures.html" or template_name == "fixtures" or template_name == "match.html" or template_name == "match":
+    template_type = "fixtures" if ("fixtures" in template_name) else "match"
+    logger.info(f"{template_type.title()} template loaded - template handles its own data fetching via WebChannel")
+    # Send webServerBaseUrl to the template for WebChannel setup
+    logger.info(f"Sending webServerBaseUrl to {template_type} template: {self.web_server_url}")
    data_to_send = {'webServerBaseUrl': self.web_server_url}
    if self.debug_overlay:
        data_to_send['debugMode'] = True
-        logger.info("Debug mode enabled for fixtures template")
+        logger.info(f"Debug mode enabled for {template_type} template")
self.update_overlay_data(data_to_send)
# Ensure console override is active after template load
......@@ -2918,6 +2958,10 @@ class QtVideoPlayer(QObject):
if self.debug_player:
logger.info("Calling _handle_play_video_result handler")
self._handle_play_video_result(message)
elif message.type == MessageType.CUSTOM:
if self.debug_player:
logger.info("Calling _handle_custom_message handler")
self._handle_custom_message(message)
else:
if self.debug_player:
logger.warning(f"No handler for message type: {message.type.value}")
......@@ -3945,6 +3989,27 @@ class QtVideoPlayer(QObject):
logger.info("QtPlayer: System status handling failed, trying to play intro directly")
self._check_and_play_intro()
def _handle_custom_message(self, message: Message):
"""Handle custom messages, including timer updates for WebChannel"""
try:
# Forward timer update messages to the OverlayWebChannel
if message.data.get("timer_update"):
logger.debug(f"QtPlayer: Forwarding timer update to OverlayWebChannel")
if hasattr(self, 'window') and self.window and hasattr(self.window, 'window_overlay'):
overlay_view = self.window.window_overlay
if isinstance(overlay_view, OverlayWebView) and hasattr(overlay_view, 'overlay_channel'):
overlay_view.overlay_channel._handle_timer_update(message)
logger.debug("QtPlayer: Timer update forwarded to OverlayWebChannel")
else:
logger.debug("QtPlayer: No OverlayWebView or overlay_channel available for timer update")
else:
logger.debug("QtPlayer: No window or window_overlay available for timer update")
else:
logger.debug(f"QtPlayer: Received custom message without timer_update: {message.data}")
except Exception as e:
logger.error(f"QtPlayer: Failed to handle custom message: {e}")
def _handle_web_dashboard_ready(self, message: Message):
"""Handle web dashboard ready messages to update server URL"""
try:
......
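The timer data that travels along this path (MatchTimerComponent broadcast, QtVideoPlayer forward, OverlayWebChannel cache) is a small dict. The following self-contained snippet shows its shape and the JSON string getTimerState() would hand back to fixtures.html; the field names come from the diff, the values are made up.

import json

# CUSTOM message payload broadcast by MatchTimerComponent and forwarded by QtVideoPlayer.
message_data = {"timer_update": {"running": True, "remaining_seconds": 120, "elapsed_seconds": 480}}

# OverlayWebChannel caches the inner dict; getTimerState() serialises it for JavaScript,
# falling back to a stopped timer when nothing has arrived yet.
timer_state = message_data.get("timer_update") or {"running": False, "remaining_seconds": 0}
print(json.dumps(timer_state))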
......@@ -105,13 +105,14 @@
border-radius: 20px;
padding: 30px;
max-width: 90%;
-max-height: 80%;
-overflow-y: auto;
+max-height: 85%; /* Increased from 80% to allow more space */
+overflow: visible; /* Changed from overflow-y: auto to visible to prevent scrollbar */
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.3);
backdrop-filter: blur(10px);
border: 2px solid rgba(255, 255, 255, 0.1);
opacity: 0;
animation: fadeInScale 1s ease-out forwards;
padding-bottom: 50px; /* Add extra bottom padding to ensure content doesn't touch border */
}
.fixtures-title {
......@@ -315,24 +316,6 @@
}
}
-/* Scrollbar styling */
-.fixtures-panel::-webkit-scrollbar {
-    width: 8px;
-}
-.fixtures-panel::-webkit-scrollbar-track {
-    background: rgba(255, 255, 255, 0.1);
-    border-radius: 4px;
-}
-.fixtures-panel::-webkit-scrollbar-thumb {
-    background: rgba(255, 255, 255, 0.3);
-    border-radius: 4px;
-}
-.fixtures-panel::-webkit-scrollbar-thumb:hover {
-    background: rgba(255, 255, 255, 0.5);
-}
</style>
</head>
<body>
......@@ -770,46 +753,47 @@
];
}
-// Find next match and start countdown
-function findNextMatchAndStartCountdown() {
-    if (!fixturesData || fixturesData.length === 0) {
-        return;
-    }
-    // Clear any existing countdown
-    if (countdownInterval) {
-        clearInterval(countdownInterval);
-        countdownInterval = null;
-    }
-    const now = new Date();
-    let nextMatch = null;
-    let earliestTime = null;
-    // Find the match with the earliest start time that hasn't started yet
-    for (const match of fixturesData) {
-        if (match.start_time) {
-            const startTime = new Date(match.start_time);
-            if (startTime > now && (!earliestTime || startTime < earliestTime)) {
-                earliestTime = startTime;
-                nextMatch = match;
-            }
-        }
-    }
-    if (nextMatch && earliestTime) {
-        nextMatchStartTime = earliestTime;
-        // Show next match info
-        const nextMatchInfo = document.getElementById('nextMatchInfo');
-        nextMatchInfo.textContent = `Next: ${nextMatch.fighter1_township || nextMatch.fighter1} vs ${nextMatch.fighter2_township || nextMatch.fighter2}`;
-        nextMatchInfo.style.display = 'block';
-        // Start countdown
-        updateCountdown();
-        countdownInterval = setInterval(updateCountdown, 1000);
-    } else {
-        // No upcoming matches, hide countdown
+// Get timer state and start countdown
+async function getTimerStateAndStartCountdown() {
+    console.log('🔍 DEBUG: getTimerStateAndStartCountdown called');
+    try {
+        // Get timer state from WebChannel
+        const timerStateJson = await window.overlay.getTimerState();
+        console.log('🔍 DEBUG: Raw timer state JSON:', timerStateJson);
+        const timerState = JSON.parse(timerStateJson);
+        console.log('🔍 DEBUG: Parsed timer state:', timerState);
+        // Clear any existing countdown
+        if (countdownInterval) {
+            clearInterval(countdownInterval);
+            countdownInterval = null;
+        }
+        if (timerState.running && timerState.remaining_seconds > 0) {
+            // Timer is running, show countdown
+            nextMatchStartTime = new Date(Date.now() + (timerState.remaining_seconds * 1000));
+            // Show next match info (generic message since we don't know which match)
+            const nextMatchInfo = document.getElementById('nextMatchInfo');
+            nextMatchInfo.textContent = `Next match starting in:`;
+            nextMatchInfo.style.display = 'block';
+            console.log('🔍 DEBUG: Timer countdown displayed');
+            // Start countdown
+            updateCountdown();
+            countdownInterval = setInterval(updateCountdown, 1000);
+            console.log('🔍 DEBUG: Countdown started with timer state');
+        } else {
+            // No active timer, hide countdown
+            document.getElementById('nextMatchInfo').style.display = 'none';
+            document.getElementById('countdownTimer').style.display = 'none';
+            console.log('🔍 DEBUG: No active timer, countdown hidden');
+        }
+    } catch (error) {
+        console.log('🔍 DEBUG: Failed to get timer state:', error);
+        // Fallback: hide countdown
        document.getElementById('nextMatchInfo').style.display = 'none';
        document.getElementById('countdownTimer').style.display = 'none';
    }
......@@ -982,8 +966,8 @@
fixturesContent.style.display = 'block';
debugTime('Fixtures table rendered and displayed');
-// Find next match and start countdown
-findNextMatchAndStartCountdown();
+// Get timer state and start countdown
+getTimerStateAndStartCountdown();
debugTime('Countdown initialization completed');
}
......
......@@ -2613,7 +2613,7 @@ def notifications():
def message_handler(message):
"""Handle incoming messages for this client"""
-if message.type in [MessageType.START_GAME, MessageType.MATCH_START, MessageType.GAME_STATUS]:
+if message.type in [MessageType.START_GAME, MessageType.GAME_STARTED, MessageType.MATCH_START, MessageType.GAME_STATUS]:
notification_data = {
"type": message.type.value,
"data": message.data,
......