Add reports last-sync query endpoint and client implementation guides

- Add GET /api/reports/last-sync endpoint to query server for last sync information
- Update reports page with sorting by client name and cap balance
- Create comprehensive client-side implementation guide (CLIENT_SYNC_IMPLEMENTATION_GUIDE.md)
- Create minimal client prompt (CLIENT_SYNC_MINIMAL_PROMPT.md)
- Update final implementation documentation (REPORTS_FINAL_IMPLEMENTATION.md)

This allows clients to:
- Query server for last sync information before syncing
- Verify local tracking against server state
- Recover from tracking corruption
- Prevent data loss from missed syncs
parent 9158dbaf
# Client-Side Reports Sync Implementation Guide
## Overview
This guide provides instructions for implementing client-side reports synchronization with the server, including how to query the server for the last sync information to verify and recover from tracking corruption.
## Server Endpoints
### 1. Sync Reports Endpoint
**Endpoint**: `POST /api/reports/sync`
**Authentication**: Bearer token (API token)
**Request Format**:
```json
{
"sync_id": "sync_20260201_214327_abc12345",
"client_id": "client_unique_identifier",
"sync_timestamp": "2026-02-01T21:43:27.249Z",
"date_range": "all",
"start_date": "2026-01-01T00:00:00",
"end_date": "2026-02-01T21:43:27.249Z",
"bets": [...],
"extraction_stats": [...],
"cap_compensation_balance": 5000.0,
"summary": {...},
"is_incremental": true,
"sync_type": "incremental"
}
```
**Response Format**:
```json
{
"success": true,
"synced_count": 25,
"message": "Report data synchronized successfully",
"server_timestamp": "2026-02-01T21:43:27.249Z"
}
```
### 2. Get Last Sync Endpoint (NEW)
**Endpoint**: `GET /api/reports/last-sync?client_id=<client_id>`
**Authentication**: Bearer token (API token)
**Query Parameters**:
- `client_id` (required): The unique client identifier
**Response Format**:
```json
{
"success": true,
"client_id": "client_unique_identifier",
"last_sync_id": "sync_20260201_214327_abc12345",
"last_sync_timestamp": "2026-02-01T21:43:27.249Z",
"last_sync_type": "incremental",
"last_date_range": "all",
"last_start_date": "2026-01-01T00:00:00",
"last_end_date": "2026-02-01T21:43:27.249Z",
"total_syncs": 25,
"last_sync_summary": {
"total_payin": 100000.0,
"total_payout": 95000.0,
"net_profit": 5000.0,
"total_bets": 50,
"total_matches": 10,
"cap_compensation_balance": 5000.0
},
"last_sync_log": {
"operation_type": "new_sync",
"status": "success",
"bets_processed": 50,
"bets_new": 50,
"bets_duplicate": 0,
"stats_processed": 10,
"stats_new": 10,
"stats_updated": 0,
"created_at": "2026-02-01T21:43:27.249Z"
},
"server_timestamp": "2026-02-01T21:43:27.249Z"
}
```
**Response When No Syncs Exist**:
```json
{
"success": true,
"message": "No sync records found for this client",
"client_id": "client_unique_identifier",
"last_sync_id": null,
"last_sync_timestamp": null,
"last_sync_type": null,
"total_syncs": 0,
"server_timestamp": "2026-02-01T21:43:27.249Z"
}
```
## Client-Side Implementation
### Step 1: Initialize Client Tracking
Create a local tracking system to manage sync state:
```python
class ReportsSyncTracking:
def __init__(self, db_session):
self.db = db_session
self.client_id = self.get_client_id()
def get_client_id(self):
"""Get unique client identifier (machine ID or rustdesk_id)"""
# Implement your client ID generation logic
return "client_unique_identifier"
def get_last_sync_info(self):
"""Get last sync information from local tracking"""
sync_record = self.db.query(ReportsSyncTrackingModel)\
.filter_by(entity_type='sync', entity_id='latest')\
.first()
if sync_record:
return {
'last_synced_at': sync_record.last_synced_at,
'sync_id': sync_record.sync_id
}
return None
def update_last_sync(self, sync_id, timestamp):
"""Update last sync information in local tracking"""
sync_record = self.db.query(ReportsSyncTrackingModel)\
.filter_by(entity_type='sync', entity_id='latest')\
.first()
if sync_record:
sync_record.last_synced_at = timestamp
sync_record.sync_id = sync_id
else:
sync_record = ReportsSyncTrackingModel(
entity_type='sync',
entity_id='latest',
last_synced_at=timestamp,
sync_id=sync_id
)
self.db.add(sync_record)
self.db.commit()
def track_bet(self, bet_uuid, updated_at):
"""Track individual bet sync status"""
bet_record = self.db.query(ReportsSyncTrackingModel)\
.filter_by(entity_type='bet', entity_id=bet_uuid)\
.first()
if bet_record:
bet_record.last_synced_at = updated_at
else:
bet_record = ReportsSyncTrackingModel(
entity_type='bet',
entity_id=bet_uuid,
last_synced_at=updated_at
)
self.db.add(bet_record)
self.db.commit()
def track_extraction_stat(self, match_id, updated_at):
"""Track individual extraction stat sync status"""
stat_record = self.db.query(ReportsSyncTrackingModel)\
.filter_by(entity_type='extraction_stat', entity_id=match_id)\
.first()
if stat_record:
stat_record.last_synced_at = updated_at
else:
stat_record = ReportsSyncTrackingModel(
entity_type='extraction_stat',
entity_id=match_id,
last_synced_at=updated_at
)
self.db.add(stat_record)
self.db.commit()
```
### Step 2: Query Server for Last Sync (NEW)
Before performing a sync, query the server to verify your local tracking:
```python
def verify_server_sync_state(api_token, client_id):
"""Query server for last sync information"""
import requests
url = "https://your-server.com/api/reports/last-sync"
headers = {
"Authorization": f"Bearer {api_token}",
"Content-Type": "application/json"
}
params = {"client_id": client_id}
try:
response = requests.get(url, headers=headers, params=params)
response.raise_for_status()
data = response.json()
if data.get('success'):
return data
else:
print(f"Error: {data.get('error')}")
return None
except requests.RequestException as e:
print(f"Request failed: {str(e)}")
return None
```
### Step 3: Compare Local and Server State
Compare your local tracking with server state to detect discrepancies:
```python
def sync_state_verification(tracking, api_token, client_id):
"""Verify local tracking matches server state"""
# Get local tracking
local_info = tracking.get_last_sync_info()
# Get server state
server_info = verify_server_sync_state(api_token, client_id)
# NOTE: the server returns success=true with last_sync_id=null when no
# syncs exist, so check the payload, not just the request outcome.
if not server_info or not server_info.get('last_sync_id'):
print("No sync records on server - this is a first sync")
return 'first_sync'
if not local_info:
print("No local tracking - need to recover from server")
return 'recover_from_server'
# Compare sync IDs
if local_info['sync_id'] != server_info['last_sync_id']:
print(f"Sync ID mismatch!")
print(f" Local: {local_info['sync_id']}")
print(f" Server: {server_info['last_sync_id']}")
return 'sync_id_mismatch'
# Compare timestamps
local_time = local_info['last_synced_at']
server_time = datetime.fromisoformat(server_info['last_sync_timestamp'])
if abs((local_time - server_time).total_seconds()) > 60:
print(f"Timestamp mismatch!")
print(f" Local: {local_time}")
print(f" Server: {server_time}")
return 'timestamp_mismatch'
print("Local tracking matches server state")
return 'verified'
```
### Step 4: Handle Recovery Scenarios
Implement recovery logic for different scenarios:
```python
def handle_recovery(state, tracking, api_token, client_id):
"""Handle different recovery scenarios"""
if state == 'first_sync':
# First sync - send all data
print("Performing first sync - sending all historical data")
return perform_full_sync(tracking, api_token, client_id)
elif state == 'recover_from_server':
# Local tracking lost - recover from server
print("Recovering from server state")
server_info = verify_server_sync_state(api_token, client_id)
if server_info and server_info['last_sync_id']:
# Update local tracking with server state
server_time = datetime.fromisoformat(server_info['last_sync_timestamp'])
tracking.update_last_sync(
server_info['last_sync_id'],
server_time
)
print(f"Recovered: sync_id={server_info['last_sync_id']}")
# Perform incremental sync from server's last sync
return perform_incremental_sync(tracking, api_token, client_id)
else:
# No server data either - perform full sync
return perform_full_sync(tracking, api_token, client_id)
elif state in ['sync_id_mismatch', 'timestamp_mismatch']:
# Tracking corruption detected
print("Tracking corruption detected - performing full sync")
return perform_full_sync(tracking, api_token, client_id)
else:
# Verified - perform normal incremental sync
return perform_incremental_sync(tracking, api_token, client_id)
```
### Step 5: Perform Sync Operations
Implement full and incremental sync operations:
```python
def perform_full_sync(tracking, api_token, client_id):
"""Perform full sync - send all historical data"""
import requests
from datetime import datetime, timedelta
# Generate unique sync ID
sync_id = f"sync_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"
# Collect all bets and extraction stats
all_bets = collect_all_bets()
all_stats = collect_all_extraction_stats()
# Build sync payload
payload = {
"sync_id": sync_id,
"client_id": client_id,
"sync_timestamp": datetime.utcnow().isoformat(),
"date_range": "all",
"start_date": (datetime.utcnow() - timedelta(days=365)).isoformat(),
"end_date": datetime.utcnow().isoformat(),
"bets": all_bets,
"extraction_stats": all_stats,
"cap_compensation_balance": get_current_cap_balance(),
"summary": calculate_summary(all_bets, all_stats),
"is_incremental": False,
"sync_type": "full"
}
# Send to server
return send_sync_request(api_token, payload, tracking)
def perform_incremental_sync(tracking, api_token, client_id):
"""Perform incremental sync - send only new/changed data"""
import requests
from datetime import datetime
# Get last sync time from local tracking
last_sync_info = tracking.get_last_sync_info()
last_synced_at = last_sync_info['last_synced_at'] if last_sync_info else None
# Generate unique sync ID
sync_id = f"sync_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"
# Collect only new/changed bets and stats
new_bets = collect_new_bets(last_synced_at)
new_stats = collect_new_extraction_stats(last_synced_at)
# Build sync payload
payload = {
"sync_id": sync_id,
"client_id": client_id,
"sync_timestamp": datetime.utcnow().isoformat(),
"date_range": "all",
"start_date": last_synced_at.isoformat() if last_synced_at else None,
"end_date": datetime.utcnow().isoformat(),
"bets": new_bets,
"extraction_stats": new_stats,
"cap_compensation_balance": get_current_cap_balance(),
"summary": calculate_summary(new_bets, new_stats),
"is_incremental": True,
"sync_type": "incremental"
}
# Send to server
return send_sync_request(api_token, payload, tracking)
def send_sync_request(api_token, payload, tracking):
"""Send sync request to server and update local tracking"""
import requests
from datetime import datetime
url = "https://your-server.com/api/reports/sync"
headers = {
"Authorization": f"Bearer {api_token}",
"Content-Type": "application/json"
}
try:
response = requests.post(url, headers=headers, json=payload)
response.raise_for_status()
data = response.json()
if data.get('success'):
print(f"Sync successful: {data.get('synced_count')} items synced")
# Update local tracking
sync_timestamp = datetime.fromisoformat(payload['sync_timestamp'])
tracking.update_last_sync(payload['sync_id'], sync_timestamp)
# Track individual bets and stats
for bet in payload['bets']:
tracking.track_bet(bet['uuid'], sync_timestamp)
for stat in payload['extraction_stats']:
tracking.track_extraction_stat(stat['match_id'], sync_timestamp)
return True
else:
print(f"Sync failed: {data.get('error')}")
return False
except requests.RequestException as e:
print(f"Sync request failed: {str(e)}")
return False
```
### Step 6: Implement Periodic Sync
Set up periodic sync with verification:
```python
import time
from datetime import datetime
def periodic_sync(tracking, api_token, client_id, interval_minutes=10):
"""Perform periodic sync with server state verification"""
while True:
try:
print(f"\n[{datetime.utcnow()}] Starting sync cycle...")
# Step 1: Verify server state
state = sync_state_verification(tracking, api_token, client_id)
print(f"Verification state: {state}")
# Step 2: Handle recovery if needed
result = handle_recovery(state, tracking, api_token, client_id)
if result:
print(f"Sync completed successfully")
else:
print(f"Sync failed - will retry in {interval_minutes} minutes")
except Exception as e:
print(f"Sync error: {str(e)}")
# Wait for next sync cycle
print(f"Waiting {interval_minutes} minutes until next sync...")
time.sleep(interval_minutes * 60)
```
## Best Practices
### 1. Always Verify Before Sync
Before every sync, query the server for last sync information:
```python
# Before sync
server_info = verify_server_sync_state(api_token, client_id)
if server_info:
print(f"Server last sync: {server_info['last_sync_id']}")
print(f"Server last sync time: {server_info['last_sync_timestamp']}")
```
### 2. Handle Tracking Corruption
If local tracking is corrupted or lost, recover from server:
```python
# Detect corruption
local_info = tracking.get_last_sync_info()
server_info = verify_server_sync_state(api_token, client_id)
if not local_info and server_info:
# Local tracking lost - recover from server
server_time = datetime.fromisoformat(server_info['last_sync_timestamp'])
tracking.update_last_sync(
server_info['last_sync_id'],
server_time
)
```
### 3. Use Unique Sync IDs
Always generate unique sync IDs for each sync operation:
```python
import uuid
from datetime import datetime
sync_id = f"sync_{datetime.now().strftime('%Y%m%d_%H%M%S')}_{uuid.uuid4().hex[:8]}"
```
### 4. Track Individual Records
Track sync status for individual bets and extraction stats:
```python
# After successful sync
for bet in payload['bets']:
tracking.track_bet(bet['uuid'], sync_timestamp)
for stat in payload['extraction_stats']:
tracking.track_extraction_stat(stat['match_id'], sync_timestamp)
```
### 5. Implement Retry Logic
Implement exponential backoff for failed syncs:
```python
import time
def sync_with_retry(tracking, api_token, client_id, max_retries=5):
"""Sync with exponential backoff retry"""
for attempt in range(max_retries):
try:
result = perform_incremental_sync(tracking, api_token, client_id)
if result:
return True
except Exception as e:
print(f"Sync attempt {attempt + 1} failed: {str(e)}")
if attempt < max_retries - 1:
# Exponential backoff: 60s * 2^attempt
wait_time = 60 * (2 ** attempt)
print(f"Retrying in {wait_time} seconds...")
time.sleep(wait_time)
return False
```
## Error Handling
### Common Errors and Solutions
**Error 401: Authentication required**
- Solution: Ensure API token is valid and included in Authorization header
**Error 403: Access denied**
- Solution: Verify client_id belongs to your API token
**Error 400: Invalid request**
- Solution: Check request format and required fields
**Error 500: Internal server error**
- Solution: Log error details and retry with exponential backoff
## Testing
### Test 1: Query Last Sync
```python
# Test querying server for last sync
server_info = verify_server_sync_state(api_token, client_id)
print(f"Last sync ID: {server_info['last_sync_id']}")
print(f"Last sync time: {server_info['last_sync_timestamp']}")
```
### Test 2: Full Sync
```python
# Test full sync
result = perform_full_sync(tracking, api_token, client_id)
print(f"Full sync result: {result}")
```
### Test 3: Incremental Sync
```python
# Test incremental sync
result = perform_incremental_sync(tracking, api_token, client_id)
print(f"Incremental sync result: {result}")
```
### Test 4: Recovery Scenario
```python
# Simulate tracking corruption
# Delete local tracking records
# Then verify and recover
state = sync_state_verification(tracking, api_token, client_id)
result = handle_recovery(state, tracking, api_token, client_id)
print(f"Recovery result: {result}")
```
## Summary
The new `/api/reports/last-sync` endpoint allows clients to:
1. **Verify Server State**: Query server for last sync information
2. **Detect Tracking Corruption**: Compare local tracking with server state
3. **Recover from Server**: Restore local tracking from server state
4. **Prevent Data Loss**: Ensure no syncs are missed due to tracking issues
By implementing this verification step before each sync, clients can maintain data integrity and recover from tracking corruption automatically.
\ No newline at end of file
# Minimal Prompt: Client-Side Last Sync Query Implementation
## What Changed on Server
Server now has a new endpoint to query last sync information:
**Endpoint**: `GET /api/reports/last-sync?client_id=<client_id>`
**Authentication**: Bearer token (API token)
**Response Format**:
```json
{
"success": true,
"client_id": "client_unique_identifier",
"last_sync_id": "sync_20260201_214327_abc12345",
"last_sync_timestamp": "2026-02-01T21:43:27.249Z",
"last_sync_type": "incremental",
"total_syncs": 25,
"last_sync_summary": {
"total_payin": 100000.0,
"total_payout": 95000.0,
"net_profit": 5000.0,
"total_bets": 50,
"total_matches": 10,
"cap_compensation_balance": 5000.0
},
"server_timestamp": "2026-02-01T21:43:27.249Z"
}
```
## What You Need to Implement
### 1. Add Function to Query Server
```python
def query_server_last_sync(api_token, client_id):
"""Query server for last sync information"""
import requests
url = "https://your-server.com/api/reports/last-sync"
headers = {"Authorization": f"Bearer {api_token}"}
params = {"client_id": client_id}
response = requests.get(url, headers=headers, params=params)
return response.json()
```
### 2. Call Before Each Sync
```python
# Before performing sync
server_info = query_server_last_sync(api_token, client_id)
if server_info.get('success'):
last_sync_id = server_info.get('last_sync_id')
last_sync_time = server_info.get('last_sync_timestamp')
# Compare with your local tracking
# If mismatch detected, perform full sync instead of incremental
```
### 3. Handle Recovery
If your local tracking is corrupted or lost:
```python
# If no local tracking exists
if not local_tracking_exists():
# Query server for last sync
server_info = query_server_last_sync(api_token, client_id)
# Recover local tracking from server state
if server_info.get('last_sync_id'):
update_local_tracking(
sync_id=server_info['last_sync_id'],
timestamp=server_info['last_sync_timestamp']
)
```
## Key Benefits
1. **Verify Server State**: Check what server has before syncing
2. **Detect Corruption**: Compare local tracking with server
3. **Auto-Recovery**: Restore local tracking from server if lost
4. **Prevent Data Loss**: Ensure no syncs are missed
## Integration Point
Add this call to your existing sync flow:
```python
# Existing sync flow
def perform_sync():
# NEW: Query server first
server_info = query_server_last_sync(api_token, client_id)
# Verify and recover if needed
if needs_recovery(server_info):
recover_from_server(server_info)
# Continue with normal sync
send_sync_data()
```
That's it! Just add the query call before your existing sync logic.
\ No newline at end of file
......@@ -1170,12 +1170,15 @@ def api_reports_sync():
'details': f'Invalid bet_datetime format: {bet_data.get("bet_datetime", "unknown")}'
}), 400
# Create bet record
# Create bet record with match_id and match_number from first detail
first_detail = bet_data.get('details', [{}])[0] if bet_data.get('details') else {}
bet = Bet(
uuid=bet_data['uuid'],
sync_id=report_sync.id,
client_id=data['client_id'],
fixture_id=bet_data['fixture_id'],
match_id=first_detail.get('match_id'),
match_number=first_detail.get('match_number'),
bet_datetime=bet_datetime,
paid=bet_data.get('paid', False),
paid_out=bet_data.get('paid_out', False),
......@@ -1230,6 +1233,19 @@ def api_reports_sync():
existing_stats.over_bets = stats_data.get('over_bets', 0)
existing_stats.over_amount = stats_data.get('over_amount', 0.00)
existing_stats.result_breakdown = stats_data.get('result_breakdown')
# Update match_number from bet details
from sqlalchemy import func
bet_details_for_match = db.session.query(
BetDetail.match_number
).join(Bet).filter(
Bet.client_id == data['client_id'],
BetDetail.match_id == stats_data['match_id']
).first()
if bet_details_for_match:
existing_stats.match_number = bet_details_for_match.match_number
stats_count += 1
stats_updated += 1
else:
......@@ -1243,10 +1259,24 @@ def api_reports_sync():
'details': f'Invalid match_datetime format: {stats_data.get("match_datetime", "unknown")}'
}), 400
# Get match_number from bet details for this match
match_number = 0
from sqlalchemy import func
bet_details_for_match = db.session.query(
BetDetail.match_number
).join(Bet).filter(
Bet.client_id == data['client_id'],
BetDetail.match_id == stats_data['match_id']
).first()
if bet_details_for_match:
match_number = bet_details_for_match.match_number
extraction_stats = ExtractionStats(
sync_id=report_sync.id,
client_id=data['client_id'],
match_id=stats_data['match_id'],
match_number=match_number,
fixture_id=stats_data['fixture_id'],
match_datetime=match_datetime,
total_bets=stats_data['total_bets'],
......@@ -1278,24 +1308,29 @@ def api_reports_sync():
winning_bets = 0
losing_bets = 0
pending_bets = 0
match_number = 0
# Query all bet details for this match
# Query all bet details for this match to get match_number
from sqlalchemy import func
bet_details_query = db.session.query(
BetDetail.result,
BetDetail.match_number,
func.count(BetDetail.id).label('count')
).join(Bet).filter(
Bet.client_id == data['client_id'],
BetDetail.match_id == match_id
).group_by(BetDetail.result)
).group_by(BetDetail.result, BetDetail.match_number)
for result, count in bet_details_query.all():
for result, match_num, count in bet_details_query.all():
if result == 'won':
winning_bets = count
elif result == 'lost':
losing_bets = count
elif result == 'pending':
pending_bets = count
# Get match_number from first bet detail
if match_num and match_number == 0:
match_number = match_num
# Calculate balance (payin - payout)
total_payin = stats_data['total_amount_collected']
......@@ -1308,7 +1343,7 @@ def api_reports_sync():
client_id=data['client_id'],
client_token_name=client_token_name,
match_id=match_id,
match_number=stats_data.get('match_number', 0),
match_number=match_number,
fixture_id=stats_data['fixture_id'],
match_datetime=datetime.fromisoformat(stats_data['match_datetime']),
total_bets=stats_data['total_bets'],
......@@ -1381,4 +1416,133 @@ def api_reports_sync():
'success': False,
'error': 'Internal server error',
'details': 'An unexpected error occurred while processing sync'
}), 500
@bp.route('/reports/last-sync', methods=['GET'])
@csrf.exempt
def api_get_last_sync():
    """Return the most recent sync record for a client.

    Lets a client verify/recover its local sync tracking against server state.

    Query parameters:
        client_id (str, required): unique client identifier
            (presumably a rustdesk_id, matching ClientActivity.rustdesk_id —
            confirm against the sync endpoint's usage).

    Authentication: Bearer API token (validated via validate_api_token).

    Returns JSON:
        200 with last sync details (or a "no sync records" payload when the
            client has never synced),
        400 when client_id is missing,
        401 when the token is missing/invalid,
        403 when the token's user does not own the client,
        500 on unexpected errors.
    """
    try:
        # Local imports keep route registration cheap and avoid import cycles.
        from app.models import ReportSync, ReportSyncLog, APIToken, ClientActivity
        from app.auth.jwt_utils import validate_api_token, extract_token_from_request
        from sqlalchemy import desc
        # --- Authenticate using API token ---
        token = extract_token_from_request()
        if not token:
            return jsonify({
                'success': False,
                'error': 'Authentication required',
                'details': 'API token required'
            }), 401
        # NOTE: api_token itself is unused below; only the resolved user matters here.
        user, api_token = validate_api_token(token)
        if not user or not user.is_active:
            return jsonify({
                'success': False,
                'error': 'Authentication failed',
                'details': 'Invalid or expired API token'
            }), 401
        # --- Validate required query parameter ---
        client_id = request.args.get('client_id')
        if not client_id:
            return jsonify({
                'success': False,
                'error': 'Invalid request',
                'details': 'client_id parameter is required'
            }), 400
        # --- Authorization: non-admins may only query clients that have
        # reported activity under one of their own API tokens. ---
        if not user.is_admin:
            # Check if this client belongs to user's API tokens
            user_token_ids = [t.id for t in APIToken.query.filter_by(user_id=user.id).all()]
            if user_token_ids:
                client_ids = [c.rustdesk_id for c in ClientActivity.query.filter(
                    ClientActivity.api_token_id.in_(user_token_ids)
                ).all()]
                if client_id not in client_ids:
                    return jsonify({
                        'success': False,
                        'error': 'Access denied',
                        'details': 'You do not have access to this client'
                    }), 403
            else:
                # User owns no tokens at all, so no client can belong to them.
                return jsonify({
                    'success': False,
                    'error': 'Access denied',
                    'details': 'You do not have access to this client'
                }), 403
        # --- Fetch the most recent sync for this client ---
        last_sync = ReportSync.query.filter_by(client_id=client_id)\
            .order_by(desc(ReportSync.sync_timestamp))\
            .first()
        if not last_sync:
            # Still a success: the client learns it must perform a first sync.
            return jsonify({
                'success': True,
                'message': 'No sync records found for this client',
                'client_id': client_id,
                'last_sync_id': None,
                'last_sync_timestamp': None,
                'last_sync_type': None,
                'total_syncs': 0,
                'server_timestamp': datetime.utcnow().isoformat()
            }), 200
        # Total sync count lets the client sanity-check its own history length.
        total_syncs = ReportSync.query.filter_by(client_id=client_id).count()
        # Most recent processing log (counts of new/duplicate records, status).
        last_sync_log = ReportSyncLog.query.filter_by(client_id=client_id)\
            .order_by(desc(ReportSyncLog.created_at))\
            .first()
        # --- Build response ---
        response_data = {
            'success': True,
            'client_id': client_id,
            'last_sync_id': last_sync.sync_id,
            'last_sync_timestamp': last_sync.sync_timestamp.isoformat() if last_sync.sync_timestamp else None,
            # hasattr guard: sync_type may predate this column — TODO confirm
            # whether all deployed schemas have it and drop the guard if so.
            'last_sync_type': last_sync.sync_type if hasattr(last_sync, 'sync_type') else 'unknown',
            'last_date_range': last_sync.date_range,
            'last_start_date': last_sync.start_date.isoformat() if last_sync.start_date else None,
            'last_end_date': last_sync.end_date.isoformat() if last_sync.end_date else None,
            'total_syncs': total_syncs,
            'last_sync_summary': {
                # Decimal columns are coerced to float for JSON serialization.
                'total_payin': float(last_sync.total_payin) if last_sync.total_payin else 0.0,
                'total_payout': float(last_sync.total_payout) if last_sync.total_payout else 0.0,
                'net_profit': float(last_sync.net_profit) if last_sync.net_profit else 0.0,
                'total_bets': last_sync.total_bets,
                'total_matches': last_sync.total_matches,
                'cap_compensation_balance': float(last_sync.cap_compensation_balance) if last_sync.cap_compensation_balance else 0.0
            },
            'server_timestamp': datetime.utcnow().isoformat()
        }
        # Attach processing-log details only when a log row exists.
        if last_sync_log:
            response_data['last_sync_log'] = {
                'operation_type': last_sync_log.operation_type,
                'status': last_sync_log.status,
                'bets_processed': last_sync_log.bets_processed,
                'bets_new': last_sync_log.bets_new,
                'bets_duplicate': last_sync_log.bets_duplicate,
                'stats_processed': last_sync_log.stats_processed,
                'stats_new': last_sync_log.stats_new,
                'stats_updated': last_sync_log.stats_updated,
                'created_at': last_sync_log.created_at.isoformat() if last_sync_log.created_at else None
            }
        logger.info(f"Last sync query for client {client_id} by user {user.username}: last_sync_id={last_sync.sync_id}")
        return jsonify(response_data), 200
    except Exception as e:
        # Boundary handler: log and return a generic 500 without leaking internals.
        logger.error(f"API get last sync error: {str(e)}")
        return jsonify({
            'success': False,
            'error': 'Internal server error',
            'details': 'Failed to retrieve last sync information'
        }), 500
\ No newline at end of file
......@@ -830,6 +830,193 @@ class Migration_011_AddCapCompensationBalance(Migration):
def can_rollback(self) -> bool:
return True
class Migration_012_AddMatchNumberToBetsAndStats(Migration):
    """Add match_id and match_number to bets table, match_number to extraction_stats table.

    Fixes vs. previous revision: the target database is MySQL/MariaDB (see
    Migration_013's AUTO_INCREMENT / ENGINE=InnoDB DDL), where
    ``CREATE INDEX IF NOT EXISTS``, ``DROP INDEX <name>`` without ``ON <table>``
    and ``ALTER TABLE ... DROP COLUMN IF EXISTS`` are not portable — the old
    index creation always raised (and was swallowed, so the index was never
    built) and ``down()`` failed. Existence is now checked via the SQLAlchemy
    inspector and plain, portable DDL is issued only when needed.
    """

    def __init__(self):
        super().__init__("012", "Add match_id and match_number to bets table, match_number to extraction_stats table")

    def up(self):
        """Add match_id and match_number columns to bets and extraction_stats tables.

        Returns:
            bool: True on success (including when there is nothing to do).

        Raises:
            Exception: re-raised after logging if any DDL statement fails.
        """
        try:
            inspector = inspect(db.engine)
            # Tables are created elsewhere; if bets is absent this migration is a no-op.
            if 'bets' not in inspector.get_table_names():
                logger.info("bets table does not exist yet, skipping migration")
                return True
            # --- bets.match_id / bets.match_number ---
            bets_columns = [col['name'] for col in inspector.get_columns('bets')]
            if 'match_id' not in bets_columns:
                logger.info("Adding match_id column to bets table...")
                with db.engine.connect() as conn:
                    conn.execute(text('''
                        ALTER TABLE bets
                        ADD COLUMN match_id INT NOT NULL DEFAULT 0
                    '''))
                    conn.commit()
                logger.info("match_id column added to bets table")
            else:
                logger.info("match_id column already exists in bets table")
            if 'match_number' not in bets_columns:
                logger.info("Adding match_number column to bets table...")
                with db.engine.connect() as conn:
                    conn.execute(text('''
                        ALTER TABLE bets
                        ADD COLUMN match_number INT NOT NULL DEFAULT 0
                    '''))
                    conn.commit()
                logger.info("match_number column added to bets table")
            else:
                logger.info("match_number column already exists in bets table")
            # --- index on bets.match_id ---
            # MySQL has no CREATE INDEX IF NOT EXISTS; check via the inspector
            # instead so the statement only runs when the index is missing.
            existing_indexes = [ix['name'] for ix in inspector.get_indexes('bets')]
            if 'idx_bets_match_id' not in existing_indexes:
                with db.engine.connect() as conn:
                    conn.execute(text("CREATE INDEX idx_bets_match_id ON bets(match_id)"))
                    conn.commit()
                logger.info("Index idx_bets_match_id created")
            else:
                logger.info("Index idx_bets_match_id already exists")
            # --- extraction_stats.match_number ---
            if 'extraction_stats' not in inspector.get_table_names():
                logger.info("extraction_stats table does not exist yet, skipping migration")
                return True
            extraction_stats_columns = [col['name'] for col in inspector.get_columns('extraction_stats')]
            if 'match_number' not in extraction_stats_columns:
                logger.info("Adding match_number column to extraction_stats table...")
                with db.engine.connect() as conn:
                    conn.execute(text('''
                        ALTER TABLE extraction_stats
                        ADD COLUMN match_number INT NOT NULL DEFAULT 0
                    '''))
                    conn.commit()
                logger.info("match_number column added to extraction_stats table")
            else:
                logger.info("match_number column already exists in extraction_stats table")
            logger.info("Migration 012 completed successfully")
            return True
        except Exception as e:
            logger.error(f"Migration 012 failed: {str(e)}")
            raise

    def down(self):
        """Drop match_id and match_number columns from bets and extraction_stats tables.

        Uses inspector-based existence checks plus MySQL-compatible DDL
        (``DROP INDEX <name> ON <table>``, plain ``DROP COLUMN``) instead of
        the non-portable ``IF EXISTS`` forms.

        Returns:
            bool: True on success.

        Raises:
            Exception: re-raised after logging if any DDL statement fails.
        """
        try:
            inspector = inspect(db.engine)
            with db.engine.connect() as conn:
                if 'bets' in inspector.get_table_names():
                    # Drop the index first; dropping the column it covers would
                    # otherwise fail or implicitly drop it depending on backend.
                    index_names = [ix['name'] for ix in inspector.get_indexes('bets')]
                    if 'idx_bets_match_id' in index_names:
                        conn.execute(text("DROP INDEX idx_bets_match_id ON bets"))
                    bets_columns = [col['name'] for col in inspector.get_columns('bets')]
                    if 'match_id' in bets_columns:
                        conn.execute(text("ALTER TABLE bets DROP COLUMN match_id"))
                    if 'match_number' in bets_columns:
                        conn.execute(text("ALTER TABLE bets DROP COLUMN match_number"))
                if 'extraction_stats' in inspector.get_table_names():
                    stats_columns = [col['name'] for col in inspector.get_columns('extraction_stats')]
                    if 'match_number' in stats_columns:
                        conn.execute(text("ALTER TABLE extraction_stats DROP COLUMN match_number"))
                conn.commit()
            logger.info("Dropped match_id and match_number columns from bets and extraction_stats tables")
            return True
        except Exception as e:
            logger.error(f"Rollback of migration 012 failed: {str(e)}")
            raise

    def can_rollback(self) -> bool:
        """This migration's schema changes can be reversed."""
        return True
class Migration_013_CreateMatchReportsTable(Migration):
    """Create match_reports table for comprehensive match-level reporting.

    One row per (sync, client, match): bet counts by outcome, payin/payout
    totals, cap-compensation data and an over/under breakdown. The DDL is
    MySQL-specific (AUTO_INCREMENT, ENGINE=InnoDB, inline INDEX clauses).
    """

    def __init__(self):
        super().__init__("013", "Create match_reports table for comprehensive match-level reporting")

    def up(self):
        """Create the match_reports table if it does not already exist.

        Returns:
            bool: True on success (idempotent — returns True if the table exists).

        Raises:
            Exception: re-raised after logging if table creation fails.
        """
        try:
            inspector = inspect(db.engine)
            # Idempotency guard: re-running the migration is a safe no-op.
            if 'match_reports' in inspector.get_table_names():
                logger.info("match_reports table already exists, skipping creation")
                return True
            # Raw SQL rather than SQLAlchemy metadata so the exact column
            # types, indexes and FK cascade behavior are pinned down.
            create_table_sql = '''
            CREATE TABLE match_reports (
                id INT AUTO_INCREMENT PRIMARY KEY,
                sync_id INT NOT NULL,
                client_id VARCHAR(255) NOT NULL,
                client_token_name VARCHAR(255) NOT NULL,
                match_id INT NOT NULL,
                match_number INT NOT NULL,
                fixture_id VARCHAR(255) NOT NULL,
                match_datetime DATETIME NOT NULL,
                total_bets INT NOT NULL DEFAULT 0,
                winning_bets INT NOT NULL DEFAULT 0,
                losing_bets INT NOT NULL DEFAULT 0,
                pending_bets INT NOT NULL DEFAULT 0,
                total_payin DECIMAL(15,2) NOT NULL DEFAULT 0.00,
                total_payout DECIMAL(15,2) NOT NULL DEFAULT 0.00,
                balance DECIMAL(15,2) NOT NULL DEFAULT 0.00,
                actual_result VARCHAR(50) NOT NULL,
                extraction_result VARCHAR(50) NOT NULL,
                cap_applied BOOLEAN NOT NULL DEFAULT FALSE,
                cap_percentage DECIMAL(5,2),
                cap_compensation_balance DECIMAL(15,2) NOT NULL DEFAULT 0.00,
                under_bets INT NOT NULL DEFAULT 0,
                under_amount DECIMAL(15,2) NOT NULL DEFAULT 0.00,
                over_bets INT NOT NULL DEFAULT 0,
                over_amount DECIMAL(15,2) NOT NULL DEFAULT 0.00,
                result_breakdown JSON,
                created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
                updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
                INDEX idx_match_reports_sync_id (sync_id),
                INDEX idx_match_reports_client_id (client_id),
                INDEX idx_match_reports_client_token_name (client_token_name),
                INDEX idx_match_reports_match_id (match_id),
                INDEX idx_match_reports_fixture_id (fixture_id),
                INDEX idx_match_reports_match_datetime (match_datetime),
                INDEX idx_match_reports_actual_result (actual_result),
                FOREIGN KEY (sync_id) REFERENCES report_syncs(id) ON DELETE CASCADE
            ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
            '''
            with db.engine.connect() as conn:
                conn.execute(text(create_table_sql))
                conn.commit()
            logger.info("Created match_reports table successfully")
            return True
        except Exception as e:
            logger.error(f"Migration 013 failed: {str(e)}")
            raise

    def down(self):
        """Drop the match_reports table (rows are discarded — not recoverable).

        Returns:
            bool: True on success.

        Raises:
            Exception: re-raised after logging if the drop fails.
        """
        try:
            with db.engine.connect() as conn:
                conn.execute(text("DROP TABLE IF EXISTS match_reports"))
                conn.commit()
            logger.info("Dropped match_reports table")
            return True
        except Exception as e:
            logger.error(f"Rollback of migration 013 failed: {str(e)}")
            raise

    def can_rollback(self) -> bool:
        """The table can be dropped cleanly (FK is on this table's side)."""
        return True
class MigrationManager:
"""Manages database migrations and versioning"""
......@@ -846,6 +1033,8 @@ class MigrationManager:
Migration_009_CreateClientActivityTable(),
Migration_010_CreateReportsTables(),
Migration_011_AddCapCompensationBalance(),
Migration_012_AddMatchNumberToBetsAndStats(),
Migration_013_CreateMatchReportsTable(),
]
def ensure_version_table(self):
......
"""
Migration script to add match_id and match_number fields to bets and extraction_stats tables
"""
def upgrade():
    """Add match_id/match_number columns to bets, and match_number to extraction_stats.

    Idempotent: every column and index is only created when it does not
    already exist, so the script can safely be re-run.

    Fix: the original used ``db.engine.execute(db.DDL(...))`` — the implicit
    ``Engine.execute()`` API is legacy and was removed in SQLAlchemy 2.0.
    DDL is now executed through an explicit connection with ``text()``,
    matching the pattern used by Migration 013 in this project.
    """
    from app import create_app, db

    app = create_app()
    with app.app_context():
        inspector = db.inspect(db.engine)

        def _add_column_if_missing(table, column, ddl):
            # Create the column only when absent so repeated runs are harmless.
            existing = [col['name'] for col in inspector.get_columns(table)]
            if column in existing:
                print(f"{column} column already exists in {table} table")
                return
            print(f"Adding {column} column to {table} table...")
            with db.engine.begin() as conn:
                conn.execute(db.text(ddl))
            print(f"{column} column added to {table} table")

        _add_column_if_missing(
            'bets', 'match_id',
            "ALTER TABLE bets ADD COLUMN match_id INTEGER NOT NULL DEFAULT 0"
        )
        _add_column_if_missing(
            'bets', 'match_number',
            "ALTER TABLE bets ADD COLUMN match_number INTEGER NOT NULL DEFAULT 0"
        )
        _add_column_if_missing(
            'extraction_stats', 'match_number',
            "ALTER TABLE extraction_stats ADD COLUMN match_number INTEGER NOT NULL DEFAULT 0"
        )

        # Create indexes for better query performance.
        print("Creating indexes...")
        try:
            # NOTE(review): "IF NOT EXISTS" for indexes is not supported by
            # MySQL — the except branch below absorbs the duplicate-index
            # error there, as the original did.
            with db.engine.begin() as conn:
                conn.execute(db.text(
                    "CREATE INDEX IF NOT EXISTS idx_bets_match_id ON bets(match_id)"
                ))
            print("Index idx_bets_match_id created")
        except Exception as e:
            print(f"Index idx_bets_match_id already exists or error: {e}")

        print("\nMigration completed successfully!")
def downgrade():
    """Remove match_id/match_number from bets and match_number from extraction_stats.

    Fix: replaces the legacy ``db.engine.execute(db.DDL(...))`` calls
    (``Engine.execute()`` was removed in SQLAlchemy 2.0) with explicit
    connection-scoped ``text()`` execution, consistent with Migration 013.
    Raises if a column is already gone — same behavior as the original,
    which performed no existence checks on the down path.
    """
    from app import create_app, db

    app = create_app()
    with app.app_context():
        print("Removing match_id and match_number columns from bets table...")
        with db.engine.begin() as conn:
            conn.execute(db.text("ALTER TABLE bets DROP COLUMN match_id"))
            conn.execute(db.text("ALTER TABLE bets DROP COLUMN match_number"))
        print("Removing match_number column from extraction_stats table...")
        with db.engine.begin() as conn:
            conn.execute(db.text("ALTER TABLE extraction_stats DROP COLUMN match_number"))
        print("\nDowngrade completed successfully!")
if __name__ == '__main__':
    import sys

    # Default action is upgrade; pass "downgrade" as the first CLI argument to revert.
    action = sys.argv[1] if len(sys.argv) > 1 else None
    if action == 'downgrade':
        downgrade()
    else:
        upgrade()
\ No newline at end of file
......@@ -1742,6 +1742,10 @@ def reports():
clients_list.sort(key=lambda x: x['winning_bets'], reverse=(sort_order == 'desc'))
elif sort_by == 'losing_bets':
clients_list.sort(key=lambda x: x['losing_bets'], reverse=(sort_order == 'desc'))
elif sort_by == 'token_name':
clients_list.sort(key=lambda x: x['token_name'].lower(), reverse=(sort_order == 'desc'))
elif sort_by == 'cap_balance':
clients_list.sort(key=lambda x: x['cap_balance'], reverse=(sort_order == 'desc'))
else:
clients_list.sort(key=lambda x: x['last_match_timestamp'], reverse=(sort_order == 'desc'))
......
......@@ -898,6 +898,8 @@ class Bet(db.Model):
sync_id = db.Column(db.Integer, db.ForeignKey('report_syncs.id'), nullable=False, index=True)
client_id = db.Column(db.String(255), nullable=False, index=True)
fixture_id = db.Column(db.String(255), nullable=False, index=True)
match_id = db.Column(db.Integer, nullable=False, index=True)
match_number = db.Column(db.Integer, nullable=False)
bet_datetime = db.Column(db.DateTime, nullable=False, index=True)
paid = db.Column(db.Boolean, default=False)
paid_out = db.Column(db.Boolean, default=False)
......@@ -918,6 +920,8 @@ class Bet(db.Model):
'sync_id': self.sync_id,
'client_id': self.client_id,
'fixture_id': self.fixture_id,
'match_id': self.match_id,
'match_number': self.match_number,
'bet_datetime': self.bet_datetime.isoformat() if self.bet_datetime else None,
'paid': self.paid,
'paid_out': self.paid_out,
......@@ -970,6 +974,7 @@ class ExtractionStats(db.Model):
sync_id = db.Column(db.Integer, db.ForeignKey('report_syncs.id'), nullable=False, index=True)
client_id = db.Column(db.String(255), nullable=False, index=True)
match_id = db.Column(db.Integer, nullable=False, index=True)
match_number = db.Column(db.Integer, nullable=False)
fixture_id = db.Column(db.String(255), nullable=False, index=True)
match_datetime = db.Column(db.DateTime, nullable=False)
total_bets = db.Column(db.Integer, nullable=False)
......@@ -995,6 +1000,7 @@ class ExtractionStats(db.Model):
'sync_id': self.sync_id,
'client_id': self.client_id,
'match_id': self.match_id,
'match_number': self.match_number,
'fixture_id': self.fixture_id,
'match_datetime': self.match_datetime.isoformat() if self.match_datetime else None,
'total_bets': self.total_bets,
......
......@@ -104,11 +104,13 @@
<label for="sort_by" class="form-label">Sort By</label>
<select class="form-select" id="sort_by" name="sort_by">
<option value="last_match_timestamp" {% if filters.sort_by == 'last_match_timestamp' %}selected{% endif %}>Last Match</option>
<option value="token_name" {% if filters.sort_by == 'token_name' %}selected{% endif %}>Client Name</option>
<option value="total_payin" {% if filters.sort_by == 'total_payin' %}selected{% endif %}>Total Payin</option>
<option value="total_payout" {% if filters.sort_by == 'total_payout' %}selected{% endif %}>Total Payout</option>
<option value="net_profit" {% if filters.sort_by == 'net_profit' %}selected{% endif %}>Net Profit</option>
<option value="total_bets" {% if filters.sort_by == 'total_bets' %}selected{% endif %}>Total Bets</option>
<option value="total_matches" {% if filters.sort_by == 'total_matches' %}selected{% endif %}>Total Matches</option>
<option value="cap_balance" {% if filters.sort_by == 'cap_balance' %}selected{% endif %}>CAP Balance</option>
</select>
</div>
<div class="col-12">
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment