Implement async API design for /api/analyze

- Modified /api/analyze to return job_id immediately after file upload
- Added /api/result/{job_id} endpoint for checking job status
- Job status includes: queued (with queue position), processing, completed (with result), failed
- Analysis runs in a background thread
- Updated API documentation with new async workflow and result endpoint examples
parent b7add29d
@@ -352,9 +352,82 @@ curl -X POST -H "Authorization: Bearer YOUR_API_TOKEN" \
<div class="response-section">
<h4><i class="fas fa-reply"></i> Response</h4>
<div class="code-block">{
"result": "Analysis result here...",
"job_id": "550e8400-e29b-41d4-a716-446655440000",
"status": "accepted",
"message": "Analysis job queued successfully",
"tokens_used": 10,
"remaining_tokens": 90
}</div>
<p style="margin-top: 1rem; color: #6b7280; font-size: 0.9rem;">
<strong>Note:</strong> Use the returned <code>job_id</code> with <code>/api/result/{job_id}</code> to check job status and get results.
</p>
</div>
</div>
</div>
<div class="endpoint-card" id="result">
<div class="endpoint-header">
<span class="method">GET</span>
<h3>/api/result/{job_id}</h3>
</div>
<div class="endpoint-content">
<p class="endpoint-description">
<i class="fas fa-tasks text-info"></i>
Check the status and get results of an analysis job. Use the job_id returned from /api/analyze.
</p>
<div class="params-section">
<h4><i class="fas fa-cogs"></i> URL Parameters</h4>
<div class="params-list">
<div class="param-item">
<span class="param-name">job_id</span>
<span class="param-type">string (required)</span>
<div class="param-desc">The job ID returned from the analyze endpoint</div>
</div>
</div>
</div>
<div class="curl-section">
<h4><i class="fas fa-terminal"></i> Curl Example</h4>
<div class="code-block">curl -H "Authorization: Bearer YOUR_API_TOKEN" \
{{ request.host_url }}api/result/550e8400-e29b-41d4-a716-446655440000</div>
</div>
<div class="response-section">
<h4><i class="fas fa-reply"></i> Response Examples</h4>
<h5>Queued Job:</h5>
<div class="code-block">{
"job_id": "550e8400-e29b-41d4-a716-446655440000",
"status": "queued",
"queue_position": 3,
"message": "Job is queued. 2 jobs ahead.",
"created_at": 1640995200.0
}</div>
<h5>Processing Job:</h5>
<div class="code-block">{
"job_id": "550e8400-e29b-41d4-a716-446655440000",
"status": "processing",
"message": "Analysis is in progress...",
"created_at": 1640995200.0
}</div>
<h5>Completed Job:</h5>
<div class="code-block">{
"job_id": "550e8400-e29b-41d4-a716-446655440000",
"status": "completed",
"result": "Analysis result here...",
"tokens_used": 10,
"created_at": 1640995200.0
}</div>
<h5>Failed Job:</h5>
<div class="code-block">{
"job_id": "550e8400-e29b-41d4-a716-446655440000",
"status": "failed",
"error": "Analysis failed: file not found",
"created_at": 1640995200.0
}</div>
</div>
</div>
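For illustration only, here is a minimal Python client sketch of the workflow documented above: upload a file to /api/analyze, read the returned job_id, then poll /api/result/{job_id} until the job completes or fails. It assumes the `requests` library; BASE_URL, API_TOKEN, the upload field name "file", and the "prompt" form field are placeholders, since the exact request fields are not shown in this excerpt.

# Client-side sketch of the async workflow (assumptions: `requests` is
# available; BASE_URL, API_TOKEN, the "file" field, and "prompt" are
# placeholders to adjust for the actual deployment).
import time
import requests

BASE_URL = "https://example.com"   # placeholder
API_TOKEN = "YOUR_API_TOKEN"       # placeholder
HEADERS = {"Authorization": f"Bearer {API_TOKEN}"}

# 1. Submit the file; the response carries job metadata, not the result.
with open("video.mp4", "rb") as f:
    resp = requests.post(f"{BASE_URL}/api/analyze",
                         headers=HEADERS,
                         files={"file": f},                    # assumed field name
                         data={"prompt": "Describe the clip"}) # assumed form field
job_id = resp.json()["job_id"]

# 2. Poll /api/result/{job_id} until the job leaves queued/processing.
while True:
    status = requests.get(f"{BASE_URL}/api/result/{job_id}", headers=HEADERS).json()
    if status["status"] == "completed":
        print(status["result"])
        break
    if status["status"] == "failed":
        raise RuntimeError(status.get("error", "Analysis failed"))
    time.sleep(2)  # simple fixed back-off between polls

A fixed two-second poll interval keeps the sketch short; a real client would likely back off progressively or use the returned queue_position to decide how often to poll.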
@@ -195,6 +195,37 @@ def api_analyze():
    else:
        return json.dumps({'error': 'Either file upload or file_path (admin only) is required'}), 400

    # Create job ID and store job info
    import uuid
    job_id = str(uuid.uuid4())

    # Store job information (in a real implementation, use a database)
    job_info = {
        'id': job_id,
        'user_id': user['id'],
        'status': 'queued',
        'model_path': model_path,
        'prompt': prompt,
        'media_path': media_path,
        'interval': interval,
        'created_at': time.time(),
        'tokens_used': 10 if user.get('role') != 'admin' else 0
    }

    # Store job globally (in production, use proper storage)
    if not hasattr(api_bp, 'jobs'):
        api_bp.jobs = {}
    api_bp.jobs[job_id] = job_info

    # Deduct tokens immediately (skip for admin users)
    if user.get('role') != 'admin':
        update_user_tokens(user['id'], -10)

    # Start analysis in background
    import threading

    def process_analysis():
        try:
            job_info['status'] = 'processing'
            # Send to backend for processing
            data = {
                'model_path': model_path,
@@ -207,18 +238,64 @@ def api_analyze():
            result_data = get_result(msg_id)
            if 'data' in result_data:
                # Tokens were already deducted when the job was queued
                result = result_data['data'].get('result', 'Analysis completed')
                job_info['status'] = 'completed'
                job_info['result'] = result
            else:
                job_info['status'] = 'failed'
                job_info['error'] = result_data.get('error', 'Analysis failed')
        except Exception as e:
            job_info['status'] = 'failed'
            job_info['error'] = str(e)

    # Start background thread
    thread = threading.Thread(target=process_analysis, daemon=True)
    thread.start()
    # Respond immediately; the client polls /api/result/<job_id> for the outcome
    return json.dumps({
        'job_id': job_id,
        'status': 'accepted',
        'message': 'Analysis job queued successfully',
        'tokens_used': job_info['tokens_used'],
        'remaining_tokens': get_user_tokens(user['id'])
    })

@api_bp.route('/api/result/<job_id>')
@api_auth_required
def api_result(job_id):
    """Get the status/result of an analysis job."""
    user = request.api_user

    # Check if job exists and belongs to user
    if not hasattr(api_bp, 'jobs') or job_id not in api_bp.jobs:
        return json.dumps({'error': 'Job not found'}), 404

    job = api_bp.jobs[job_id]
    if job['user_id'] != user['id'] and user.get('role') != 'admin':
        return json.dumps({'error': 'Access denied'}), 403

    response = {
        'job_id': job_id,
        'status': job['status'],
        'created_at': job['created_at']
    }

    if job['status'] == 'queued':
        # Count jobs ahead in the queue (simplified - in a real implementation, query the actual queue)
        queued_jobs = [j for j in api_bp.jobs.values()
                       if j['status'] == 'queued' and j['created_at'] < job['created_at']]
        response['queue_position'] = len(queued_jobs) + 1
        response['message'] = f'Job is queued. {len(queued_jobs)} jobs ahead.'
    elif job['status'] == 'processing':
        response['message'] = 'Analysis is in progress...'
    elif job['status'] == 'completed':
        response['result'] = job['result']
        response['tokens_used'] = job['tokens_used']
    elif job['status'] == 'failed':
        response['error'] = job.get('error', 'Analysis failed')

    return json.dumps(response)

@api_bp.route('/api/api_tokens')
@api_auth_required
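The jobs dict hung off `api_bp` is process-local and, as the inline comments note, a stand-in for proper storage. Purely as a sketch under that assumption (the `JobStore` class and `JOBS` singleton below are illustrative names, not part of this change), a lock-guarded wrapper could later replace the blueprint attribute without changing the endpoints' logic:

# Illustrative sketch of a thread-safe, process-local job store; not part of
# this commit. A real deployment would likely use a database or Redis instead.
import threading

class JobStore:
    def __init__(self):
        self._jobs = {}
        self._lock = threading.Lock()

    def put(self, job_id, job_info):
        with self._lock:
            self._jobs[job_id] = job_info

    def get(self, job_id):
        with self._lock:
            return self._jobs.get(job_id)

    def update(self, job_id, **fields):
        with self._lock:
            if job_id in self._jobs:
                self._jobs[job_id].update(fields)

    def queued_before(self, created_at):
        # Number of queued jobs created earlier (would back queue_position).
        with self._lock:
            return sum(1 for j in self._jobs.values()
                       if j['status'] == 'queued' and j['created_at'] < created_at)

JOBS = JobStore()  # hypothetical module-level singleton

Backing the store with Redis or a database row per job would additionally survive restarts and multiple worker processes, which an in-memory dict does not.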