Fix NameError by adding job_id_int extraction in worker_process

parent 1cb041a7
...@@ -175,7 +175,7 @@ def check_job_cancelled(job_id): ...@@ -175,7 +175,7 @@ def check_job_cancelled(job_id):
def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, comm=None): def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, comm=None):
"""Analyze media using dynamic model loading.""" """Analyze media using dynamic model loading."""
print(f"DEBUG: Starting analyze_media for job {job_id}, media_path={media_path}") print(f"DEBUG: Starting analyze_media for job {job_id_int}, media_path={media_path}")
# Send initial progress update # Send initial progress update
if comm: if comm:
...@@ -193,9 +193,9 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -193,9 +193,9 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
total_tokens = 0 total_tokens = 0
# Get model with reference counting # Get model with reference counting
print(f"DEBUG: Loading model {model_path} for job {job_id}") print(f"DEBUG: Loading model {model_path} for job {job_id_int}")
model = get_or_load_model(model_path) model = get_or_load_model(model_path)
print(f"DEBUG: Model loaded for job {job_id}") print(f"DEBUG: Model loaded for job {job_id_int}")
# Send progress update after model loading # Send progress update after model loading
if comm: if comm:
...@@ -210,17 +210,17 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -210,17 +210,17 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
print(f"PROGRESS: Job {job_id_int} - 8% - Model loaded successfully") print(f"PROGRESS: Job {job_id_int} - 8% - Model loaded successfully")
# Get system prompt # Get system prompt
print(f"DEBUG: Retrieving system prompt for job {job_id}") print(f"DEBUG: Retrieving system prompt for job {job_id_int}")
try: try:
from .config import get_system_prompt_content from .config import get_system_prompt_content
system_prompt = get_system_prompt_content() system_prompt = get_system_prompt_content()
full_prompt = system_prompt + " " + prompt if system_prompt else prompt full_prompt = system_prompt + " " + prompt if system_prompt else prompt
except: except:
full_prompt = prompt full_prompt = prompt
print(f"DEBUG: Full prompt set for job {job_id}") print(f"DEBUG: Full prompt set for job {job_id_int}")
if is_video(media_path): if is_video(media_path):
print(f"DEBUG: Detected video, extracting frames for job {job_id}") print(f"DEBUG: Detected video, extracting frames for job {job_id_int}")
frames, output_dir = extract_frames(media_path, interval, optimize=True) frames, output_dir = extract_frames(media_path, interval, optimize=True)
total_frames = len(frames) total_frames = len(frames)
...@@ -239,7 +239,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -239,7 +239,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
descriptions = [] descriptions = []
for i, (frame_path, ts) in enumerate(frames): for i, (frame_path, ts) in enumerate(frames):
print(f"DEBUG: Processing frame {i+1}/{total_frames} at {ts:.2f}s for job {job_id}") print(f"DEBUG: Processing frame {i+1}/{total_frames} at {ts:.2f}s for job {job_id_int}")
# Send progress update before processing # Send progress update before processing
if comm: if comm:
...@@ -255,8 +255,8 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -255,8 +255,8 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
print(f"PROGRESS: Job {job_id_int} - {progress_percent}% - Processing frame {i+1}/{total_frames}") print(f"PROGRESS: Job {job_id_int} - {progress_percent}% - Processing frame {i+1}/{total_frames}")
# Check for cancellation # Check for cancellation
if job_id and check_job_cancelled(job_id): if job_id_int and check_job_cancelled(job_id_int):
print(f"DEBUG: Job {job_id} cancelled during frame processing") print(f"DEBUG: Job {job_id_int} cancelled during frame processing")
# Clean up and return cancelled message # Clean up and return cancelled message
for fp, _ in frames[i:]: for fp, _ in frames[i:]:
try: try:
...@@ -273,7 +273,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -273,7 +273,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
desc, tokens = analyze_single_image(frame_path, full_prompt, model) desc, tokens = analyze_single_image(frame_path, full_prompt, model)
total_tokens += tokens total_tokens += tokens
print(f"DEBUG: Frame {i+1} analyzed for job {job_id}") print(f"DEBUG: Frame {i+1} analyzed for job {job_id_int}")
descriptions.append(f"At {ts:.2f}s: {desc}") descriptions.append(f"At {ts:.2f}s: {desc}")
os.unlink(frame_path) os.unlink(frame_path)
...@@ -294,7 +294,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -294,7 +294,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
import shutil import shutil
shutil.rmtree(output_dir) shutil.rmtree(output_dir)
print(f"DEBUG: All frames processed, generating summary for job {job_id}") print(f"DEBUG: All frames processed, generating summary for job {job_id_int}")
# Send progress update for summary generation # Send progress update for summary generation
if comm: if comm:
...@@ -309,8 +309,8 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -309,8 +309,8 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
print(f"PROGRESS: Job {job_id_int} - 85% - Generating video summary") print(f"PROGRESS: Job {job_id_int} - 85% - Generating video summary")
# Check for cancellation before summary # Check for cancellation before summary
if job_id and check_job_cancelled(job_id): if job_id_int and check_job_cancelled(job_id_int):
print(f"DEBUG: Job {job_id} cancelled before summary") print(f"DEBUG: Job {job_id_int} cancelled before summary")
return "Job cancelled by user", total_tokens return "Job cancelled by user", total_tokens
# Generate summary # Generate summary
...@@ -334,7 +334,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -334,7 +334,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
summary_tokens = 0 summary_tokens = 0
total_tokens += summary_tokens total_tokens += summary_tokens
print(f"DEBUG: Summary generated for job {job_id}") print(f"DEBUG: Summary generated for job {job_id_int}")
# Send final progress update # Send final progress update
if comm: if comm:
...@@ -351,7 +351,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -351,7 +351,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
result = f"Frame Descriptions:\n" + "\n".join(descriptions) + f"\n\nSummary:\n{summary}" result = f"Frame Descriptions:\n" + "\n".join(descriptions) + f"\n\nSummary:\n{summary}"
return result, total_tokens return result, total_tokens
else: else:
print(f"DEBUG: Detected image, analyzing for job {job_id}") print(f"DEBUG: Detected image, analyzing for job {job_id_int}")
# Send progress update for image analysis start # Send progress update for image analysis start
if comm: if comm:
...@@ -366,8 +366,8 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -366,8 +366,8 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
print(f"PROGRESS: Job {job_id_int} - 20% - Starting image analysis") print(f"PROGRESS: Job {job_id_int} - 20% - Starting image analysis")
# Check for cancellation before processing image # Check for cancellation before processing image
if job_id and check_job_cancelled(job_id): if job_id_int and check_job_cancelled(job_id_int):
print(f"DEBUG: Job {job_id} cancelled before image analysis") print(f"DEBUG: Job {job_id_int} cancelled before image analysis")
return "Job cancelled by user", total_tokens return "Job cancelled by user", total_tokens
# Send progress update before model inference # Send progress update before model inference
...@@ -384,7 +384,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None, ...@@ -384,7 +384,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
result, tokens = analyze_single_image(media_path, full_prompt, model) result, tokens = analyze_single_image(media_path, full_prompt, model)
total_tokens += tokens total_tokens += tokens
print(f"DEBUG: Image analysis completed for job {job_id}") print(f"DEBUG: Image analysis completed for job {job_id_int}")
# Send progress update for completion # Send progress update for completion
if comm: if comm:
...@@ -445,8 +445,9 @@ def worker_process(backend_type: str): ...@@ -445,8 +445,9 @@ def worker_process(backend_type: str):
model_path = data.get('model_path', 'Qwen/Qwen2.5-VL-7B-Instruct') model_path = data.get('model_path', 'Qwen/Qwen2.5-VL-7B-Instruct')
interval = data.get('interval', 10) interval = data.get('interval', 10)
job_id = message.msg_id # Use message ID for job identification job_id = message.msg_id # Use message ID for job identification
job_id_int = int(message.msg_id.split('_')[1]) # Extract integer job ID
if get_debug(): if get_debug():
print(f"PROGRESS: Job {message.msg_id} accepted - Starting analysis") print(f"PROGRESS: Job {job_id_int} accepted - Starting analysis")
print(f"DEBUG: Starting analysis of {media_path} with model {model_path} for job {job_id}") print(f"DEBUG: Starting analysis of {media_path} with model {model_path} for job {job_id}")
result, tokens_used = analyze_media(media_path, prompt, model_path, interval, job_id_int, comm) result, tokens_used = analyze_media(media_path, prompt, model_path, interval, job_id_int, comm)
print(f"DEBUG: Analysis completed for job {message.msg_id}, used {tokens_used} tokens") print(f"DEBUG: Analysis completed for job {message.msg_id}, used {tokens_used} tokens")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment