SexHackMe / vidai · Commits · 113d49fd

Commit 113d49fd authored Oct 09, 2025 by Stefy Lanza (nextime / spora)

Make PROGRESS output messages visible without --debug flag

parent be188699

Showing 1 changed file with 12 additions and 24 deletions

vidai/worker_analysis.py  (+12, -24)
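Every hunk below makes the same one-line change: the PROGRESS console line that used to be gated behind get_debug() is lifted out of that conditional so it is always printed, while the structured progress message sent through comm.send_message(...) is left untouched. A minimal, self-contained sketch of the before/after pattern (the stub get_debug() and the job id value are stand-ins for the real helpers in vidai/worker_analysis.py, not taken from this diff):

def get_debug() -> bool:
    # Stand-in for the real --debug flag lookup; assumed to return False by default.
    return False

job_id_int = 42  # hypothetical job id, for illustration only

# Before this commit: the PROGRESS line only appeared when --debug was enabled.
if get_debug():
    print(f"PROGRESS: Job {job_id_int} - 5% - Initializing analysis job")

# After this commit: the PROGRESS line is printed unconditionally.
print(f"PROGRESS: Job {job_id_int} - 5% - Initializing analysis job")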
@@ -194,8 +194,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Initializing analysis job'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 5% - Initializing analysis job")
+    print(f"PROGRESS: Job {job_id_int} - 5% - Initializing analysis job")
 
     torch.cuda.empty_cache()
     total_tokens = 0
@@ -214,8 +213,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': f'Model {model_path.split("/")[-1]} loaded successfully'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 8% - Model loaded successfully")
+    print(f"PROGRESS: Job {job_id_int} - 8% - Model loaded successfully")
 
     # Get system prompt
     print(f"DEBUG: Retrieving system prompt for job {job_id_int}")
@@ -241,8 +239,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': f'Extracted {total_frames} frames'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 10% - Extracted {total_frames} frames")
+    print(f"PROGRESS: Job {job_id_int} - 10% - Extracted {total_frames} frames")
 
     descriptions = []
@@ -259,8 +256,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': f'Processing frame {i+1}/{total_frames} at {ts:.1f}s'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - {progress_percent}% - Processing frame {i+1}/{total_frames}")
+    print(f"PROGRESS: Job {job_id_int} - {progress_percent}% - Processing frame {i+1}/{total_frames}")
 
     # Check for cancellation
     if job_id_int and check_job_cancelled(job_id_int):
@@ -295,8 +291,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': f'Completed frame {i+1}/{total_frames} ({progress_percent}%)'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - {progress_percent}% - Completed frame {i+1}/{total_frames}")
+    print(f"PROGRESS: Job {job_id_int} - {progress_percent}% - Completed frame {i+1}/{total_frames}")
 
     if output_dir:
         import shutil
@@ -313,8 +308,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Generating video summary'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 85% - Generating video summary")
+    print(f"PROGRESS: Job {job_id_int} - 85% - Generating video summary")
 
     # Check for cancellation before summary
     if job_id_int and check_job_cancelled(job_id_int):
@@ -367,8 +361,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Analysis completed'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 100% - Analysis completed")
+    print(f"PROGRESS: Job {job_id_int} - 100% - Analysis completed")
 
     result = f"Frame Descriptions:\n" + "\n".join(descriptions) + f"\n\nSummary:\n{summary}"
     return result, total_tokens
@@ -384,8 +377,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Starting image analysis'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 20% - Starting image analysis")
+    print(f"PROGRESS: Job {job_id_int} - 20% - Starting image analysis")
 
     # Check for cancellation before processing image
     if job_id_int and check_job_cancelled(job_id_int):
@@ -401,8 +393,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Processing image with AI model'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 50% - Processing image with AI model")
+    print(f"PROGRESS: Job {job_id_int} - 50% - Processing image with AI model")
 
     result, tokens = analyze_single_image(media_path, full_prompt, model)
     total_tokens += tokens
@@ -417,8 +408,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Finalizing analysis results'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 90% - Finalizing analysis results")
+    print(f"PROGRESS: Job {job_id_int} - 90% - Finalizing analysis results")
 
     # Send final progress update
     if comm:
@@ -429,8 +419,7 @@ def analyze_media(media_path, prompt, model_path, interval=10, job_id_int=None,
             'message': 'Image analysis completed successfully'
         })
         comm.send_message(progress_msg)
-    if get_debug():
-        print(f"PROGRESS: Job {job_id_int} - 100% - Image analysis completed successfully")
+    print(f"PROGRESS: Job {job_id_int} - 100% - Image analysis completed successfully")
 
     torch.cuda.empty_cache()
     return result, total_tokens
@@ -468,8 +457,7 @@ def worker_process(backend_type: str):
             interval = data.get('interval', 10)
             job_id = message.msg_id  # Use message ID for job identification
             job_id_int = int(message.msg_id.split('_')[1])  # Extract integer job ID
-            if get_debug():
-                print(f"PROGRESS: Job {job_id_int} accepted - Starting analysis")
+            print(f"PROGRESS: Job {job_id_int} accepted - Starting analysis")
             print(f"DEBUG: Starting analysis of {media_path} with model {model_path} for job {job_id}")
             result, tokens_used = analyze_media(media_path, prompt, model_path, interval, job_id_int, comm)
             print(f"DEBUG: Analysis completed for job {message.msg_id}, used {tokens_used} tokens")
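For context on the last hunk: worker_process derives the numeric job id by splitting the comm message id on underscores and taking the second field before printing the acceptance line. A quick hedged illustration (the message id value below is made up for this sketch; the real format is whatever the comm layer assigns):

msg_id = "analyze_17"  # hypothetical message id; real ids come from the comm layer
job_id_int = int(msg_id.split('_')[1])  # same extraction as in worker_process
print(job_id_int)  # -> 17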