SexHackMe / vidai

Commit 2f2c962e authored Oct 08, 2025 by Stefy Lanza (nextime / spora )
Fix import error for get_result and modify queue to keep jobs queued when no workers available
parent 55528540
Showing 2 changed files with 57 additions and 1 deletion (+57 -1)

vidai/backend.py  +20 -0
vidai/queue.py    +37 -1
vidai/backend.py  (view file @ 2f2c962e)

@@ -29,6 +29,26 @@ worker_sockets = {}  # type: dict
pending_results = {}  # msg_id -> result message


def get_result(msg_id: str) -> dict:
    """Poll for result from backend via socket."""
    import uuid
    from . import comm
    try:
        # Send get_result request
        result_msg = Message('get_result', str(uuid.uuid4()), {'request_id': msg_id})
        comm.send_message(result_msg)
        # Wait for response
        response = comm.receive_message(timeout=5)
        if response:
            return response.data
        else:
            return {'error': 'Timeout waiting for result'}
    except Exception as e:
        return {'error': str(e)}


def handle_web_message(message: Message) -> Message:
    """Handle messages from web interface."""
    if message.msg_type == 'analyze_request':
        ...
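Note: the sketch below shows one way a caller could poll the new get_result() helper until the backend returns something other than the timeout placeholder. It is illustrative only; wait_for_result(), the attempt count and the delay are assumptions, not part of this commit.

# Illustrative polling loop around get_result(); interval and attempts are arbitrary.
import time

from vidai.backend import get_result


def wait_for_result(msg_id: str, attempts: int = 30, delay: float = 2.0) -> dict:
    """Poll get_result() until the backend returns a real result or we give up."""
    for _ in range(attempts):
        result = get_result(msg_id)
        # get_result() returns {'error': 'Timeout waiting for result'} while
        # the backend has nothing for this msg_id yet.
        if result.get('error') != 'Timeout waiting for result':
            return result
        time.sleep(delay)
    return {'error': 'Result not available after polling'}

A caller that already blocks elsewhere can equally call get_result() once and handle the {'error': 'Timeout waiting for result'} response itself.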
vidai/queue.py  (view file @ 2f2c962e)

@@ -88,6 +88,41 @@ class QueueManager:
        return True

    def _can_start_job(self, job: Dict[str, Any]) -> bool:
        """Check if a job can be started (worker available)."""
        from .cluster_master import cluster_master
        from .config import get_analysis_backend, get_training_backend
        from .backend import worker_sockets

        request_type = job['request_type']
        if request_type == 'analyze':
            process_type = 'analysis'
        elif request_type == 'train':
            process_type = 'training'
        else:
            process_type = request_type

        model_path = job['data'].get('model_path', 'Qwen/Qwen2.5-VL-7B-Instruct')

        # Check for distributed worker
        worker_key = cluster_master.select_worker_for_job(process_type, model_path, job['data'])
        if worker_key:
            return True

        # Check for local worker
        if request_type == 'analyze':
            backend = get_analysis_backend()
            worker_key_local = f'analysis_{backend}'
        elif request_type == 'train':
            backend = get_training_backend()
            worker_key_local = f'training_{backend}'
        else:
            return False

        if worker_key_local in worker_sockets:
            return True

        return False

    def _process_queue(self) -> None:
        """Background thread to process queued jobs."""
        while self.running:
            ...
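For reference, the local-worker branch of _can_start_job() boils down to a key-naming convention: a job can start locally only if worker_sockets contains a key of the form '<process_type>_<backend>'. The standalone sketch below restates that mapping; local_worker_key() and the 'cuda' backend value are hypothetical illustrations, not code from this repository.

# Standalone restatement of the worker-key convention used by _can_start_job().
from typing import Optional


def local_worker_key(request_type: str, backend: str) -> Optional[str]:
    """Map a queued request type to the worker_sockets key it needs locally."""
    if request_type == 'analyze':
        return f'analysis_{backend}'
    if request_type == 'train':
        return f'training_{backend}'
    return None  # other request types cannot be started by a local worker


if __name__ == '__main__':
    registered = {'analysis_cuda'}  # stand-in for worker_sockets.keys()
    print(local_worker_key('analyze', 'cuda') in registered)  # True  -> job can start
    print(local_worker_key('train', 'cuda') in registered)    # False -> job stays queued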
@@ -97,6 +132,7 @@ class QueueManager:
            pending = get_pending_queue_items()
            if pending:
                job = pending[0]  # Get highest priority job
                if self._can_start_job(job):
                    self._start_job(job)
            time.sleep(1)  # Check every second
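The net effect of the two queue.py hunks is that _process_queue() no longer starts the head-of-queue job unconditionally: if _can_start_job() reports no suitable worker, the job stays in the pending list and is re-checked on the next one-second tick. A minimal, self-contained sketch of that gating pattern, with hypothetical names standing in for the database-backed queue helpers:

# Gating pattern in miniature: a pending job is only dequeued once a worker
# is available; otherwise it stays queued and is re-checked on the next tick.
# All names here (worker_available, fake_can_start, the tick count) are illustrative.
import time
from typing import Any, Dict, List

pending: List[Dict[str, Any]] = [{'request_type': 'analyze', 'data': {}}]
started: List[Dict[str, Any]] = []
worker_available = False  # flips to True when a worker "registers"


def fake_can_start(job: Dict[str, Any]) -> bool:
    return worker_available


for tick in range(4):                       # stand-in for `while self.running:`
    if tick == 2:
        worker_available = True             # simulate a worker coming online
    if pending and fake_can_start(pending[0]):
        started.append(pending.pop(0))      # only now does the job leave the queue
    time.sleep(0.1)                         # the real loop sleeps 1 second

print(len(pending), len(started))  # 0 1 -> the job waited, then started

In the real implementation, get_pending_queue_items() and _start_job() presumably persist this state rather than popping from an in-memory list; the sketch only captures the control flow.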