Fix import and parameter issues in job scheduling

- Made the transformers import conditional in models.py to avoid import errors when the library is not installed
- Fixed the update_queue_status call to pass the 'error' keyword instead of 'error_message'
- Added checks for transformers availability before using it during model loading
- This resolves the ModuleNotFoundError and TypeError raised when running jobs

The system can now handle job scheduling even when the transformers library is not available, and it properly reports errors when job execution fails.
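In distilled form, the guard pattern the diff below applies looks like this (a minimal sketch; the `_transformers_available` flag and the `load_model` check come from the diff, everything else is illustrative):

```python
# Minimal sketch of the lazy-import guard applied in models.py (names taken
# from the diff below; this is illustrative, not the full module).
_transformers_available = False
try:
    from transformers import AutoTokenizer  # fails cleanly if not installed
    _transformers_available = True
except ImportError:
    pass  # defer the failure until a model actually needs transformers

def load_model() -> None:
    if not _transformers_available:
        # Raised at load time with a clear message, instead of a
        # ModuleNotFoundError crashing the import of models.py itself.
        raise ImportError("transformers library is not installed")
```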
@@ -23,7 +23,14 @@ import os
 import torch
 from abc import ABC, abstractmethod
 from typing import Dict, Any, Optional, List
-from transformers import AutoProcessor, AutoModelForCausalLM, AutoTokenizer
+# Lazy import of transformers to avoid import errors when not installed
+_transformers_available = False
+try:
+    from transformers import AutoProcessor, AutoModelForCausalLM, AutoTokenizer
+    _transformers_available = True
+except ImportError:
+    pass
 
 class BaseModel(ABC):
@@ -74,6 +81,8 @@ class VisionLanguageModel(BaseModel):
     def load_model(self) -> None:
         """Load Qwen2.5-VL model."""
+        if not _transformers_available:
+            raise ImportError("transformers library is not installed")
         from transformers import Qwen2_5_VLForConditionalGeneration
         kwargs = {"device_map": "auto", "low_cpu_mem_usage": True, **self.kwargs}
@@ -149,6 +158,8 @@ class TextOnlyModel(BaseModel):
     def load_model(self) -> None:
         """Load text-only model."""
+        if not _transformers_available:
+            raise ImportError("transformers library is not installed")
         kwargs = {"device_map": "auto", "low_cpu_mem_usage": True, **self.kwargs}
         if os.environ.get('VIDAI_FLASH', '').lower() == 'true':
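With both load_model guards in place, a host without transformers can still import models.py; the failure surfaces only when a job actually tries to load a model. A hypothetical illustration (the TextOnlyModel constructor arguments are assumed, not taken from the diff):

```python
# Hypothetical usage on a machine without transformers installed.
# Importing models.py no longer raises ModuleNotFoundError; the error
# surfaces, clearly labeled, when load_model() is called.
from models import TextOnlyModel  # succeeds either way

model = TextOnlyModel("Qwen/Qwen2.5-7B")  # constructor args assumed
try:
    model.load_model()
except ImportError as exc:
    print(exc)  # -> "transformers library is not installed"
```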
@@ -102,7 +102,7 @@ class QueueManager:
             self._execute_local_or_distributed_job(job)
         except Exception as e:
-            update_queue_status(job['id'], 'failed', error_message=str(e))
+            update_queue_status(job['id'], 'failed', error=str(e))
         finally:
             with self.lock:
                 self.active_jobs -= 1
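The TypeError came from passing a keyword the function does not accept. A sketch of a signature compatible with the corrected call, assuming update_queue_status takes an optional error keyword (its real definition is not shown in this diff):

```python
# Sketch of a compatible signature; the actual definition of
# update_queue_status is not part of this diff (assumption).
from typing import Optional

def update_queue_status(job_id: str, status: str, error: Optional[str] = None) -> None:
    """Persist a job's status and, on failure, its error text."""
    ...

# The old call passed error_message=..., an unexpected keyword, so Python
# raised: TypeError: update_queue_status() got an unexpected keyword
# argument 'error_message'. The corrected call works:
update_queue_status("job-123", "failed", error="boom")
```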