Fix for rocm vllm

Jake Poznanski 2025-06-27 16:23:31 +00:00
parent 633b03d1da
commit 14b9b2dc8f


@@ -623,7 +623,7 @@ async def vllm_server_task(model_name_or_path, args, semaphore):
         if match:
             last_running_req = int(match.group(1))
-        match = re.search(r"Waiting: (\d+)", line)
+        match = re.search(r'(?:Waiting|Pending):\s*(\d+)', line)
         if match:
             last_queue_req = int(match.group(1))
         logger.info(f"vllm running req: {last_running_req} queue req: {last_queue_req}")