Mirror of https://github.com/infiniflow/ragflow.git (synced 2025-11-16 18:14:08 +00:00)
Refa: revert to original task message collection logic (#8251)
### What problem does this PR solve?

Get rid of `RedisDB.get_unacked_iterator queue rag_flow_svr_queue_1 doesn't exist`.

----

Edit: revert to original message collection logic.

### Type of change

- [x] Refactoring

---------

Co-authored-by: Zhichang Yu <yuzhichang@gmail.com>
Co-authored-by: Kevin Hu <kevinhu.sh@gmail.com>
This commit is contained in:
parent 65d5268439
commit 8f9e7a6f6f
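The hunks below only touch the message-collection path of the task executor. For readers unfamiliar with Redis stream consumer groups, here is a minimal redis-py sketch of the pattern `collect()` follows: drain this consumer's pending (unacknowledged) entries first, then fall back to new messages. It is an illustration only, not ragflow code: the stream name is taken from the error message above, the group and consumer names are placeholders, and ragflow goes through its `REDIS_CONN` wrapper (`get_unacked_iterator` / `queue_consumer`) rather than calling redis-py directly.

```python
import redis

r = redis.Redis()
STREAM = "rag_flow_svr_queue_1"   # queue name taken from the error message above
GROUP = "svr_task_consumers"      # placeholder; not ragflow's real group name
CONSUMER = "task_executor_0"      # placeholder consumer name

# Create the consumer group; mkstream=True tolerates the stream not existing yet,
# which is essentially the situation the warning above complains about.
try:
    r.xgroup_create(STREAM, GROUP, id="0", mkstream=True)
except redis.ResponseError:
    pass  # BUSYGROUP: the group already exists


def fetch_one():
    # id "0" re-delivers entries this consumer received but never acknowledged;
    # ">" then asks for brand-new entries. Pending work is drained first.
    for start_id in ("0", ">"):
        resp = r.xreadgroup(GROUP, CONSUMER, {STREAM: start_id}, count=1)
        if resp and resp[0][1]:
            _stream, entries = resp[0]
            msg_id, fields = entries[0]
            return msg_id, fields
    return None, None


msg_id, fields = fetch_one()
if msg_id:
    # ... process the task encoded in `fields` ...
    r.xack(STREAM, GROUP, msg_id)  # drop it from the pending entries list
```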
@@ -21,8 +21,6 @@ import sys
 import threading
 import time
 
-from valkey import RedisError
-
 from api.utils.log_utils import initRootLogger, get_project_base_directory
 from graphrag.general.index import run_graphrag
 from graphrag.utils import get_llm_cache, set_llm_cache, get_tags_from_cache, set_tags_to_cache
@@ -188,45 +186,20 @@ def set_progress(task_id, from_page=0, to_page=-1, prog=None, msg="Processing...
 async def collect():
     global CONSUMER_NAME, DONE_TASKS, FAILED_TASKS
     global UNACKED_ITERATOR
-    svr_queue_names = get_svr_queue_names()
-    redis_msg = None
-
+    svr_queue_names = get_svr_queue_names()
     try:
         if not UNACKED_ITERATOR:
-            UNACKED_ITERATOR = None
-            logging.debug("Rebuilding UNACKED_ITERATOR due to it is None")
-            try:
-                UNACKED_ITERATOR = REDIS_CONN.get_unacked_iterator(svr_queue_names, SVR_CONSUMER_GROUP_NAME, CONSUMER_NAME)
-                logging.debug("UNACKED_ITERATOR rebuilt successfully")
-            except RedisError as e:
-                UNACKED_ITERATOR = None
-                logging.warning(f"Failed to rebuild UNACKED_ITERATOR: {e}")
-
-        if UNACKED_ITERATOR:
-            try:
-                redis_msg = next(UNACKED_ITERATOR)
-            except StopIteration:
-                UNACKED_ITERATOR = None
-                logging.debug("UNACKED_ITERATOR exhausted, clearing")
-
-            except Exception as e:
-                UNACKED_ITERATOR = None
-                logging.warning(f"UNACKED_ITERATOR raised exception: {e}")
-
-        if not redis_msg:
-            for svr_queue_name in svr_queue_names:
-                try:
-                    redis_msg = REDIS_CONN.queue_consumer(svr_queue_name, SVR_CONSUMER_GROUP_NAME, CONSUMER_NAME)
-                    if redis_msg:
-                        break
-                except RedisError as e:
-                    logging.warning(f"queue_consumer failed for {svr_queue_name}: {e}")
-                    continue
-    except Exception as e:
-        logging.exception(f"collect task encountered unexpected exception: {e}")
-        UNACKED_ITERATOR = None
-        await trio.sleep(1)
+            UNACKED_ITERATOR = REDIS_CONN.get_unacked_iterator(svr_queue_names, SVR_CONSUMER_GROUP_NAME, CONSUMER_NAME)
+        try:
+            redis_msg = next(UNACKED_ITERATOR)
+        except StopIteration:
+            for svr_queue_name in svr_queue_names:
+                redis_msg = REDIS_CONN.queue_consumer(svr_queue_name, SVR_CONSUMER_GROUP_NAME, CONSUMER_NAME)
+                if redis_msg:
+                    break
+    except Exception:
+        logging.exception("collect got exception")
         return None, None
 
     if not redis_msg:
         return None, None
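In short, the revert drops the fine-grained `RedisError` handling, the debug logging around rebuilding `UNACKED_ITERATOR`, and the per-queue retry in favour of the original single try/except around the whole collection step: any failure is logged via `logging.exception` and `collect()` returns `(None, None)` so the executor simply polls again. With `RedisError` no longer referenced, the `valkey` import removed in the first hunk is no longer needed either.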