
Ignore SSE comments to support openrouter streaming (#2432)

tags/0.5.6
johnpccd committed 1 year ago
commit b2ee738bb1

+4 -1
api/core/model_runtime/model_providers/openai_api_compatible/llm/llm.py



  for chunk in response.iter_lines(decode_unicode=True, delimiter=delimiter):
      if chunk:
+         # ignore sse comments
+         if chunk.startswith(':'):
+             continue
          decoded_chunk = chunk.strip().lstrip('data: ').lstrip()
          chunk_json = None
          try:
              chunk_json = json.loads(decoded_chunk)
          # stream ended
          except json.JSONDecodeError as e:
-             logger.error(f"decoded_chunk error,delimiter={delimiter},decoded_chunk={decoded_chunk}")
+             logger.error(f"decoded_chunk error: {e}, delimiter={delimiter}, decoded_chunk={decoded_chunk}")
              yield create_final_llm_result_chunk(
                  index=chunk_index + 1,
                  message=AssistantPromptMessage(content=""),
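
Context for the change: in SSE, any line starting with ':' is a comment, and OpenRouter interleaves such keep-alive comments with its data events. Before this patch those lines reached json.loads, landed in the JSONDecodeError branch, and the handler yielded the final result chunk, cutting the stream short. A minimal standalone sketch of the behaviour (the sample lines are illustrative, not captured OpenRouter output or the Dify code itself):

import json

# Illustrative SSE lines: a keep-alive comment, a data event, and the terminator.
sample_stream = [
    ': OPENROUTER PROCESSING',                            # SSE comment / keep-alive (assumed example)
    'data: {"choices": [{"delta": {"content": "Hi"}}]}',  # normal data event
    'data: [DONE]',                                       # stream terminator
]

for chunk in sample_stream:
    if not chunk:
        continue
    # Without this check, the comment line falls through to json.loads,
    # raises JSONDecodeError, and the stream handling ends prematurely.
    if chunk.startswith(':'):
        continue
    decoded_chunk = chunk.strip().lstrip('data: ').lstrip()
    try:
        payload = json.loads(decoded_chunk)
        print(payload["choices"][0]["delta"].get("content", ""))
    except json.JSONDecodeError:
        print("non-JSON chunk (end of stream):", decoded_chunk)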
