From d4780487d4fb94a23a1892c8ee035eefc5a9542f Mon Sep 17 00:00:00 2001
From: johnd0e <1838643+johnd0e@users.noreply.github.com>
Date: Mon, 8 Apr 2024 01:39:21 +0200
Subject: [PATCH] Enable the stream callback function to get complete chunk
 information

And do not demand `object: "chat.completion.chunk"` within the chunk object,
to support services that are not fully OpenAI-compliant.
---
 openai/init.moon | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/openai/init.moon b/openai/init.moon
index 072242d..25402e3 100644
--- a/openai/init.moon
+++ b/openai/init.moon
@@ -83,7 +83,7 @@ parse_chat_response = types.partial {
 }
 
 parse_completion_chunk = types.partial {
-  object: "chat.completion.chunk"
+  --object: "chat.completion.chunk"
   -- not sure of the whole range of chunks, so for now we strictly parse an append
   choices: types.shape {
     types.partial {
@@ -190,7 +190,8 @@ class ChatSession
       parts = {}
       f = @client\create_stream_filter (c) ->
-        table.insert parts, c.content
+        if c = parse_completion_chunk c
+          table.insert parts, c.content
 
       f response
 
       message = {
@@ -250,7 +251,7 @@ class OpenAI
             break
 
           accumulation_buffer = rest
-          if chunk = parse_completion_chunk cjson.decode json_blob
+          if chunk = cjson.decode json_blob
             chunk_callback chunk
 
       ...
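
Illustration (not part of the patch): with this change, the streaming
chunk_callback receives the complete cjson-decoded chunk table rather than the
pre-parsed {content, index} pair, so the caller drills into choices/delta
itself. A rough MoonScript sketch, assuming the library's existing openai.new
constructor and client\chat signature; the guarded field access and the model
name are illustrative assumptions about what a given provider sends:

    openai = require "openai"
    client = openai.new os.getenv("OPENAI_API_KEY")

    client\chat {
      {role: "user", content: "Say hello"}
    }, {model: "gpt-3.5-turbo"}, (chunk) ->
      -- chunk is now the raw decoded SSE object; pull the delta text out
      -- directly, guarding against providers that omit fields
      choice = chunk.choices and chunk.choices[1]
      if choice and choice.delta and choice.delta.content
        io.stdout\write choice.delta.content
        io.stdout\flush!

ChatSession streaming is unaffected for callers, since the second hunk moves
the parse_completion_chunk call into the ChatSession-internal filter callback.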