Skip to content

Commit 38f4c6e

Browse files
committed
fix(fncall): fix regression introduced in #1963
Otherwise, newlines present in the LLM's JSON responses might break JSON unmarshalling.
1 parent 33c78d2 commit 38f4c6e

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

core/services/openai.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -778,13 +778,13 @@ func parseFunctionCall(llmresult string, multipleResults bool) []funcCallResults
778778
// As we have to change the result before processing, we can't stream the answer token-by-token (yet?)
779779
ss := map[string]interface{}{}
780780
// This prevent newlines to break JSON parsing for clients
781-
// s := utils.EscapeNewLines(llmresult)
782-
json.Unmarshal([]byte(llmresult), &ss)
781+
s := utils.EscapeNewLines(llmresult)
782+
json.Unmarshal([]byte(s), &ss)
783783

784784
// The grammar defines the function name as "function", while OpenAI returns "name"
785785
func_name, ok := ss["function"]
786786
if !ok {
787-
log.Debug().Msg("ss[function] is not OK!")
787+
log.Debug().Msg("ss[function] is not OK!, llm result: " + llmresult)
788788
return results
789789
}
790790
// Similarly, while here arguments is a map[string]interface{}, OpenAI actually want a stringified object

0 commit comments

Comments
 (0)