diff --git a/core/http/endpoints/openai/chat.go b/core/http/endpoints/openai/chat.go index c7afb7bf95e..86b75601bc4 100644 --- a/core/http/endpoints/openai/chat.go +++ b/core/http/endpoints/openai/chat.go @@ -226,12 +226,12 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup // Update input grammar jsStruct := funcs.ToJSONStructure(config.FunctionsConfig.FunctionNameKey, config.FunctionsConfig.FunctionNameKey) - g, err := jsStruct.Grammar(config.FunctionsConfig.GrammarConfig.Options()...) + g, err := jsStruct.Grammar(config.FunctionsConfig.GrammarOptions()...) if err == nil { config.Grammar = g } case input.JSONFunctionGrammarObject != nil: - g, err := input.JSONFunctionGrammarObject.Grammar(config.FunctionsConfig.GrammarConfig.Options()...) + g, err := input.JSONFunctionGrammarObject.Grammar(config.FunctionsConfig.GrammarOptions()...) if err == nil { config.Grammar = g } diff --git a/pkg/functions/parse.go b/pkg/functions/parse.go index 1c95f2f5300..f5593690a9b 100644 --- a/pkg/functions/parse.go +++ b/pkg/functions/parse.go @@ -23,7 +23,9 @@ type GrammarConfig struct { MixedMode bool `yaml:"mixed_mode"` // NoMixedFreeString disables the mixed mode for free strings - // In this way if the LLM selects a free string, it won't be mixed necessarly with JSON objects + // In this way if the LLM selects a free string, it won't be mixed necessarily with JSON objects. + // For example, if enabled, the LLM either returns a JSON object or a free string, but not a mix of both. + // If disabled (default): the LLM can return a JSON object surrounded by free strings (e.g. `this is the JSON result: { "bar": "baz" } for your question`). This forces the LLM to return at least a JSON object, but it's not going to be strict NoMixedFreeString bool `yaml:"no_mixed_free_string"` // NoGrammar disables the grammar parsing and parses the responses directly from the LLM