diff --git a/src/vanna/ollama/ollama.py b/src/vanna/ollama/ollama.py
index 9a7f9b33..bcd6a2e8 100644
--- a/src/vanna/ollama/ollama.py
+++ b/src/vanna/ollama/ollama.py
@@ -91,7 +91,7 @@ def submit_prompt(self, prompt, **kwargs) -> str:
                  f"model={self.model},\n"
                  f"options={self.ollama_options},\n"
                  f"keep_alive={self.keep_alive}")
-        self.log(f"Prompt Content:\n{json.dumps(prompt)}")
+        self.log(f"Prompt Content:\n{json.dumps(prompt, ensure_ascii=False)}")
         response_dict = self.ollama_client.chat(model=self.model,
                                                 messages=prompt,
                                                 stream=False,
diff --git a/src/vanna/remote.py b/src/vanna/remote.py
index 6cfb4c01..c3b0220d 100644
--- a/src/vanna/remote.py
+++ b/src/vanna/remote.py
@@ -62,7 +62,7 @@ def assistant_message(self, message: str) -> any:
 
     def submit_prompt(self, prompt, **kwargs) -> str:
         # JSON-ify the prompt
-        json_prompt = json.dumps(prompt)
+        json_prompt = json.dumps(prompt, ensure_ascii=False)
 
         params = [StringData(data=json_prompt)]