from open_webui.utils.task import prompt_template, prompt_variables_template
from open_webui.utils.misc import (
    deep_update,
    add_or_update_system_message,
)

from typing import Callable, Optional
import json


# inplace function: form_data is modified
def apply_system_prompt_to_body(
    system: Optional[str], form_data: dict, metadata: Optional[dict] = None, user=None
) -> dict:
    if not system:
        return form_data

    # Metadata (WebUI Usage)
    if metadata:
        variables = metadata.get("variables", {})
        if variables:
            system = prompt_variables_template(system, variables)

    # Legacy (API Usage)
    system = prompt_template(system, user)

    form_data["messages"] = add_or_update_system_message(
        system, form_data.get("messages", [])
    )
    return form_data
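
# Illustrative usage (kept as a comment so nothing runs at import time; the values
# are hypothetical and assume `prompt_template` passes a placeholder-free string
# through unchanged):
#
#   body = {"messages": [{"role": "user", "content": "Hi"}]}
#   apply_system_prompt_to_body("Be concise.", body, metadata={"variables": {}})
#
# Afterwards, body["messages"] should start with a system message containing
# "Be concise.", followed by the original user turn.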
					
						
# inplace function: form_data is modified
def apply_model_params_to_body(
    params: dict, form_data: dict, mappings: dict[str, Callable]
) -> dict:
    if not params:
        return form_data

    for key, value in params.items():
        if value is not None:
            if key in mappings:
                cast_func = mappings[key]
                if isinstance(cast_func, Callable):
                    form_data[key] = cast_func(value)
            else:
                form_data[key] = value

    return form_data
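
# Illustrative usage (comment only; values are hypothetical):
#
#   apply_model_params_to_body(
#       {"temperature": "0.7", "foo": "bar"}, {}, {"temperature": float}
#   )
#
# returns {"temperature": 0.7, "foo": "bar"}: mapped keys are cast with their
# mapping function, unmapped keys are copied verbatim, and None values are skipped.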
					
						
def remove_open_webui_params(params: dict) -> dict:
    """
    Removes OpenWebUI-specific parameters from the provided dictionary.

    Args:
        params (dict): The dictionary containing parameters.

    Returns:
        dict: The modified dictionary with OpenWebUI parameters removed.
    """
    open_webui_params = {
        "stream_response": bool,
        "stream_delta_chunk_size": int,
        "function_calling": str,
        "reasoning_tags": list,
        "system": str,
    }

    for key in list(params.keys()):
        if key in open_webui_params:
            del params[key]

    return params
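
# Illustrative usage (comment only; values are hypothetical):
#
#   remove_open_webui_params({"system": "You are concise.", "temperature": 0.2})
#
# returns {"temperature": 0.2}; the dict is modified in place and also returned.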
					
						
# inplace function: form_data is modified
def apply_model_params_to_body_openai(params: dict, form_data: dict) -> dict:
    params = remove_open_webui_params(params)

    custom_params = params.pop("custom_params", {})
    if custom_params:
        # Attempt to parse custom_params values if they are JSON-encoded strings
        for key, value in custom_params.items():
            if isinstance(value, str):
                try:
                    # Attempt to parse the string as JSON
                    custom_params[key] = json.loads(value)
                except json.JSONDecodeError:
                    # If it fails, keep the original string
                    pass

        # Merge the custom parameters into params (custom values take precedence)
        params = deep_update(params, custom_params)

    mappings = {
        "temperature": float,
        "top_p": float,
        "min_p": float,
        "max_tokens": int,
        "frequency_penalty": float,
        "presence_penalty": float,
        "reasoning_effort": str,
        "seed": lambda x: x,
        "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x],
        "logit_bias": lambda x: x,
        "response_format": dict,
    }
    return apply_model_params_to_body(params, form_data, mappings)
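
# Illustrative usage (comment only; values are hypothetical):
#
#   apply_model_params_to_body_openai(
#       {"temperature": 0.2, "max_tokens": "512", "system": "x"}, {}
#   )
#
# is expected to return {"temperature": 0.2, "max_tokens": 512}: the WebUI-only
# "system" key is stripped and mapped values are cast to their expected types.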
					
						
def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
    params = remove_open_webui_params(params)

    custom_params = params.pop("custom_params", {})
    if custom_params:
        # Attempt to parse custom_params values if they are JSON-encoded strings
        for key, value in custom_params.items():
            if isinstance(value, str):
                try:
                    # Attempt to parse the string as JSON
                    custom_params[key] = json.loads(value)
                except json.JSONDecodeError:
                    # If it fails, keep the original string
                    pass

        # Merge the custom parameters into params (custom values take precedence)
        params = deep_update(params, custom_params)

    # Convert OpenAI parameter names to Ollama parameter names if needed.
    name_differences = {
        "max_tokens": "num_predict",
    }

    for key, value in name_differences.items():
        if (param := params.get(key, None)) is not None:
            # Copy the value to the Ollama name and delete the original key,
            # to prevent Ollama warning about an invalid option being provided
            params[value] = param
            del params[key]

    # See https://github.com/ollama/ollama/blob/main/docs/api.md#request-8
    mappings = {
        "temperature": float,
        "top_p": float,
        "seed": lambda x: x,
        "mirostat": int,
        "mirostat_eta": float,
        "mirostat_tau": float,
        "num_ctx": int,
        "num_batch": int,
        "num_keep": int,
        "num_predict": int,
        "repeat_last_n": int,
        "top_k": int,
        "min_p": float,
        "repeat_penalty": float,
        "presence_penalty": float,
        "frequency_penalty": float,
        "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x],
        "num_gpu": int,
        "use_mmap": bool,
        "use_mlock": bool,
        "num_thread": int,
    }

    def parse_json(value: str):
        """
        Parses a JSON string, returning the original value if parsing fails.
        """
        try:
            return json.loads(value)
        except Exception:
            return value

    ollama_root_params = {
        "format": lambda x: parse_json(x),
        "keep_alive": lambda x: parse_json(x),
        "think": bool,
    }

    for key, value in ollama_root_params.items():
        if (param := params.get(key, None)) is not None:
            # Move the parameter to the top level of the body and remove it from
            # params, to prevent Ollama warning about an invalid option being provided
            form_data[key] = value(param)
            del params[key]

    # Unlike OpenAI, Ollama does not accept sampling parameters at the top level
    # of the body; they belong under the "options" field
    form_data["options"] = apply_model_params_to_body(
        params, (form_data.get("options", {}) or {}), mappings
    )
    return form_data
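
# Illustrative usage (comment only; values are hypothetical):
#
#   apply_model_params_to_body_ollama(
#       {"max_tokens": 1024, "temperature": 0.5, "format": "json"}, {}
#   )
#
# is expected to return
#
#   {"format": "json", "options": {"temperature": 0.5, "num_predict": 1024}}
#
# "max_tokens" is renamed to "num_predict", root-level parameters such as
# "format" are lifted to the top of the body, and the rest lands under "options".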
					
						
def convert_messages_openai_to_ollama(messages: list[dict]) -> list[dict]:
    """
    Converts a list of OpenAI-style chat messages into Ollama's message format.
    """
    ollama_messages = []

    for message in messages:
        # Initialize the new message structure with the role
        new_message = {"role": message["role"]}

        content = message.get("content", [])
        tool_calls = message.get("tool_calls", None)
        tool_call_id = message.get("tool_call_id", None)

        # Check if the content is a string (just a simple message)
        if isinstance(content, str) and not tool_calls:
            # If the content is a string, it's pure text
            new_message["content"] = content

            # If the message responds to a tool call, keep the tool call id
            if tool_call_id:
                new_message["tool_call_id"] = tool_call_id

        elif tool_calls:
            # If tool calls are present, add them to the message
            ollama_tool_calls = []
            for tool_call in tool_calls:
                ollama_tool_call = {
                    "index": tool_call.get("index", 0),
                    "id": tool_call.get("id", None),
                    "function": {
                        "name": tool_call.get("function", {}).get("name", ""),
                        "arguments": json.loads(
                            tool_call.get("function", {}).get("arguments", "{}")
                        ),
                    },
                }
                ollama_tool_calls.append(ollama_tool_call)
            new_message["tool_calls"] = ollama_tool_calls

            # Set the content to an empty string (Ollama requires an empty string for tool calls)
            new_message["content"] = ""

        else:
            # Otherwise, assume the content is a list of dicts, e.g., text followed by an image URL
            content_text = ""
            images = []

            # Iterate through the list of content items
            for item in content:
                # Check if it's a text type
                if item.get("type") == "text":
                    content_text += item.get("text", "")

                # Check if it's an image URL type
                elif item.get("type") == "image_url":
                    img_url = item.get("image_url", {}).get("url", "")
                    if img_url:
                        # If the image url starts with data:, it's a base64 image and should be trimmed
                        if img_url.startswith("data:"):
                            img_url = img_url.split(",")[-1]
                        images.append(img_url)

            # Add content text (if any)
            if content_text:
                new_message["content"] = content_text.strip()

            # Add images (if any)
            if images:
                new_message["images"] = images

        # Append the new formatted message to the result
        ollama_messages.append(new_message)

    return ollama_messages
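
# Illustrative usage (comment only; values are hypothetical): an OpenAI-style
# multimodal message such as
#
#   {"role": "user", "content": [
#       {"type": "text", "text": "What is this?"},
#       {"type": "image_url", "image_url": {"url": "data:image/png;base64,AAAA"}},
#   ]}
#
# is expected to come back as
#
#   {"role": "user", "content": "What is this?", "images": ["AAAA"]}
#
# with the base64 data URL prefix trimmed off.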
					
						
def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
    """
    Converts a payload formatted for OpenAI's API to be compatible with Ollama's API endpoint for chat completions.

    Args:
        openai_payload (dict): The payload originally designed for OpenAI API usage.

    Returns:
        dict: A modified payload compatible with the Ollama API.
    """
    ollama_payload = {}

    # Map basic model and message details
    ollama_payload["model"] = openai_payload.get("model")
    ollama_payload["messages"] = convert_messages_openai_to_ollama(
        openai_payload.get("messages")
    )
    ollama_payload["stream"] = openai_payload.get("stream", False)
    if "tools" in openai_payload:
        ollama_payload["tools"] = openai_payload["tools"]

    # If there are advanced parameters in the payload, format them in Ollama's options field
    if openai_payload.get("options"):
        ollama_payload["options"] = openai_payload["options"]
        ollama_options = openai_payload["options"]

        def parse_json(value: str):
            """
            Parses a JSON string, returning the original value if parsing fails.
            """
            try:
                return json.loads(value)
            except Exception:
                return value

        ollama_root_params = {
            "format": lambda x: parse_json(x),
            "keep_alive": lambda x: parse_json(x),
            "think": bool,
        }

        # Ollama's options field can contain parameters that should be at the root level.
        for key, value in ollama_root_params.items():
            if (param := ollama_options.get(key, None)) is not None:
                # Move the parameter to the top level of the payload and remove it from
                # options, to prevent Ollama warning about an invalid option being provided
                ollama_payload[key] = value(param)
                del ollama_options[key]

        # Re-map OpenAI's `max_tokens` -> Ollama's `num_predict`
        if "max_tokens" in ollama_options:
            ollama_options["num_predict"] = ollama_options["max_tokens"]
            del ollama_options["max_tokens"]

        # Ollama has no "system" option; a system prompt has to be provided as a
        # top-level parameter, so move it out of the options dict.
        # It is not entirely clear why this is needed, but it is kept for compatibility.
        if "system" in ollama_options:
            ollama_payload["system"] = ollama_options["system"]
            del ollama_options["system"]

        ollama_payload["options"] = ollama_options

    # If the "stop" parameter is present in the openai_payload, remap it into ollama_payload["options"]
    if "stop" in openai_payload:
        ollama_options = ollama_payload.get("options", {})
        ollama_options["stop"] = openai_payload.get("stop")
        ollama_payload["options"] = ollama_options

    if "metadata" in openai_payload:
        ollama_payload["metadata"] = openai_payload["metadata"]

    if "response_format" in openai_payload:
        response_format = openai_payload["response_format"]
        format_type = response_format.get("type", None)

        schema = response_format.get(format_type, None)
        if schema:
            format = schema.get("schema", None)
            ollama_payload["format"] = format

    return ollama_payload
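
# Illustrative usage (comment only; the model name is hypothetical): a minimal chat payload
#
#   convert_payload_openai_to_ollama(
#       {"model": "llama3", "messages": [{"role": "user", "content": "Hi"}], "stream": True}
#   )
#
# is expected to return the same model, converted messages, and stream flag;
# "options", "format", and "system" are only populated when the corresponding
# fields are present in the OpenAI-side payload.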
					
						
def convert_embedding_payload_openai_to_ollama(openai_payload: dict) -> dict:
    """
    Convert an embeddings request payload from OpenAI format to Ollama format.

    Args:
        openai_payload (dict): The original payload designed for OpenAI API usage.

    Returns:
        dict: A payload compatible with the Ollama API embeddings endpoint.
    """
    ollama_payload = {"model": openai_payload.get("model")}
    input_value = openai_payload.get("input")

    # Ollama expects 'input' as a list, and 'prompt' as a single string.
    if isinstance(input_value, list):
        ollama_payload["input"] = input_value
        ollama_payload["prompt"] = "\n".join(str(x) for x in input_value)
    else:
        ollama_payload["input"] = [input_value]
        ollama_payload["prompt"] = str(input_value)

    # Optionally forward other fields if present
    for optional_key in ("options", "truncate", "keep_alive"):
        if optional_key in openai_payload:
            ollama_payload[optional_key] = openai_payload[optional_key]

    return ollama_payload
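
# Illustrative usage (comment only; the model name is hypothetical):
#
#   convert_embedding_payload_openai_to_ollama(
#       {"model": "nomic-embed-text", "input": ["a", "b"]}
#   )
#
# is expected to return
#
#   {"model": "nomic-embed-text", "input": ["a", "b"], "prompt": "a\nb"}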