chore: format
parent 482a41e49b
commit e8fc522eba
@@ -831,7 +831,9 @@ def search_web(engine: str, query: str) -> list[SearchResult]:
 @app.post("/web/search")
 def store_web_search(form_data: SearchForm, user=Depends(get_current_user)):
     try:
-        logging.info(f"trying to web search with {app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query}")
+        logging.info(
+            f"trying to web search with {app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query}"
+        )
         web_results = search_web(
             app.state.config.RAG_WEB_SEARCH_ENGINE, form_data.query
         )
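Note that the f-string above interpolates a tuple, so the rendered message includes the tuple's parentheses and quotes. A minimal runnable sketch of the same log line, with hypothetical stand-ins for app.state.config.RAG_WEB_SEARCH_ENGINE and form_data.query, using logging's lazy %s formatting instead:

    import logging

    logging.basicConfig(level=logging.INFO)

    # Hypothetical stand-ins for app.state.config.RAG_WEB_SEARCH_ENGINE
    # and form_data.query
    engine = "duckduckgo"
    query = "open webui"

    # f"{engine, query}" would render the tuple ('duckduckgo', 'open webui');
    # passing the tuple as an argument defers formatting until the record
    # is actually emitted
    logging.info("trying to web search with %s", (engine, query))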
@@ -1241,8 +1243,10 @@ def reset(user=Depends(get_admin_user)) -> bool:
     return True
 
 
+
 class SafeWebBaseLoader(WebBaseLoader):
     """WebBaseLoader with enhanced error handling for URLs."""
+
     def lazy_load(self) -> Iterator[Document]:
         """Lazy load text from the url(s) in web_path with error handling."""
         for path in self.web_paths:
@@ -1255,7 +1259,9 @@ class SafeWebBaseLoader(WebBaseLoader):
                 if title := soup.find("title"):
                     metadata["title"] = title.get_text()
                 if description := soup.find("meta", attrs={"name": "description"}):
-                    metadata["description"] = description.get("content", "No description found.")
+                    metadata["description"] = description.get(
+                        "content", "No description found."
+                    )
                 if html := soup.find("html"):
                     metadata["language"] = html.get("lang", "No language found.")
 
@@ -1264,6 +1270,7 @@ class SafeWebBaseLoader(WebBaseLoader):
                 # Log the error and continue with the next URL
                 log.error(f"Error loading {path}: {e}")
 
+
 if ENV == "dev":
 
     @app.get("/ef")
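For context, the pattern SafeWebBaseLoader implements is: fetch each URL in turn, scrape title/description/language into metadata, and yield one Document per page, logging failures instead of aborting the whole batch. A self-contained sketch of that per-URL error-handling idea, with standalone names substituted for the WebBaseLoader and Document APIs, which this diff does not show in full:

    from typing import Iterator

    import requests
    from bs4 import BeautifulSoup


    class Document:
        """Hypothetical stand-in for the Document type yielded by lazy_load."""

        def __init__(self, page_content: str, metadata: dict):
            self.page_content = page_content
            self.metadata = metadata


    def safe_lazy_load(web_paths: list[str]) -> Iterator[Document]:
        """Yield one Document per URL, skipping URLs that fail to load."""
        for path in web_paths:
            try:
                response = requests.get(path, timeout=10)
                response.raise_for_status()
                soup = BeautifulSoup(response.text, "html.parser")
                metadata = {"source": path}
                if title := soup.find("title"):
                    metadata["title"] = title.get_text()
                if description := soup.find("meta", attrs={"name": "description"}):
                    metadata["description"] = description.get(
                        "content", "No description found."
                    )
                if html := soup.find("html"):
                    metadata["language"] = html.get("lang", "No language found.")
                yield Document(page_content=soup.get_text(), metadata=metadata)
            except Exception as e:
                # Log the error and continue with the next URL
                print(f"Error loading {path}: {e}")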
@@ -18,7 +18,7 @@ def search_serply(
     hl: str = "us",
     limit: int = 10,
     device_type: str = "desktop",
-        proxy_location: str = "US"
+    proxy_location: str = "US",
 ) -> list[SearchResult]:
     """Search using serper.dev's API and return the results as a list of SearchResult objects.
 
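The comma added after proxy_location is consistent with black's "magic trailing comma": once a signature or literal is split across lines, black appends a trailing comma after the last element so that later additions produce one-line diffs. The same change appears in the two hunks below.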
@@ -37,7 +37,7 @@ def search_serply(
         "language": "en",
         "num": limit,
         "gl": proxy_location.upper(),
-        "hl": hl.lower()
+        "hl": hl.lower(),
     }
 
     url = f"{url}{urlencode(query_payload)}"
@@ -45,7 +45,7 @@ def search_serply(
         "X-API-KEY": api_key,
         "X-User-Agent": device_type,
         "User-Agent": "open-webui",
-        "X-Proxy-Location": proxy_location
+        "X-Proxy-Location": proxy_location,
     }
 
     response = requests.request("GET", url, headers=headers)
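Taken together, search_serply builds a GET request from the encoded query payload plus the proxy headers. A minimal sketch of that request flow; the base URL and all values here are placeholders, not taken from the diff:

    from urllib.parse import urlencode

    import requests

    # Placeholder values; in search_serply these come from the function's
    # arguments and the configured API key
    api_key = "YOUR_SERPLY_API_KEY"
    base_url = "https://api.serply.io/v1/search/"  # assumed endpoint, not shown in the diff

    query_payload = {
        "q": "open webui",
        "language": "en",
        "num": 10,
        "gl": "US",
        "hl": "us",
    }

    headers = {
        "X-API-KEY": api_key,
        "X-User-Agent": "desktop",
        "User-Agent": "open-webui",
        "X-Proxy-Location": "US",
    }

    # urlencode renders the payload as q=open+webui&language=en&num=10&...
    response = requests.request(
        "GET", f"{base_url}{urlencode(query_payload)}", headers=headers
    )
    response.raise_for_status()
    results = response.json()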