Mirror of https://github.com/imartinez/privateGPT.git (synced 2025-10-31 05:08:45 +00:00)
* feat: add retry connection to ollama. When Ollama runs behind docker-compose, traefik is sometimes not yet ready to route the request, and the connection fails.
* fix: mypy
32 lines · 750 B · Python
import logging
from collections.abc import Callable
from typing import Any

from retry_async import retry as retry_untyped  # type: ignore

retry_logger = logging.getLogger(__name__)


# Typed wrapper around retry_async.retry: the library ships without type stubs,
# so this thin shim gives call sites a fully typed decorator signature.
def retry(
    exceptions: Any = Exception,
    *,
    is_async: bool = False,
    tries: int = -1,
    delay: float = 0,
    max_delay: float | None = None,
    backoff: float = 1,
    jitter: float | tuple[float, float] = 0,
    logger: logging.Logger = retry_logger,
) -> Callable[..., Any]:
    wrapped = retry_untyped(
        exceptions=exceptions,
        is_async=is_async,
        tries=tries,
        delay=delay,
        max_delay=max_delay,
        backoff=backoff,
        jitter=jitter,
        logger=logger,
    )
    return wrapped  # type: ignore
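A minimal usage sketch of the decorator above, matching the intent of the commit message (keep retrying until Ollama is reachable once traefik starts routing). The wait_for_ollama helper, its default URL, and the retry parameters are illustrative assumptions, not the actual privateGPT call site.

from urllib.error import URLError
from urllib.request import urlopen


# Hypothetical helper: keeps retrying while the Ollama endpoint (assumed here at
# the default http://localhost:11434) is unreachable, e.g. while traefik has not
# started routing yet in docker-compose.
@retry(URLError, tries=20, delay=2, backoff=1.5, max_delay=30)
def wait_for_ollama(base_url: str = "http://localhost:11434") -> None:
    with urlopen(base_url, timeout=5):
        pass  # A successful response means Ollama is reachable; stop retrying.

On each failure the underlying retry_async decorator waits delay seconds, multiplies the wait by backoff up to max_delay, and re-invokes the function until it succeeds or tries is exhausted.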