def _should_retry(exception: Exception) -> bool:
    """Decide whether *exception* warrants another attempt.

    Transient failures (network errors, 5xx responses, rate limiting) are
    retried; authentication problems and malformed requests fail immediately.
    """
    # Transient transport problems (dropped connections, timeouts) are
    # exactly what the retry loop exists for.
    if isinstance(exception, NetworkError):
        return True

    # Bad credentials or a malformed request - retrying cannot fix either,
    # so fail fast and surface the error to the caller.
    if isinstance(exception, (AuthenticationError, ValidationError)):
        return False

    if isinstance(exception, GhostApiError):
        # Retry only server-side failures (5xx) and rate limiting (429),
        # as indicated by the error context string; any other API error is
        # a client mistake (4xx) and is not retried.
        ctx = exception.context
        return bool(ctx) and ("HTTP 5" in ctx or "HTTP 429" in ctx)

    # Unrecognized exception type: be conservative and retry (it could be a
    # network-level issue), but log it so the classification above can be
    # tightened once we know what actually shows up here.
    logger.warning(
        "Unknown exception type encountered in retry logic",
        exception_type=type(exception).__name__,
        exception=str(exception)
    )
    return True