A metasearch library that aggregates results from diverse web search services.

Exception classes for handling errors during search operations, including rate limiting, timeouts, and general API errors. All DDGS exceptions inherit from the base DDGSException class.
Base exception class that all DDGS-specific exceptions inherit from.
class DDGSException(Exception):
    """Base exception class for all ddgs-related errors.

    This is the parent class for all exceptions raised by the DDGS
    library. Catch this exception to handle any DDGS-related error
    generically.
    """


# Raised when search services return rate limit errors, indicating too many
# requests have been made in a short time period.
class RatelimitException(DDGSException):
    """Raised when the rate limit is exceeded during API requests.

    This exception is thrown when search backends indicate that the
    request rate limit has been exceeded. The calling code should
    implement backoff strategies when catching this exception.
    """


# Raised when search requests exceed the configured timeout period.
class TimeoutException(DDGSException):
    """Raised when API requests time out.

    This exception is thrown when search operations take longer than
    the configured timeout value. Consider increasing the timeout
    value or implementing retry logic.
    """


from ddgs import DDGS, DDGSException
# Basic usage: catch the base DDGSException to handle any search error
# generically, regardless of which specific subclass was raised.
try:
    with DDGS() as ddgs:
        results = ddgs.text("search query")
        print(f"Found {len(results)} results")
except DDGSException as e:
    print(f"Search failed: {e}")

from ddgs import DDGS, DDGSException, RatelimitException, TimeoutException
import time


def robust_search(query, max_retries=3, backoff_factor=2):
    """Perform search with retry logic for common exceptions.

    Args:
        query: Search query string.
        max_retries: Maximum number of attempts before giving up.
        backoff_factor: Base of the exponential backoff; after a rate
            limit the wait is ``backoff_factor ** attempt`` seconds.

    Returns:
        List of search results from the first successful attempt.

    Raises:
        RatelimitException: Still rate limited after all retries.
        TimeoutException: Requests kept timing out after all retries.
        DDGSException: Any other search error (raised immediately,
            not retried).
    """
    for attempt in range(max_retries):
        try:
            with DDGS(timeout=10) as ddgs:
                return ddgs.text(query, max_results=20)
        except RatelimitException:
            if attempt < max_retries - 1:
                # Exponential backoff before the next attempt.
                wait_time = backoff_factor ** attempt
                print(f"Rate limited. Waiting {wait_time} seconds before retry...")
                time.sleep(wait_time)
                continue
            print("Rate limit exceeded. Max retries reached.")
            raise
        except TimeoutException:
            if attempt < max_retries - 1:
                print(f"Request timed out. Retrying attempt {attempt + 2}...")
                continue
            print("Request timed out. Max retries reached.")
            raise
        except DDGSException as e:
            # General DDGS errors are not retryable; propagate at once.
            print(f"Search error: {e}")
            raise


# Usage
try:
    results = robust_search("python programming")
    print(f"Successfully retrieved {len(results)} results")
except DDGSException as e:
    print(f"All retry attempts failed: {e}")

from ddgs import DDGS, DDGSException, RatelimitException, TimeoutException
def safe_multi_search(query):
    """Perform multiple search types with proper exception handling.

    Each search type that fails (rate limit, timeout, or other DDGS
    error) is skipped, so partial results are still returned — this is
    graceful degradation rather than all-or-nothing.

    Args:
        query: Search query string.

    Returns:
        Dict mapping each search type name ('text', 'images', 'news',
        'videos', 'books') to its (possibly empty) result list.
    """
    results = {
        'text': [],
        'images': [],
        'news': [],
        'videos': [],
        'books': [],
    }
    search_methods = [
        ('text', lambda ddgs: ddgs.text(query, max_results=10)),
        ('images', lambda ddgs: ddgs.images(query, max_results=5)),
        ('news', lambda ddgs: ddgs.news(query, max_results=5)),
        ('videos', lambda ddgs: ddgs.videos(query, max_results=5)),
        ('books', lambda ddgs: ddgs.books(query, max_results=5)),
    ]
    # Reuse a single DDGS session for all search types.
    with DDGS(timeout=15) as ddgs:
        for search_type, search_func in search_methods:
            try:
                results[search_type] = search_func(ddgs)
                print(f"{search_type.title()} search: {len(results[search_type])} results")
            except RatelimitException:
                print(f"{search_type.title()} search rate limited, skipping...")
                continue
            except TimeoutException:
                print(f"{search_type.title()} search timed out, skipping...")
                continue
            except DDGSException as e:
                print(f"{search_type.title()} search failed: {e}")
                continue
    return results


# Usage
try:
    all_results = safe_multi_search("artificial intelligence")
    total_results = sum(len(results) for results in all_results.values())
    print(f"Total results across all search types: {total_results}")
except Exception as e:
    print(f"Unexpected error: {e}")

from ddgs import DDGS, DDGSException, RatelimitException, TimeoutException
class RobustDDGS:
"""Wrapper class with built-in exception handling and retry logic."""
def __init__(self, timeout=10, max_retries=3, backoff_factor=1.5):
self.timeout = timeout
self.max_retries = max_retries
self.backoff_factor = backoff_factor
def _execute_with_retry(self, search_func):
"""Execute search function with retry logic."""
last_exception = None
for attempt in range(self.max_retries):
try:
with DDGS(timeout=self.timeout) as ddgs:
return search_func(ddgs)
except RatelimitException as e:
last_exception = e
if attempt < self.max_retries - 1:
wait_time = self.backoff_factor ** attempt
time.sleep(wait_time)
continue
except TimeoutException as e:
last_exception = e
if attempt < self.max_retries - 1:
continue
except DDGSException as e:
# Don't retry for general DDGS exceptions
raise e
# If we get here, all retries failed
raise last_exception
def text(self, query, **kwargs):
"""Text search with automatic retry."""
return self._execute_with_retry(
lambda ddgs: ddgs.text(query, **kwargs)
)
def images(self, query, **kwargs):
"""Image search with automatic retry."""
return self._execute_with_retry(
lambda ddgs: ddgs.images(query, **kwargs)
)
# Similar methods for news, videos, books...
# Usage
robust_ddgs = RobustDDGS(timeout=15, max_retries=5)
try:
results = robust_ddgs.text("machine learning", max_results=50)
print(f"Retrieved {len(results)} results successfully")
except DDGSException as e:
print(f"Search failed after all retries: {e}")Always use specific exception handling - Catch RatelimitException and TimeoutException separately when you need different handling logic.
Implement backoff strategies - When catching RatelimitException, use exponential backoff before retrying.
Set appropriate timeouts - Balance between allowing enough time for searches and not hanging indefinitely.
Use context managers - Always use `with DDGS() as ddgs:` to ensure proper resource cleanup.
Log exceptions for debugging - Include exception details in logs to help diagnose issues.
Graceful degradation - Continue with available results even if some search types fail.
Install with Tessl CLI
npx tessl i tessl/pypi-ddgs