CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-deepgram-sdk

The official Python SDK for the Deepgram automated speech recognition platform.

Pending
Overview
Eval results
Files

docs/text-analysis.md

Text Analysis

Advanced text intelligence capabilities including sentiment analysis, topic detection, intent recognition, and content summarization. The Read/Analyze module processes text content to extract insights and understanding, providing detailed analysis results for various natural language processing tasks.

Capabilities

Analyze Client

Synchronous and asynchronous clients for comprehensive text analysis with configurable analysis options.

class AnalyzeClient:
    """Synchronous client for Deepgram text analysis (Read/Analyze).

    Method bodies are omitted in this listing; the signatures document
    the public API.
    """

    def analyze_url(
        self,
        source: UrlSource,
        options: AnalyzeOptions = None,
        headers: dict = None,
        timeout = None
    ) -> AnalyzeResponse:
        """
        Analyze text content from URL.

        Args:
            source: URL source containing text to analyze
            options: Analysis configuration options
            headers: Additional HTTP headers
            timeout: Request timeout

        Returns:
            AnalyzeResponse: Complete analysis results with metadata
        """

    def analyze_text(
        self,
        source: TextSource,
        options: AnalyzeOptions = None,
        headers: dict = None,
        timeout = None
    ) -> AnalyzeResponse:
        """
        Analyze text content directly.

        Args:
            source: Text source to analyze
            options: Analysis configuration options
            headers: Additional HTTP headers
            timeout: Request timeout

        Returns:
            AnalyzeResponse: Complete analysis results with metadata
        """

class AsyncAnalyzeClient:
    """Asynchronous counterpart of AnalyzeClient; methods are awaitable."""

    async def analyze_url(
        self,
        source: UrlSource,
        options: AnalyzeOptions = None,
        headers: dict = None,
        timeout = None
    ) -> AsyncAnalyzeResponse:
        """Async version of analyze_url method"""

    async def analyze_text(
        self,
        source: TextSource,
        options: AnalyzeOptions = None,
        headers: dict = None,
        timeout = None
    ) -> AsyncAnalyzeResponse:
        """Async version of analyze_text method"""

# Alternative client names
class ReadClient(AnalyzeClient): ...  # "Read" alias for the sync analyze client
class AsyncReadClient(AsyncAnalyzeClient): ...  # "Read" alias for the async client

Router Access

Access text analysis clients through the main client's read router.

class ReadRouter:
    """Router exposing the text-analysis clients on the main DeepgramClient."""

    @property
    def analyze(self) -> AnalyzeClient: ...
    @property
    def asyncanalyze(self) -> AsyncAnalyzeClient: ...

Options Classes

class AnalyzeOptions:
    """Configuration for a text-analysis request.

    The attributes below are shown with their defaults; any of them may be
    supplied as keyword arguments to __init__.
    """

    def __init__(self, **kwargs): ...
    
    # Language settings
    language: str = "en"  # Language code for analysis
    
    # Analysis features
    topics: bool = False  # Topic detection and classification
    intents: bool = False  # Intent recognition
    sentiment: bool = False  # Sentiment analysis
    summarize: bool = False  # Text summarization
    
    # Custom models
    custom_intent: list = None  # Custom intent models
    custom_intent_mode: str = None  # Custom intent processing mode
    custom_topic: list = None  # Custom topic models
    custom_topic_mode: str = None  # Custom topic processing mode
    
    # Additional options
    callback: str = None  # Webhook callback URL
    callback_method: str = "POST"  # Callback HTTP method
    extra: dict = None  # Additional analysis options

Source Types

Input sources for text data in various formats.

class AnalyzeSource:
    """Base class for text analysis sources"""

class TextSource(AnalyzeSource):
    def __init__(self, text: str):
        """
        Text from string.

        Args:
            text: Text content to analyze
        """

class BufferSource(AnalyzeSource):
    def __init__(self, buffer: bytes):
        """
        Text from byte buffer.

        Args:
            buffer: Text content as bytes
        """

class StreamSource(AnalyzeSource):
    def __init__(self, stream):
        """
        Text from stream object.

        Args:
            stream: File-like stream object
        """

class FileSource(AnalyzeSource):
    def __init__(self, file: str):
        """
        Text from local file.

        Args:
            file: Path to local text file
        """

class UrlSource(AnalyzeSource):
    def __init__(self, url: str):
        """
        Text from URL.

        Args:
            url: HTTP/HTTPS URL to text content
        """

class AnalyzeStreamSource(AnalyzeSource):
    """Stream-specific source for analysis"""
    # NOTE(review): how this differs from StreamSource is not shown here —
    # confirm against the SDK before preferring one over the other.

Response Types

class AnalyzeResponse:
    """Main text analysis response"""
    # Top-level payload: request metadata plus the per-feature results.
    metadata: AnalyzeMetadata
    results: AnalyzeResults

class AsyncAnalyzeResponse(AnalyzeResponse):
    """Async analysis response"""

class SyncAnalyzeResponse(AnalyzeResponse):
    """Sync analysis response"""

class AnalyzeMetadata:
    """Analysis request metadata"""
    # The *_info fields default to None — presumably present only when the
    # matching feature was requested; verify against actual responses.
    request_id: str
    created: str
    language: str
    intents_info: IntentsInfo = None
    sentiment_info: SentimentInfo = None
    topics_info: TopicsInfo = None
    summary_info: SummaryInfo = None

class AnalyzeResults:
    """Analysis results container"""
    # Each field defaults to None; the usage examples below guard with
    # truthiness checks before reading them.
    sentiments: Sentiments = None
    topics: Topics = None
    intents: Intents = None
    summary: AnalyzeSummary = None

class AnalyzeSummary:
    """Text summary results"""
    # start_word/end_word: word-offset range the summary covers, when provided.
    text: str
    start_word: int = None
    end_word: int = None

Analysis Result Types

Sentiment Analysis

class Sentiments:
    """Collection of sentiment analysis results"""
    segments: list[Segment]  # each Segment carries its own `sentiments` list
    average: Average  # document-level rollup

class Sentiment:
    """Individual sentiment result"""
    sentiment: str  # "positive", "negative", "neutral"
    confidence: float

class SentimentInfo:
    """Sentiment analysis metadata"""
    input_tokens: int
    model_uuid: str

Topic Detection

class Topics:
    """Collection of topic detection results"""
    # NOTE(review): unlike Sentiments, no `average` rollup is declared here.
    segments: list[Segment]  # each Segment carries its own `topics` list

class Topic:
    """Individual topic result"""
    topic: str
    confidence: float

class TopicsInfo:
    """Topic detection metadata"""
    input_tokens: int
    model_uuid: str

Intent Recognition

class Intents:
    """Collection of intent recognition results"""
    segments: list[Segment]  # each Segment carries its own `intents` list

class Intent:
    """Individual intent result"""
    intent: str
    confidence: float

class IntentsInfo:
    """Intent recognition metadata"""
    input_tokens: int
    model_uuid: str

Common Analysis Types

class Segment:
    """Analysis segment with results"""
    # A contiguous span of the input text (word offsets start_word..end_word)
    # together with whichever per-feature results were produced for that span.
    text: str
    start_word: int
    end_word: int
    sentiments: list[Sentiment] = None
    topics: list[Topic] = None
    intents: list[Intent] = None

class Average:
    """Average analysis metrics"""
    sentiment: str
    confidence: float

class SummaryInfo:
    """Summary generation metadata"""
    input_tokens: int
    model_uuid: str

Usage Examples

Basic Text Analysis

from deepgram import DeepgramClient, TextSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Run sentiment, topic, intent, and summary analysis in a single request.
text = """
I absolutely love this new product! It's innovative and well-designed. 
The customer service team was incredibly helpful when I had questions about pricing and features.
I'm definitely planning to recommend this to my colleagues for our upcoming project.
"""

response = client.read.analyze.analyze_text(
    TextSource(text),
    AnalyzeOptions(
        language="en",
        sentiment=True,
        topics=True,
        intents=True,
        summarize=True,
    ),
)

results = response.results

# Each feature's results are present only if that option was enabled above.
if results.sentiments:
    overall = results.sentiments.average
    print(f"Overall sentiment: {overall.sentiment} ({overall.confidence:.2f})")

if results.topics:
    for seg in results.topics.segments:
        for found_topic in seg.topics:
            print(f"Topic: {found_topic.topic} ({found_topic.confidence:.2f})")

if results.intents:
    for seg in results.intents.segments:
        for found_intent in seg.intents:
            print(f"Intent: {found_intent.intent} ({found_intent.confidence:.2f})")

if results.summary:
    print(f"Summary: {results.summary.text}")

Sentiment Analysis Only

from deepgram import DeepgramClient, TextSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Score each customer review's sentiment independently.
reviews = [
    "This product exceeded my expectations! Highly recommend.",
    "The service was okay, nothing special but not bad either.",
    "Very disappointed with the quality. Would not buy again."
]

for idx, review_text in enumerate(reviews, start=1):
    response = client.read.analyze.analyze_text(
        TextSource(review_text), AnalyzeOptions(sentiment=True)
    )

    if not response.results.sentiments:
        continue

    overall = response.results.sentiments.average
    print(f"Review {idx}: {overall.sentiment} ({overall.confidence:.2f})")

    # Per-segment breakdown of the same review.
    for segment in response.results.sentiments.segments:
        for sent in segment.sentiments:
            print(f"  Segment: '{segment.text}' -> {sent.sentiment} ({sent.confidence:.2f})")

Topic Detection

from deepgram import DeepgramClient, TextSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Detect topics across a longer, multi-sentence document.
content = """
The quarterly financial results show strong performance across all business units.
Revenue increased by 15% compared to last quarter, driven primarily by growth in our software division.
Customer satisfaction scores remain high, with 92% reporting positive experiences.
The marketing team launched several successful campaigns that contributed to brand awareness.
Our technology infrastructure investments are paying dividends with improved system reliability.
Looking ahead, we're optimistic about market expansion opportunities in the coming year.
"""

response = client.read.analyze.analyze_text(
    TextSource(content),
    AnalyzeOptions(
        topics=True,
        language="en",
    ),
)

if response.results.topics:
    print("Detected Topics:")
    seen = set()  # report each distinct topic once, at first occurrence

    for segment in response.results.topics.segments:
        for candidate in segment.topics:
            if candidate.topic in seen:
                continue
            seen.add(candidate.topic)
            print(f"- {candidate.topic} (confidence: {candidate.confidence:.2f})")

Intent Recognition

from deepgram import DeepgramClient, TextSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Classify the intent behind typical customer-service messages.
interactions = [
    "I need help with canceling my subscription",
    "Can you tell me about your pricing plans?",
    "I want to upgrade my account to the premium tier",
    "There's an issue with my recent order, it hasn't arrived yet",
    "How do I reset my password?"
]

for message in interactions:
    response = client.read.analyze.analyze_text(
        TextSource(message), AnalyzeOptions(intents=True)
    )

    print(f"Text: '{message}'")

    if response.results.intents:
        for segment in response.results.intents.segments:
            for detected in segment.intents:
                print(f"  Intent: {detected.intent} (confidence: {detected.confidence:.2f})")
    print()

Text Summarization

from deepgram import DeepgramClient, FileSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Summarize content from file
# NOTE(review): analyze_text is declared above with `source: TextSource`, yet a
# FileSource is passed here. Both derive from AnalyzeSource — confirm the SDK
# accepts any AnalyzeSource for analyze_text before relying on this pattern.
source = FileSource("long_article.txt")
options = AnalyzeOptions(
    summarize=True,
    language="en"
)

response = client.read.analyze.analyze_text(source, options)

if response.results.summary:
    print("Summary:")
    print(response.results.summary.text)
    
    # start_word/end_word are optional; only report coverage when present.
    if response.results.summary.start_word is not None:
        print(f"Summary covers words {response.results.summary.start_word} to {response.results.summary.end_word}")

Async Text Analysis

import asyncio
from deepgram import DeepgramClient, TextSource, AnalyzeOptions

async def async_analysis_example():
    """Analyze several texts concurrently with the async (read) client."""
    client = DeepgramClient(api_key="your-api-key")

    texts = [
        "This is the first text to analyze.",
        "Here's another piece of content for analysis.",
        "And this is the third text sample."
    ]

    options = AnalyzeOptions(
        sentiment=True,
        topics=True,
        language="en"
    )

    # Fan out one request per text. BUG FIX: AsyncAnalyzeClient declares
    # analyze_text/analyze_url — there is no bare `analyze` method, so the
    # original `asyncanalyze.analyze(...)` call would raise AttributeError.
    tasks = [
        client.read.asyncanalyze.analyze_text(TextSource(text), options)
        for text in texts
    ]

    responses = await asyncio.gather(*tasks)

    for i, response in enumerate(responses):
        print(f"Analysis {i+1}:")
        if response.results.sentiments:
            sentiment = response.results.sentiments.average
            print(f"  Sentiment: {sentiment.sentiment} ({sentiment.confidence:.2f})")
        print()

# Run async example
asyncio.run(async_analysis_example())

Custom Models

from deepgram import DeepgramClient, TextSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Route intent and topic detection through custom-trained models.
response = client.read.analyze.analyze_text(
    TextSource("Text for analysis with custom models"),
    AnalyzeOptions(
        intents=True,
        topics=True,
        custom_intent=["custom-intent-model-id"],
        custom_intent_mode="strict",
        custom_topic=["custom-topic-model-id"],
        custom_topic_mode="extended",
    ),
)

# Results from the custom models arrive in the standard segment structure.
if response.results.intents:
    print("Custom Intent Results:")
    for segment in response.results.intents.segments:
        for hit in segment.intents:
            print(f"  {hit.intent}: {hit.confidence:.2f}")

if response.results.topics:
    print("Custom Topic Results:")
    for segment in response.results.topics.segments:
        for hit in segment.topics:
            print(f"  {hit.topic}: {hit.confidence:.2f}")

Error Handling

from deepgram import DeepgramClient, DeepgramApiError, TextSource, AnalyzeOptions

client = DeepgramClient(api_key="your-api-key")

# Result processing stays inside the try so any unexpected failure while
# reading the response is also caught below.
try:
    response = client.read.analyze.analyze_text(
        TextSource("Text to analyze"),
        AnalyzeOptions(
            sentiment=True,
            language="invalid-language-code"  # This may cause an error
        ),
    )

    # Process results
    if response.results.sentiments:
        print(f"Sentiment: {response.results.sentiments.average.sentiment}")

except DeepgramApiError as e:
    print(f"API Error: {e}")
except Exception as e:
    print(f"Unexpected error: {e}")

Install with Tessl CLI

npx tessl i tessl/pypi-deepgram-sdk

docs

audio-utilities.md

conversational-ai.md

index.md

project-management.md

speech-to-text.md

text-analysis.md

text-to-speech.md

tile.json