CtrlK
Blog · Docs · Log in · Get started
Tessl Logo

tessl/pypi-ffn

Financial functions for Python providing performance analysis, risk metrics, portfolio optimization, and data retrieval for quantitative finance

Pending
Overview
Eval results
Files

docs/portfolio-optimization.md

Portfolio Optimization

Modern portfolio theory implementations including mean-variance optimization, risk parity approaches, and weight constraint utilities. Provides sophisticated portfolio construction algorithms for quantitative asset allocation.

Capabilities

Mean-Variance Optimization

Classic Markowitz mean-variance optimization for finding optimal portfolio weights.

def calc_mean_var_weights(returns, weight_bounds=(0.0, 1.0), rf=0.0, covar_method="ledoit-wolf", options=None):
    """
    Calculate mean-variance optimization weights (maximum Sharpe ratio portfolio).

    Classic Markowitz optimization: among portfolios whose per-asset weights
    satisfy ``weight_bounds``, select the one with the highest Sharpe ratio
    relative to the risk-free rate ``rf``.

    Parameters:
    - returns (pd.DataFrame): Return series for assets, one column per asset
    - weight_bounds (tuple): (min, max) weight allowed per asset
      (default: (0.0, 1.0), i.e. long-only with no leverage)
    - rf (float): Risk-free rate used in the Sharpe ratio (default: 0.0)
    - covar_method (str): Covariance estimation method
      ('ledoit-wolf', 'empirical', 'oas')
    - options (dict): Additional optimization options forwarded to the
      underlying solver (optional)

    Returns:
    pd.Series: Optimal portfolio weights indexed by asset names
    """

Risk Parity Optimization

Equal risk contribution portfolio construction methods.

def calc_erc_weights(returns, initial_weights=None, risk_weights=None, covar_method="ledoit-wolf", risk_parity_method="ccd", maximum_iterations=100, tolerance=1e-8):
    """
    Calculate equal risk contribution (ERC) weights where each asset contributes equally to portfolio risk.

    Supplying ``risk_weights`` generalizes the method to a risk-budgeting
    portfolio in which each asset contributes its targeted share of total
    portfolio risk rather than an equal share.

    Parameters:
    - returns (pd.DataFrame): Return series for assets, one column per asset
    - initial_weights (pd.Series): Starting weights for the iterative
      optimization (default: equal weights)
    - risk_weights (pd.Series): Target risk contribution per asset, indexed
      like the columns of ``returns`` (default: equal risk for every asset)
    - covar_method (str): Covariance estimation method
      ('ledoit-wolf', 'empirical', 'oas')
    - risk_parity_method (str): Risk parity algorithm
      ('ccd' - cyclical coordinate descent)
    - maximum_iterations (int): Maximum optimization iterations (default: 100)
    - tolerance (float): Convergence tolerance for stopping the iteration
      (default: 1e-8)

    Returns:
    pd.Series: ERC portfolio weights indexed by asset names
    """

def calc_inv_vol_weights(returns):
    """
    Calculate inverse volatility weights (simple risk parity approach).

    Each asset is weighted proportionally to the inverse of its return
    volatility, so less volatile assets receive larger allocations; this
    ignores cross-asset correlations, unlike full ERC optimization.

    Parameters:
    - returns (pd.DataFrame): Return series for assets, one column per asset

    Returns:
    pd.Series: Inverse volatility weights indexed by asset names
    """

Weight Utilities

Utility functions for weight manipulation and constraint handling.

def limit_weights(weights, limit=0.1):
    """
    Apply maximum weight limits and redistribute excess proportionally.

    Weights above ``limit`` are capped and the removed excess is spread
    across the remaining (uncapped) assets in proportion to their weights.

    Parameters:
    - weights (pd.Series): Portfolio weights indexed by asset names
    - limit (float): Maximum weight per asset (default: 0.1 for 10%);
      note the cap must be feasible, i.e. limit * number_of_assets >= 1

    Returns:
    pd.Series: Adjusted weights respecting limits
    """

def random_weights(n, bounds=(0.0, 1.0), total=1.0):
    """
    Generate random portfolio weights for testing or Monte Carlo simulation.

    Parameters:
    - n (int): Number of assets
    - bounds (tuple): (min, max) weight bounds applied to each individual
      asset (default: (0.0, 1.0))
    - total (float): Total weight sum the generated vector must satisfy
      (default: 1.0, i.e. a fully-invested portfolio)

    Returns:
    np.array: Random weights summing to total
    """

Usage Examples

Mean-Variance Optimization

import ffn
import pandas as pd

# Fetch prices for a basket of large-cap tech names and convert to returns
tickers = ['AAPL', 'MSFT', 'GOOGL', 'AMZN', 'TSLA']
price_history = ffn.get(tickers, start='2020-01-01')
asset_returns = ffn.to_returns(price_history).dropna()

# Maximum-Sharpe (mean-variance) weights with a 2% risk-free rate
optimal_weights = ffn.calc_mean_var_weights(asset_returns, rf=0.02)
print("Mean-Variance Optimal Weights:")
print(optimal_weights.round(4))

# Cap any single position at 30% and redistribute the excess
capped_weights = ffn.limit_weights(optimal_weights, limit=0.3)
print("\nWith 30% Weight Limit:")
print(capped_weights.round(4))

# Evaluate the optimized portfolio's risk-adjusted performance
blended_returns = (asset_returns * optimal_weights).sum(axis=1)
blended_sharpe = ffn.calc_sharpe(blended_returns, rf=0.02)
print(f"\nPortfolio Sharpe Ratio: {blended_sharpe:.3f}")

Risk Parity Optimization

import ffn

# Fetch a diversified ETF basket and convert prices to returns
assets = ['VTI', 'VEA', 'VWO', 'BND', 'VNQ']  # Stocks, Bonds, REITs
price_data = ffn.get(assets, start='2015-01-01')
daily_returns = ffn.to_returns(price_data).dropna()

# Full ERC solution: every asset contributes the same share of total risk
equal_risk_weights = ffn.calc_erc_weights(daily_returns)
print("Equal Risk Contribution Weights:")
print(equal_risk_weights.round(4))

# Simpler alternative: weight each asset by the inverse of its volatility
iv_weights = ffn.calc_inv_vol_weights(daily_returns)
print("\nInverse Volatility Weights:")
print(iv_weights.round(4))

# Compare annualized volatility of the two portfolio constructions
erc_port = (daily_returns * equal_risk_weights).sum(axis=1)
iv_port = (daily_returns * iv_weights).sum(axis=1)

print(f"\nERC Portfolio Volatility: {erc_port.std() * (252**0.5):.3f}")
print(f"Inv Vol Portfolio Volatility: {iv_port.std() * (252**0.5):.3f}")

Custom Risk Targets

import ffn
import pandas as pd

# Multi-asset universe: three equity ETFs plus bonds and gold
assets = ['SPY', 'QQQ', 'IWM', 'TLT', 'GLD']
price_frame = ffn.get(assets, start='2018-01-01')
ret_frame = ffn.to_returns(price_frame).dropna()

# Split the risk budget: 60% to equities, 40% to alternatives
equity_assets = ['SPY', 'QQQ', 'IWM']
alt_assets = ['TLT', 'GLD']

# Build per-asset risk targets, equal within each bucket
risk_budget = pd.Series(index=ret_frame.columns)
risk_budget[equity_assets] = 0.60 / len(equity_assets)  # Equal risk within equity
risk_budget[alt_assets] = 0.40 / len(alt_assets)       # Equal risk within alternatives

# Solve for weights matching the custom risk budget
budgeted_weights = ffn.calc_erc_weights(ret_frame, risk_weights=risk_budget)
print("Custom Risk Target Weights:")
print(budgeted_weights.round(4))

# Sanity-check overall risk-adjusted performance
combined_returns = (ret_frame * budgeted_weights).sum(axis=1)
print(f"\nPortfolio Sharpe: {ffn.calc_sharpe(combined_returns, rf=0.02):.3f}")

Weight Constraint Management

import ffn
import numpy as np
import pandas as pd  # needed below for pd.DataFrame (was missing in the original)

# Generate optimization scenario
assets = ['AAPL', 'MSFT', 'GOOGL', 'AMZN', 'NFLX', 'NVDA', 'META', 'TSLA']
prices = ffn.get(assets, start='2020-01-01')
returns = ffn.to_returns(prices).dropna()

# Unconstrained mean-variance optimization (bounds default to (0, 1))
unconstrained_weights = ffn.calc_mean_var_weights(returns, rf=0.02)
print("Unconstrained Weights:")
print(unconstrained_weights.round(4))
print(f"Max weight: {unconstrained_weights.max():.3f}")

# Apply progressively tighter per-asset weight caps and inspect dispersion
for limit in [0.4, 0.3, 0.2, 0.15]:
    limited = ffn.limit_weights(unconstrained_weights, limit=limit)
    print(f"\nMax {limit*100:.0f}% Weight Limit:")
    print(f"Largest weight: {limited.max():.3f}")
    print(f"Number of assets > 5%: {(limited > 0.05).sum()}")

# Compare portfolio performance across constraint levels
# (None = keep the unconstrained solution as the baseline)
results = {}
for limit in [None, 0.4, 0.3, 0.2, 0.15]:
    if limit is None:
        weights = unconstrained_weights
        label = 'Unconstrained'
    else:
        weights = ffn.limit_weights(unconstrained_weights, limit=limit)
        label = f'{limit*100:.0f}% Limit'

    port_returns = (returns * weights).sum(axis=1)
    results[label] = {
        'Sharpe': ffn.calc_sharpe(port_returns, rf=0.02),
        'Volatility': port_returns.std() * (252**0.5),  # annualized (252 trading days)
        'Max Weight': weights.max()
    }

# One row per constraint level, metrics as columns
constraint_df = pd.DataFrame(results).T
print("\nConstraint Impact Analysis:")
print(constraint_df.round(3))

Advanced Optimization

import ffn
import pandas as pd

# Multi-period optimization example
prices = ffn.get('SPY,QQQ,IWM,EFA,EEM,TLT,GLD', start='2010-01-01')
returns = ffn.to_returns(prices).dropna()

# Rolling optimization (rebalancing quarterly)
lookback_days = 252 * 2  # 2 years of data
rebalance_freq = 63      # Quarterly (approx 63 trading days)

portfolio_performance = []
rebalance_dates = returns.index[lookback_days::rebalance_freq]

for rebal_date in rebalance_dates[:8]:  # Limited example
    # Get historical data for optimization
    end_idx = returns.index.get_loc(rebal_date)
    # BUG FIX: original referenced undefined name 'lookbook_days'
    start_idx = end_idx - lookback_days
    hist_returns = returns.iloc[start_idx:end_idx]

    # Calculate optimal weights on the lookback window only (no look-ahead)
    weights = ffn.calc_mean_var_weights(hist_returns, rf=0.02)

    # Apply to next period (out-of-sample until the next rebalance)
    next_period_start = end_idx
    next_period_end = min(end_idx + rebalance_freq, len(returns))
    next_returns = returns.iloc[next_period_start:next_period_end]

    # Portfolio returns for this holding period
    port_returns = (next_returns * weights).sum(axis=1)

    portfolio_performance.extend(port_returns.tolist())

    print(f"Rebalance {rebal_date.strftime('%Y-%m-%d')}: Sharpe = {ffn.calc_sharpe(port_returns, rf=0.02):.3f}")

# Convert to series and analyze; re-attach the matching date index
portfolio_series = pd.Series(portfolio_performance,
                           index=returns.index[lookback_days:lookback_days+len(portfolio_performance)])

print(f"\nOverall Rolling Portfolio Sharpe: {ffn.calc_sharpe(portfolio_series, rf=0.02):.3f}")

Install with Tessl CLI

npx tessl i tessl/pypi-ffn

docs

data-retrieval.md

data-utilities.md

index.md

pandas-extensions.md

performance-analysis.md

portfolio-optimization.md

return-calculations.md

risk-metrics.md

statistical-analysis.md

tile.json