A comprehensive toolbox for modeling and simulating photovoltaic energy systems.

Model various loss mechanisms that affect photovoltaic system performance, including soiling, snow coverage, shading effects, and system losses. These tools quantify and predict energy losses under real-world operating conditions.
# Model the accumulation and cleaning of dust and debris on PV modules.
def hsu(rainfall, cleaning_threshold, surface_tilt, pm2_5, pm10,
        depo_veloc=None, rain_accum_period=None):
    """Estimate soiling with the HSU (Humboldt State University) model.

    Particulate matter settles on the array at a rate driven by the
    PM2.5 and PM10 concentrations, while rainfall accumulated above
    ``cleaning_threshold`` restores the array to a clean state.

    Parameters
    ----------
    rainfall : array-like
        Daily rainfall depth. [mm]
    cleaning_threshold : numeric
        Accumulated rainfall required to wash the array clean. [mm]
    surface_tilt : numeric
        Tilt of the modules from horizontal. [degrees]
    pm2_5 : array-like
        Airborne PM2.5 concentration. [μg/m³]
    pm10 : array-like
        Airborne PM10 concentration. [μg/m³]
    depo_veloc : dict, optional
        Deposition velocity for each particle-size class.
    rain_accum_period : int, optional
        Number of days over which rainfall is accumulated before being
        compared to ``cleaning_threshold``.

    Returns
    -------
    pandas.Series
        Soiling ratio in [0, 1]; 1 corresponds to a perfectly clean array.
    """
def kimber(rainfall, cleaning_threshold=6, soiling_loss_rate=0.0015,
           grace_period=14, max_loss_rate=0.3, manual_wash_dates=None):
    """Estimate soiling with the Kimber model.

    Soiling accrues at a constant daily rate, capped at ``max_loss_rate``,
    and is fully reset by rain events exceeding ``cleaning_threshold``
    (or by manual washes). A post-cleaning ``grace_period`` delays the
    onset of new accumulation.

    Parameters
    ----------
    rainfall : pandas.Series
        Daily rainfall depth. [mm]
    cleaning_threshold : numeric, default 6
        Daily rainfall required to clean the array. [mm]
    soiling_loss_rate : numeric, default 0.0015
        Fractional energy loss accrued per dry day. [1/day]
    grace_period : int, default 14
        Days after a cleaning during which no new soiling accrues.
    max_loss_rate : numeric, default 0.3
        Upper bound on the cumulative soiling loss fraction (0-1).
    manual_wash_dates : array-like, optional
        Dates on which the array is washed manually.

    Returns
    -------
    pandas.Series
        Soiling ratio in [0, 1]; 1 corresponds to a perfectly clean array.
    """

# Model snow accumulation and melting effects on PV modules.
def fully_covered_nrel(snowfall, snow_depth=None, threshold_snowfall=1.0,
                       threshold_snow_depth=None):
    """Flag periods when modules are fully covered by snow (NREL model).

    A day is considered fully covered when snowfall exceeds
    ``threshold_snowfall``; ground snow depth may optionally be used as
    an additional trigger.

    Parameters
    ----------
    snowfall : array-like
        Daily snowfall. [cm]
    snow_depth : array-like, optional
        Snow depth on the ground. [cm]
    threshold_snowfall : numeric, default 1.0
        Snowfall above which the array is treated as fully covered. [cm]
    threshold_snow_depth : numeric, optional
        Ground snow depth triggering full coverage. [cm]

    Returns
    -------
    pandas.Series
        Snow coverage fraction in [0, 1].
    """
def coverage_nrel(snowfall, poa_irradiance, temp_air, surface_tilt,
                  threshold_snowfall=1.0, threshold_temp=2.0):
    """Track snow coverage including slide-off/melt (NREL coverage model).

    New snowfall above ``threshold_snowfall`` covers the array; coverage
    then recedes as irradiance and air temperature drive melting and
    sliding, faster on steeper tilts.

    Parameters
    ----------
    snowfall : array-like
        Daily snowfall. [cm]
    poa_irradiance : array-like
        Plane-of-array irradiance. [W/m²]
    temp_air : array-like
        Ambient air temperature. [°C]
    surface_tilt : numeric
        Tilt of the modules from horizontal. [degrees]
    threshold_snowfall : numeric, default 1.0
        Snowfall above which the array becomes covered. [cm]
    threshold_temp : numeric, default 2.0
        Air temperature above which melting occurs. [°C]

    Returns
    -------
    pandas.Series
        Snow coverage fraction in [0, 1].
    """
def dc_loss_nrel(snow_coverage, num_strings):
    """Convert snow coverage into a DC power loss factor (NREL model).

    Loss is quantized by string: partial coverage knocks out whole
    strings rather than scaling power continuously.

    Parameters
    ----------
    snow_coverage : array-like
        Fraction of the array covered by snow, in [0, 1].
    num_strings : int
        Number of parallel strings in the array.

    Returns
    -------
    pandas.Series
        DC loss factor in [0, 1]; 0 means no loss.
    """
def loss_townsend(snow_total, snow_events, surface_tilt, relative_humidity,
                  wind_speed, temp_air=None):
    """Estimate seasonal snow losses with the Townsend model.

    A bulk (non-time-series) model: total seasonal snowfall and the
    number of discrete snow events, together with site climate, yield a
    single seasonal loss fraction.

    Parameters
    ----------
    snow_total : numeric
        Total seasonal snowfall. [cm]
    snow_events : int
        Number of snow events per season.
    surface_tilt : numeric
        Tilt of the modules from horizontal. [degrees]
    relative_humidity : numeric
        Average relative humidity, 0-100. [%]
    wind_speed : numeric
        Average wind speed. [m/s]
    temp_air : numeric, optional
        Average air temperature. [°C]

    Returns
    -------
    numeric
        Seasonal snow loss fraction in [0, 1].
    """

# Model shading effects from nearby objects and inter-row shading.
def ground_angle(surface_tilt, gcr, slant_height):
    """Compute the angle from a module's lower edge to the row behind it.

    Used in inter-row shading analysis of fixed-tilt arrays.

    Parameters
    ----------
    surface_tilt : numeric
        Tilt of the modules from horizontal. [degrees]
    gcr : numeric
        Ground coverage ratio, in [0, 1].
    slant_height : numeric
        Slant height of the module. [m]

    Returns
    -------
    numeric
        Ground coverage angle. [degrees]
    """
def masking_angle(surface_tilt, gcr, slant_height):
    """Compute the masking angle for row-to-row diffuse shading.

    The masking angle is the elevation of the top of the preceding row
    as seen from a point on the shaded row's surface.

    Parameters
    ----------
    surface_tilt : numeric
        Tilt of the modules from horizontal. [degrees]
    gcr : numeric
        Ground coverage ratio, in [0, 1].
    slant_height : numeric
        Slant height of the module. [m]

    Returns
    -------
    numeric
        Masking angle. [degrees]
    """
def masking_angle_passias(surface_tilt, gcr):
    """Compute the average masking angle using the Passias formulation.

    Parameters
    ----------
    surface_tilt : numeric
        Tilt of the modules from horizontal. [degrees]
    gcr : numeric
        Ground coverage ratio, in [0, 1].

    Returns
    -------
    numeric
        Masking angle. [degrees]
    """
def sky_diffuse_passias(masking_angle):
    """Compute the fraction of sky diffuse irradiance lost to masking.

    Uses the Passias relationship between the masking angle and the
    blocked portion of the sky dome.

    Parameters
    ----------
    masking_angle : numeric
        Masking angle of the preceding row. [degrees]

    Returns
    -------
    numeric
        Sky diffuse fraction, in [0, 1].
    """
def projected_solar_zenith_angle(solar_zenith, solar_azimuth, axis_azimuth):
    """Project the sun position onto the plane perpendicular to a tracker axis.

    Parameters
    ----------
    solar_zenith : numeric
        Solar zenith angle. [degrees]
    solar_azimuth : numeric
        Solar azimuth angle. [degrees]
    axis_azimuth : numeric
        Azimuth of the tracker rotation axis. [degrees]

    Returns
    -------
    numeric
        Projected solar zenith angle. [degrees]
    """
def shaded_fraction1d(solar_zenith, solar_azimuth, axis_azimuth,
                      gcr, cross_axis_tilt=0.0):
    """Compute the shaded row fraction for single-axis tracked systems.

    Parameters
    ----------
    solar_zenith : numeric
        Solar zenith angle. [degrees]
    solar_azimuth : numeric
        Solar azimuth angle. [degrees]
    axis_azimuth : numeric
        Azimuth of the tracker rotation axis. [degrees]
    gcr : numeric
        Ground coverage ratio, in [0, 1].
    cross_axis_tilt : numeric, default 0.0
        Tilt of the ground plane perpendicular to the tracker axis. [degrees]

    Returns
    -------
    numeric
        Shaded fraction of the row, in [0, 1].
    """

# Model various electrical and optical system losses.
def pvwatts_losses(soiling=2, shading=3, snow=0, mismatch=2, wiring=2,
                   connections=0.5, lid=1.5, nameplate_rating=1, age=0,
                   availability=3):
    """Compute the PVWatts system loss breakdown.

    Each argument is an independent loss channel expressed as a percent;
    the channels combine multiplicatively into an overall derate.

    Parameters
    ----------
    soiling : numeric, default 2
        Soiling loss. [%]
    shading : numeric, default 3
        Far-field shading loss. [%]
    snow : numeric, default 0
        Snow loss. [%]
    mismatch : numeric, default 2
        Module mismatch loss. [%]
    wiring : numeric, default 2
        DC wiring loss. [%]
    connections : numeric, default 0.5
        DC connection loss. [%]
    lid : numeric, default 1.5
        Light-induced degradation. [%]
    nameplate_rating : numeric, default 1
        Nameplate rating loss. [%]
    age : numeric, default 0
        Age-related degradation. [%]
    availability : numeric, default 3
        System availability loss. [%]

    Returns
    -------
    dict
        Individual loss factors plus the total system derate factor.
    """
def dc_ohms_from_percent(vmp_ref, imp_ref, dc_ohmic_percent,
                         modules_per_string=1, strings=1):
    """Translate a DC ohmic loss percentage into an equivalent resistance.

    The resistance is referenced to the array's maximum-power operating
    point at STC.

    Parameters
    ----------
    vmp_ref : numeric
        Module voltage at maximum power. [V]
    imp_ref : numeric
        Module current at maximum power. [A]
    dc_ohmic_percent : numeric
        DC ohmic loss at the reference operating point. [%]
    modules_per_string : int, default 1
        Series modules per string.
    strings : int, default 1
        Parallel strings in the array.

    Returns
    -------
    numeric
        Equivalent series resistance. [ohm]
    """
def dc_ohmic_losses(resistance, current):
    """Compute resistive (I²R) power dissipation on the DC side.

    Parameters
    ----------
    resistance : numeric
        Equivalent series resistance. [ohm]
    current : array-like
        DC operating current. [A]

    Returns
    -------
    array-like
        Dissipated power. [W]
    """
def combine_loss_factors(index, *losses, fill_method='ffill'):
    """Merge several loss factors into one time series on a common index.

    Parameters
    ----------
    index : pandas.Index
        Time index of the combined output.
    *losses : pandas.Series or scalar
        Loss factors to combine; series are reindexed onto ``index``.
    fill_method : str, default 'ffill'
        Method used to fill values missing after reindexing.

    Returns
    -------
    pandas.Series
        Combined loss factor on ``index``.
    """

# Additional loss models for specific conditions and applications.
def thermal_loss_factor(cell_temperature, temp_ref=25, temp_coeff=-0.004):
    """Compute the multiplicative power loss due to cell temperature.

    Parameters
    ----------
    cell_temperature : array-like
        Cell temperature. [°C]
    temp_ref : numeric, default 25
        Reference (STC) temperature. [°C]
    temp_coeff : numeric, default -0.004
        Power temperature coefficient. [1/°C]

    Returns
    -------
    array-like
        Thermal loss factor; values below 1 indicate a loss.
    """
def spectral_loss_factor(airmass, precipitable_water, module_type='csi'):
    """Compute the multiplicative spectral mismatch loss factor.

    Parameters
    ----------
    airmass : array-like
        Absolute (pressure-corrected) airmass.
    precipitable_water : array-like
        Atmospheric precipitable water. [cm]
    module_type : str, default 'csi'
        Module technology identifier.

    Returns
    -------
    array-like
        Spectral loss factor (multiplicative).
    """
def iam_loss_factor(aoi, iam_model='physical', **kwargs):
    """Compute the incidence-angle-modifier (IAM) loss factor.

    Parameters
    ----------
    aoi : array-like
        Angle of incidence on the module plane. [degrees]
    iam_model : str, default 'physical'
        Name of the IAM model to apply.
    **kwargs
        Extra parameters forwarded to the chosen IAM model.

    Returns
    -------
    array-like
        IAM loss factor; values below 1 indicate a loss.
    """
def degradation_loss(years_operation, degradation_rate=0.005):
    """Compute the cumulative degradation factor after years of operation.

    Parameters
    ----------
    years_operation : numeric
        Years since system installation.
    degradation_rate : numeric, default 0.005
        Annual degradation rate. [fraction/year]

    Returns
    -------
    numeric
        Cumulative degradation factor; values below 1 indicate a loss.
    """

import pvlib
from pvlib import soiling, iotools
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Load weather data with precipitation
lat, lon = 37.8756, -122.2441 # Berkeley, CA
try:
# Try to get actual weather data
weather_data, metadata = iotools.get_psm3(
lat, lon, api_key='DEMO_KEY', email='user@example.com',
names=2023, attributes=['ghi', 'temp_air', 'precip']
)
has_precip = 'precip' in weather_data.columns
except:
# Generate synthetic weather data
print("Using synthetic weather data")
dates = pd.date_range('2023-01-01', '2023-12-31', freq='D')
# Simulate seasonal precipitation pattern (more in winter)
np.random.seed(42)
seasonal_pattern = 1 + 0.8 * np.sin(2 * np.pi * (dates.dayofyear + 90) / 365)
base_precip = np.random.exponential(2.0, len(dates)) # Exponential distribution
daily_precip = base_precip * seasonal_pattern
daily_precip[np.random.random(len(dates)) > 0.3] = 0 # 70% dry days
weather_data = pd.DataFrame({
'ghi': 200 + 300 * np.sin(2 * np.pi * dates.dayofyear / 365), # Seasonal GHI
'temp_air': 15 + 10 * np.sin(2 * np.pi * dates.dayofyear / 365), # Seasonal temperature
'precip': daily_precip
}, index=dates)
has_precip = True
if not has_precip:
# Add synthetic precipitation if not available
dates = weather_data.index
np.random.seed(42)
seasonal_pattern = 1 + 0.8 * np.sin(2 * np.pi * (dates.dayofyear + 90) / 365)
base_precip = np.random.exponential(2.0, len(dates))
daily_precip = base_precip * seasonal_pattern
daily_precip[np.random.random(len(dates)) > 0.3] = 0
weather_data['precip'] = daily_precip
# ---- Soiling scenario sweep: environments x cleaning thresholds ----
# System parameters
surface_tilt = 25  # degrees
cleaning_threshold_low = 3  # mm (frequent cleaning)
cleaning_threshold_high = 10  # mm (infrequent cleaning)
# Soiling model parameters for different environments
environments = {
    'Clean': {'loss_rate': 0.0005, 'pm2_5': 8, 'pm10': 15},  # Rural/coastal
    'Moderate': {'loss_rate': 0.0015, 'pm2_5': 15, 'pm10': 30},  # Suburban
    'Dusty': {'loss_rate': 0.003, 'pm2_5': 30, 'pm10': 60}  # Desert/industrial
}
# Calculate soiling for different scenarios; results keyed as
# "<Env>_T<threshold>" (Kimber) or "<Env>_Hsu" (HSU).
soiling_results = {}
for env_name, params in environments.items():
    print(f"Calculating soiling for {env_name} environment...")
    # Kimber model with different cleaning thresholds
    for threshold in [cleaning_threshold_low, cleaning_threshold_high]:
        scenario_name = f"{env_name}_T{threshold}"
        soiling_ratio = soiling.kimber(
            rainfall=weather_data['precip'],
            cleaning_threshold=threshold,
            soiling_loss_rate=params['loss_rate'],
            grace_period=7,
            max_loss_rate=0.25
        )
        soiling_results[scenario_name] = soiling_ratio
    # Hsu model (if PM data available)
    if 'pm2_5' in params and 'pm10' in params:
        try:
            # Create constant PM concentrations (in real application, use time series)
            pm2_5_series = pd.Series(params['pm2_5'], index=weather_data.index)
            pm10_series = pd.Series(params['pm10'], index=weather_data.index)
            hsu_soiling = soiling.hsu(
                rainfall=weather_data['precip'],
                cleaning_threshold=cleaning_threshold_low,
                surface_tilt=surface_tilt,
                pm2_5=pm2_5_series,
                pm10=pm10_series
            )
            soiling_results[f"{env_name}_Hsu"] = hsu_soiling
        except Exception as e:
            # HSU is best-effort here; keep the Kimber scenarios on failure.
            print(f"Hsu model not available: {e}")
# Calculate energy impact of each scenario relative to an always-clean array
poa_clean = weather_data['ghi'] * 1.2  # Simplified POA calculation
annual_energy_impacts = {}
for scenario, soiling_ratio in soiling_results.items():
    # Apply soiling losses
    poa_soiled = poa_clean * soiling_ratio
    # Calculate energy totals
    clean_energy = poa_clean.sum() / 1000  # kWh/m²/year
    soiled_energy = poa_soiled.sum() / 1000
    energy_loss = (clean_energy - soiled_energy) / clean_energy * 100
    annual_energy_impacts[scenario] = {
        'clean_energy': clean_energy,
        'soiled_energy': soiled_energy,
        'energy_loss_pct': energy_loss,
        'avg_soiling_ratio': soiling_ratio.mean()
    }
# ---- Soiling plots: time series, monthly averages, energy bars, rain scatter ----
# Plot soiling analysis
fig, axes = plt.subplots(3, 2, figsize=(15, 12))
# Time series comparison for one environment
env_scenarios = [k for k in soiling_results.keys() if k.startswith('Moderate')]
for scenario in env_scenarios:
    axes[0, 0].plot(soiling_results[scenario].index,
                    soiling_results[scenario] * 100,
                    label=scenario.replace('Moderate_', ''), linewidth=1.5)
axes[0, 0].set_ylabel('Soiling Ratio (%)')
axes[0, 0].set_title('Soiling Time Series - Moderate Environment')
axes[0, 0].legend()
axes[0, 0].grid(True)
# Monthly averages comparison
# NOTE(review): the 'M' resample alias is deprecated in pandas >= 2.2
# (use 'ME') — confirm against the pinned pandas version.
monthly_soiling = {}
for scenario in soiling_results:
    monthly_avg = soiling_results[scenario].resample('M').mean() * 100
    monthly_soiling[scenario] = monthly_avg
# Plot monthly averages for clean vs dusty environments
clean_scenarios = [k for k in monthly_soiling.keys() if 'Clean' in k]
dusty_scenarios = [k for k in monthly_soiling.keys() if 'Dusty' in k]
for scenario in clean_scenarios:
    axes[0, 1].plot(monthly_soiling[scenario].index.month,
                    monthly_soiling[scenario],
                    label=scenario, linewidth=2, linestyle='-')
for scenario in dusty_scenarios:
    axes[0, 1].plot(monthly_soiling[scenario].index.month,
                    monthly_soiling[scenario],
                    label=scenario, linewidth=2, linestyle='--')
axes[0, 1].set_xlabel('Month')
axes[0, 1].set_ylabel('Avg Soiling Ratio (%)')
axes[0, 1].set_title('Monthly Average Soiling Ratios')
axes[0, 1].legend()
axes[0, 1].grid(True)
# Energy impact summary (bar chart, one bar per scenario)
scenarios = list(annual_energy_impacts.keys())
energy_losses = [annual_energy_impacts[s]['energy_loss_pct'] for s in scenarios]
avg_soiling = [annual_energy_impacts[s]['avg_soiling_ratio'] * 100 for s in scenarios]
colors = ['blue' if 'Clean' in s else 'green' if 'Moderate' in s else 'red'
          for s in scenarios]
bars = axes[1, 0].bar(range(len(scenarios)), energy_losses, color=colors, alpha=0.7)
axes[1, 0].set_xticks(range(len(scenarios)))
axes[1, 0].set_xticklabels(scenarios, rotation=45, ha='right')
axes[1, 0].set_ylabel('Annual Energy Loss (%)')
axes[1, 0].set_title('Annual Energy Impact by Scenario')
axes[1, 0].grid(True, axis='y')
# Add value labels on bars
for bar, loss in zip(bars, energy_losses):
    height = bar.get_height()
    axes[1, 0].text(bar.get_x() + bar.get_width()/2., height + 0.1,
                    f'{loss:.1f}%', ha='center', va='bottom')
# Correlation: precipitation vs soiling recovery
precip_monthly = weather_data['precip'].resample('M').sum()
# Use moderate environment with low threshold as reference
ref_soiling = monthly_soiling['Moderate_T3']
axes[1, 1].scatter(precip_monthly, ref_soiling, alpha=0.7, s=50)
axes[1, 1].set_xlabel('Monthly Precipitation (mm)')
axes[1, 1].set_ylabel('Monthly Avg Soiling Ratio (%)')
axes[1, 1].set_title('Precipitation vs Soiling Recovery')
axes[1, 1].grid(True)
# Add a linear trend line over the scatter
z = np.polyfit(precip_monthly, ref_soiling, 1)
p = np.poly1d(z)
axes[1, 1].plot(precip_monthly, p(precip_monthly), "r--", alpha=0.8, linewidth=2)
# ---- Cleaning-strategy economics: rain-driven events vs. energy revenue ----
# Cleaning event analysis
cleaning_events = (weather_data['precip'] >= cleaning_threshold_low).sum()
# NOTE(review): days_between_cleaning is computed but not used below.
days_between_cleaning = 365 / cleaning_events if cleaning_events > 0 else 365
# Economic analysis (simplified)
system_size = 100  # kW
electricity_rate = 0.15  # $/kWh
cleaning_cost_per_event = 200  # $
economic_analysis = []
for scenario, results in annual_energy_impacts.items():
    # Recover the threshold from the scenario key; HSU scenarios have no
    # "_T" suffix and fall back to the low threshold.
    threshold = int(scenario.split('_T')[-1]) if '_T' in scenario else cleaning_threshold_low
    # Annual cleaning events
    annual_events = (weather_data['precip'] >= threshold).sum()
    # Annual costs and savings
    energy_loss_kwh = results['energy_loss_pct'] / 100 * system_size * 1500  # Assume 1500 kWh/kW/year
    revenue_loss = energy_loss_kwh * electricity_rate
    cleaning_costs = annual_events * cleaning_cost_per_event
    economic_analysis.append({
        'scenario': scenario,
        'threshold': threshold,
        'events': annual_events,
        'energy_loss_pct': results['energy_loss_pct'],
        'revenue_loss': revenue_loss,
        'cleaning_costs': cleaning_costs,
        'net_cost': revenue_loss + cleaning_costs
    })
# Plot economic analysis (averaged per threshold across environments)
econ_df = pd.DataFrame(economic_analysis)
econ_summary = econ_df.groupby('threshold').agg({
    'energy_loss_pct': 'mean',
    'revenue_loss': 'mean',
    'cleaning_costs': 'mean',
    'net_cost': 'mean'
}).reset_index()
x_pos = np.arange(len(econ_summary))
width = 0.35
bars1 = axes[2, 0].bar(x_pos - width/2, econ_summary['revenue_loss'],
                       width, label='Revenue Loss', alpha=0.7, color='red')
bars2 = axes[2, 0].bar(x_pos + width/2, econ_summary['cleaning_costs'],
                       width, label='Cleaning Costs', alpha=0.7, color='blue')
axes[2, 0].set_xlabel('Cleaning Threshold (mm)')
axes[2, 0].set_ylabel('Annual Cost ($)')
axes[2, 0].set_title('Economic Impact of Cleaning Strategy')
axes[2, 0].set_xticks(x_pos)
axes[2, 0].set_xticklabels(econ_summary['threshold'])
axes[2, 0].legend()
axes[2, 0].grid(True, axis='y')
# Optimal cleaning threshold (minimum total annual cost)
axes[2, 1].plot(econ_summary['threshold'], econ_summary['net_cost'], 'go-',
                linewidth=3, markersize=8)
axes[2, 1].set_xlabel('Cleaning Threshold (mm)')
axes[2, 1].set_ylabel('Total Annual Cost ($)')
axes[2, 1].set_title('Total Cost vs Cleaning Threshold')
axes[2, 1].grid(True)
# Find and mark optimal threshold
optimal_idx = econ_summary['net_cost'].idxmin()
optimal_threshold = econ_summary.loc[optimal_idx, 'threshold']
optimal_cost = econ_summary.loc[optimal_idx, 'net_cost']
axes[2, 1].scatter([optimal_threshold], [optimal_cost],
                   color='red', s=100, zorder=5)
axes[2, 1].annotate(f'Optimal: {optimal_threshold}mm\n${optimal_cost:.0f}/year',
                    xy=(optimal_threshold, optimal_cost),
                    xytext=(optimal_threshold + 1, optimal_cost + 200),
                    arrowprops=dict(arrowstyle='->', color='red'),
                    fontsize=10, fontweight='bold')
plt.tight_layout()
plt.show()
# Print comprehensive results
print(f"\nComprehensive Soiling Analysis Results")
print("="*50)
print(f"Location: {lat:.2f}°N, {lon:.2f}°W")
print(f"Annual precipitation: {weather_data['precip'].sum():.1f} mm")
print(f"Cleaning events (3mm threshold): {(weather_data['precip'] >= 3).sum()}")
print(f"Cleaning events (10mm threshold): {(weather_data['precip'] >= 10).sum()}")
print(f"\nEnergy Loss Summary:")
for scenario, results in annual_energy_impacts.items():
    print(f"{scenario:15s}: {results['energy_loss_pct']:5.2f}% "
          f"(avg soiling ratio: {results['avg_soiling_ratio']*100:.1f}%)")
print(f"\nOptimal Cleaning Strategy:")
print(f"Threshold: {optimal_threshold} mm")
print(f"Total annual cost: ${optimal_cost:.0f}")
print(f"Revenue loss: ${econ_summary.loc[optimal_idx, 'revenue_loss']:.0f}")
print(f"Cleaning costs: ${econ_summary.loc[optimal_idx, 'cleaning_costs']:.0f}")

# ---- Snow loss analysis example ----
import pvlib
from pvlib import snow, iotools, solarposition, irradiance
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# ---- Synthetic winter season for a snowy site ----
# Location with significant snowfall
lat, lon = 44.0582, -121.3153  # Bend, Oregon
elevation = 1116  # meters
# Generate winter weather data (simplified)
winter_dates = pd.date_range('2022-11-01', '2023-03-31', freq='D')
np.random.seed(42)
# Simulate winter conditions: seasonal baseline plus daily noise
temp_base = -2 + 8 * np.sin(2 * np.pi * winter_dates.dayofyear / 365)
temp_noise = np.random.normal(0, 3, len(winter_dates))
temp_air = temp_base + temp_noise
# Snowfall occurs when temp < 2°C
snowfall_prob = 1 / (1 + np.exp(2 * (temp_air - 0)))  # Sigmoid probability
snowfall_events = np.random.random(len(winter_dates)) < snowfall_prob * 0.3
snowfall_amounts = np.where(snowfall_events,
                            np.random.exponential(3, len(winter_dates)), 0)
# Snow depth simulation (simplified accumulation/melt, sequential by day)
snow_depth = np.zeros(len(winter_dates))
for i in range(1, len(winter_dates)):
    # Accumulation
    snow_depth[i] = snow_depth[i-1] + snowfall_amounts[i]
    # Melting when temp > 0°C
    if temp_air[i] > 0:
        melt_rate = 0.3 * temp_air[i]  # cm/day per °C above 0
        snow_depth[i] = max(0, snow_depth[i] - melt_rate)
# Simulate solar irradiance
# NOTE(review): pvlib.clearsky.ineichen normally takes apparent zenith and
# airmass, not (times, lat, lon); Location.get_clearsky is the usual
# entry point — confirm against the pinned pvlib version.
clear_sky_winter = pvlib.clearsky.ineichen(
    winter_dates, lat, lon, altitude=elevation
)
# System parameters
surface_tilt = 35  # degrees (steeper for snow shedding)
surface_azimuth = 180  # south-facing
# Calculate solar position and POA irradiance
solar_pos = solarposition.get_solarposition(winter_dates, lat, lon)
poa_irradiance = irradiance.get_total_irradiance(
    surface_tilt, surface_azimuth,
    solar_pos['zenith'], solar_pos['azimuth'],
    clear_sky_winter['dni'], clear_sky_winter['ghi'], clear_sky_winter['dhi']
)['poa_global']
# Create weather DataFrame holding all simulated winter inputs
weather_winter = pd.DataFrame({
    'temp_air': temp_air,
    'snowfall': snowfall_amounts,
    'snow_depth': snow_depth,
    'poa_irradiance': poa_irradiance
}, index=winter_dates)
# ---- Run and compare the snow coverage/loss models ----
# Snow models comparison
print("Calculating snow coverage models...")
# NREL fully covered model (snowfall threshold only)
snow_coverage_basic = snow.fully_covered_nrel(
    snowfall=weather_winter['snowfall'],
    threshold_snowfall=1.0  # 1 cm threshold
)
# NREL coverage model with melting (adds irradiance/temperature-driven shed)
snow_coverage_advanced = snow.coverage_nrel(
    snowfall=weather_winter['snowfall'],
    poa_irradiance=weather_winter['poa_irradiance'],
    temp_air=weather_winter['temp_air'],
    surface_tilt=surface_tilt,
    threshold_snowfall=1.0,
    threshold_temp=2.0
)
# Townsend seasonal model (bulk seasonal estimate, not a time series)
seasonal_snow_total = weather_winter['snowfall'].sum()
seasonal_snow_events = (weather_winter['snowfall'] > 0).sum()
townsend_loss = snow.loss_townsend(
    snow_total=seasonal_snow_total,
    snow_events=seasonal_snow_events,
    surface_tilt=surface_tilt,
    relative_humidity=70,  # Assumed average
    wind_speed=3.0  # Assumed average m/s
)
print(f"Townsend seasonal snow loss: {townsend_loss*100:.1f}%")
# Calculate energy impacts
num_strings = 10  # Example system configuration
# DC losses from snow coverage (string-quantized)
dc_loss_basic = snow.dc_loss_nrel(snow_coverage_basic, num_strings)
dc_loss_advanced = snow.dc_loss_nrel(snow_coverage_advanced, num_strings)
# Energy calculations (irradiance used as an energy proxy)
baseline_energy = weather_winter['poa_irradiance'].sum() / 1000  # kWh/m²
energy_basic = (weather_winter['poa_irradiance'] * (1 - dc_loss_basic)).sum() / 1000
energy_advanced = (weather_winter['poa_irradiance'] * (1 - dc_loss_advanced)).sum() / 1000
energy_loss_basic = (baseline_energy - energy_basic) / baseline_energy * 100
energy_loss_advanced = (baseline_energy - energy_advanced) / baseline_energy * 100
print(f"\nWinter Energy Analysis:")
print(f"Baseline energy (no snow): {baseline_energy:.1f} kWh/m²")
print(f"Basic model energy: {energy_basic:.1f} kWh/m² (loss: {energy_loss_basic:.1f}%)")
print(f"Advanced model energy: {energy_advanced:.1f} kWh/m² (loss: {energy_loss_advanced:.1f}%)")
# ---- Snow plots: weather, depth, coverage models, daily energy ----
# Plot snow analysis
fig, axes = plt.subplots(4, 1, figsize=(15, 12))
# Weather conditions
axes[0].plot(weather_winter.index, weather_winter['temp_air'],
             'b-', label='Air Temperature', linewidth=1.5)
axes[0].axhline(y=0, color='k', linestyle='--', alpha=0.5)
axes[0].set_ylabel('Temperature (°C)')
axes[0].set_title('Winter Weather Conditions')
axes[0].legend()
axes[0].grid(True)
# Add snowfall as bars on secondary axis
ax0_twin = axes[0].twinx()
snow_events_idx = weather_winter['snowfall'] > 0
ax0_twin.bar(weather_winter.index[snow_events_idx],
             weather_winter['snowfall'][snow_events_idx],
             width=1, alpha=0.3, color='lightblue', label='Snowfall')
ax0_twin.set_ylabel('Snowfall (cm)', color='lightblue')
ax0_twin.legend(loc='upper right')
# Snow depth and coverage
axes[1].plot(weather_winter.index, weather_winter['snow_depth'],
             'g-', label='Snow Depth', linewidth=2)
axes[1].set_ylabel('Snow Depth (cm)')
axes[1].set_title('Snow Accumulation and Melt')
axes[1].legend()
axes[1].grid(True)
# Snow coverage comparison (basic vs advanced NREL models)
axes[2].plot(weather_winter.index, snow_coverage_basic * 100,
             'r-', label='Basic Model', linewidth=2)
axes[2].plot(weather_winter.index, snow_coverage_advanced * 100,
             'b-', label='Advanced Model', linewidth=2)
axes[2].set_ylabel('Snow Coverage (%)')
axes[2].set_title('Snow Coverage Models Comparison')
axes[2].legend()
axes[2].grid(True)
# Energy impact (daily energy proxy with and without snow losses)
daily_energy_baseline = weather_winter['poa_irradiance'] / 1000
daily_energy_basic = weather_winter['poa_irradiance'] * (1 - dc_loss_basic) / 1000
daily_energy_advanced = weather_winter['poa_irradiance'] * (1 - dc_loss_advanced) / 1000
axes[3].plot(weather_winter.index, daily_energy_baseline,
             'k--', label='No Snow', linewidth=2, alpha=0.7)
axes[3].plot(weather_winter.index, daily_energy_basic,
             'r-', label='Basic Model', linewidth=1.5)
axes[3].plot(weather_winter.index, daily_energy_advanced,
             'b-', label='Advanced Model', linewidth=1.5)
axes[3].set_ylabel('Daily Energy (kWh/m²)')
axes[3].set_xlabel('Date')
axes[3].set_title('Daily Energy Production with Snow Losses')
axes[3].legend()
axes[3].grid(True)
plt.tight_layout()
plt.show()
# ---- Monthly summaries and tilt-angle sensitivity ----
# Monthly analysis
monthly_stats = weather_winter.groupby(weather_winter.index.month).agg({
    'temp_air': ['mean', 'min', 'max'],
    'snowfall': 'sum',
    'snow_depth': 'mean',
    'poa_irradiance': 'sum'
}).round(2)
monthly_coverage = pd.DataFrame({
    'basic_coverage': snow_coverage_basic.groupby(snow_coverage_basic.index.month).mean(),
    'advanced_coverage': snow_coverage_advanced.groupby(snow_coverage_advanced.index.month).mean(),
    'basic_loss': dc_loss_basic.groupby(dc_loss_basic.index.month).mean(),
    'advanced_loss': dc_loss_advanced.groupby(dc_loss_advanced.index.month).mean()
}) * 100
print(f"\nMonthly Snow Impact Summary:")
print("="*40)
for month in monthly_coverage.index:
    # Month number -> name via an arbitrary date in that month
    month_name = pd.Timestamp(2023, month, 1).strftime('%B')
    print(f"{month_name}:")
    print(f"  Avg Coverage: Basic {monthly_coverage.loc[month, 'basic_coverage']:.1f}%, "
          f"Advanced {monthly_coverage.loc[month, 'advanced_coverage']:.1f}%")
    print(f"  Avg DC Loss: Basic {monthly_coverage.loc[month, 'basic_loss']:.1f}%, "
          f"Advanced {monthly_coverage.loc[month, 'advanced_loss']:.1f}%")
# Tilt angle sensitivity analysis: rerun coverage/loss for each tilt
tilt_angles = np.arange(15, 61, 5)
tilt_analysis = []
for tilt in tilt_angles:
    # Recalculate POA for this tilt
    poa_tilt = irradiance.get_total_irradiance(
        tilt, surface_azimuth,
        solar_pos['zenith'], solar_pos['azimuth'],
        clear_sky_winter['dni'], clear_sky_winter['ghi'], clear_sky_winter['dhi']
    )['poa_global']
    # Snow coverage with this tilt
    snow_coverage_tilt = snow.coverage_nrel(
        snowfall=weather_winter['snowfall'],
        poa_irradiance=poa_tilt,
        temp_air=weather_winter['temp_air'],
        surface_tilt=tilt,
        threshold_snowfall=1.0,
        threshold_temp=2.0
    )
    # Energy impact
    dc_loss_tilt = snow.dc_loss_nrel(snow_coverage_tilt, num_strings)
    energy_tilt = (poa_tilt * (1 - dc_loss_tilt)).sum() / 1000
    baseline_tilt = poa_tilt.sum() / 1000
    loss_pct = (baseline_tilt - energy_tilt) / baseline_tilt * 100
    tilt_analysis.append({
        'tilt': tilt,
        'baseline_energy': baseline_tilt,
        'actual_energy': energy_tilt,
        'snow_loss_pct': loss_pct,
        'avg_coverage': snow_coverage_tilt.mean() * 100
    })
tilt_df = pd.DataFrame(tilt_analysis)
print(f"\nTilt Angle Optimization for Snow:")
print("="*35)
optimal_tilt_idx = tilt_df['snow_loss_pct'].idxmin()
optimal_tilt = tilt_df.loc[optimal_tilt_idx, 'tilt']
min_snow_loss = tilt_df.loc[optimal_tilt_idx, 'snow_loss_pct']
print(f"Optimal tilt angle: {optimal_tilt}°")
print(f"Minimum snow loss: {min_snow_loss:.1f}%")
# Compare with latitude-based rule of thumb
latitude_tilt = lat
# Nearest simulated tilt to the latitude rule; positional index works
# here because tilt_df has a default RangeIndex.
lat_idx = np.argmin(np.abs(tilt_df['tilt'] - latitude_tilt))
lat_snow_loss = tilt_df.loc[lat_idx, 'snow_loss_pct']
print(f"Latitude tilt ({latitude_tilt:.0f}°): {lat_snow_loss:.1f}% snow loss")
print(f"Improvement with optimal tilt: {lat_snow_loss - min_snow_loss:.1f} percentage points")

# ---- Comprehensive system-loss example ----
import pvlib
from pvlib import pvsystem, temperature, atmosphere, solarposition
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# System and location parameters
lat, lon = 40.0150, -105.2705 # Boulder, CO
system_params = {
'module_power': 400, # W
'modules_per_string': 12,
'strings': 8,
'system_power': 38400, # W total
'surface_tilt': 30, # degrees
'surface_azimuth': 180, # south
'inverter_efficiency': 0.96
}
# Create annual time series
times = pd.date_range('2023-01-01', '2023-12-31 23:00', freq='H')
# Calculate solar position and clear sky
solar_pos = solarposition.get_solarposition(times, lat, lon)
clear_sky = pvlib.clearsky.ineichen(times, lat, lon, altitude=1655)
# Calculate POA irradiance
poa = pvlib.irradiance.get_total_irradiance(
system_params['surface_tilt'],
system_params['surface_azimuth'],
solar_pos['zenith'],
solar_pos['azimuth'],
clear_sky['dni'],
clear_sky['ghi'],
clear_sky['dhi']
)
# Simulate weather variations
np.random.seed(42)
weather_variations = pd.DataFrame({
'ghi_factor': 0.85 + 0.3 * np.random.random(len(times)), # Cloud variations
'temp_air': 15 + 20 * np.sin(2 * np.pi * times.dayofyear / 365) + np.random.normal(0, 3, len(times)),
'wind_speed': 2 + 3 * np.random.exponential(1, len(times)),
'relative_humidity': 30 + 40 * np.random.random(len(times)),
'precipitation': np.random.exponential(1, len(times)) * (np.random.random(len(times)) < 0.1)
}, index=times)
# Apply weather variations to irradiance
poa_actual = poa['poa_global'] * weather_variations['ghi_factor']
# Calculate atmospheric parameters for spectral losses
airmass_rel = atmosphere.get_relative_airmass(solar_pos['zenith'])
airmass_abs = atmosphere.get_absolute_airmass(airmass_rel, pressure=85000)
precipitable_water = 1.5 + 0.5 * np.sin(2 * np.pi * times.dayofyear / 365) # Seasonal variation
# Define all loss factors
print("Calculating comprehensive loss factors...")
# 1. PVWatts standard losses
pvwatts_loss_factors = pvsystem.pvwatts_losses(
soiling=2.0, # 2% soiling
shading=1.0, # 1% far shading
snow=0.5, # 0.5% snow (minimal for this location)
mismatch=2.0, # 2% module mismatch
wiring=2.0, # 2% DC wiring
connections=0.5, # 0.5% connections
lid=1.5, # 1.5% light-induced degradation
nameplate_rating=1.0, # 1% nameplate rating
age=0.0, # New system
availability=3.0 # 3% system availability
)
total_pvwatts_derate = pvwatts_loss_factors['total']
print(f"PVWatts total derate factor: {total_pvwatts_derate:.3f}")
# 2. Temperature losses
cell_temp = temperature.sapm_cell(
poa_actual,
weather_variations['temp_air'],
weather_variations['wind_speed'],
a=-3.47, b=-0.0594, deltaT=3 # Typical glass/cell/polymer values
)
temp_coeff = -0.004 # %/°C, typical for c-Si
temp_loss_factor = 1 + temp_coeff * (cell_temp - 25)
# 3. Spectral losses (simplified, SAPM polynomial in absolute airmass)
from pvlib import spectrum
# spectral_factor_sapm expects the Sandia module coefficients under the
# UPPERCASE keys 'A0'..'A4' (the SAPM module-database convention); the
# previous lowercase 'a0'.. keys raise a KeyError inside pvlib.
sapm_spectral_params = {
    'A0': 0.928, 'A1': 0.068, 'A2': -0.0077,
    'A3': 0.0001, 'A4': -0.000002
}
spectral_factor = spectrum.spectral_factor_sapm(airmass_abs, sapm_spectral_params)
# 4. Reflection (incidence-angle) losses.
# Angle of incidence between the sun and the module normal, then the
# ASHRAE incidence-angle modifier.
aoi = pvlib.irradiance.aoi(
    system_params['surface_tilt'],
    system_params['surface_azimuth'],
    solar_pos['zenith'],
    solar_pos['azimuth'],
)
iam_factor = pvlib.iam.ashrae(aoi, b=0.05)
# 5. DC ohmic (wiring) losses
vmp_ref = 37.8           # module Vmp at STC [V]
imp_ref = 10.58          # module Imp at STC [A]
dc_ohmic_percent = 1.5   # equivalent wiring loss at STC [%]
dc_resistance = pvsystem.dc_ohms_from_percent(
    vmp_ref, imp_ref, dc_ohmic_percent,
    modules_per_string=system_params['modules_per_string'],
    strings=system_params['strings']
)
# Simplified array current estimate (a full IV model would normally be used)
estimated_current = poa_actual / 1000 * imp_ref * system_params['strings']
# Loss fraction = I^2*R / (I*V) = I*R/V.  Using the reduced form avoids the
# 0/0 division the previous expression produced every night (when the
# estimated current is zero), which injected NaNs into the loss factor.
string_voltage = vmp_ref * system_params['modules_per_string']
dc_ohmic_loss_factor = 1 - estimated_current * dc_resistance / string_voltage
# 6. Soiling losses (time-varying, Kimber model)
# The Kimber model lives in pvlib.soiling — pvsystem has no `soiling`
# attribute, so the previous `pvsystem.soiling.kimber(...)` call raised
# AttributeError.
soiling_factor = pvlib.soiling.kimber(
    rainfall=weather_variations['precipitation'],
    cleaning_threshold=5,     # rainfall needed to wash modules [mm]
    soiling_loss_rate=0.002,  # soiling accumulation rate [fraction/day]
    max_loss_rate=0.15        # soiling saturates at 15% loss
)
# 7. Age-related degradation (constant across this one-year simulation)
system_age = 5            # years in service
degradation_rate = 0.005  # 0.5% per year
degradation_factor = (1 - degradation_rate) ** system_age
# Collect every loss factor alongside the baseline irradiance, then apply
# them all multiplicatively to get the effective irradiance.
combined_losses = pd.DataFrame({
    'baseline_poa': poa_actual,
    'temp_factor': temp_loss_factor,
    'spectral_factor': spectral_factor,
    'iam_factor': iam_factor,
    'dc_ohmic_factor': dc_ohmic_loss_factor,
    'soiling_factor': soiling_factor,
    'degradation_factor': degradation_factor,
    'pvwatts_derate': total_pvwatts_derate,
}, index=times)

# Cumulative effect: product of the baseline with every factor column
factor_columns = ('temp_factor', 'spectral_factor', 'iam_factor',
                  'dc_ohmic_factor', 'soiling_factor',
                  'degradation_factor', 'pvwatts_derate')
effective = combined_losses['baseline_poa'].copy()
for column in factor_columns:
    effective = effective * combined_losses[column]
combined_losses['effective_irradiance'] = effective
# Express each time-varying factor's annual-average shortfall as a % loss
time_varying = ('temp_factor', 'spectral_factor', 'iam_factor',
                'dc_ohmic_factor', 'soiling_factor')
loss_impacts = {name: (1 - combined_losses[name].mean()) * 100
                for name in time_varying}
# Constant (non-time-varying) contributions
loss_impacts['pvwatts_losses'] = (1 - total_pvwatts_derate) * 100
loss_impacts['degradation'] = (1 - degradation_factor) * 100

# Annual energy totals with and without the modeled losses [kWh/m²/year]
baseline_energy = combined_losses['baseline_poa'].sum() / 1000
effective_energy = combined_losses['effective_irradiance'].sum() / 1000
total_loss_pct = (baseline_energy - effective_energy) / baseline_energy * 100
print(f"\nAnnual Energy Analysis:")
print("="*30)
print(f"Baseline POA energy: {baseline_energy:.1f} kWh/m²/year")
print(f"Effective energy: {effective_energy:.1f} kWh/m²/year")
print(f"Total energy loss: {total_loss_pct:.1f}%")
print(f"\nIndividual Loss Factor Impacts:")
print("="*35)
for factor, impact in sorted(loss_impacts.items(), key=lambda x: x[1], reverse=True):
factor_name = factor.replace('_factor', '').replace('_', ' ').title()
print(f"{factor_name:15s}: {impact:5.2f}%")
# Diagnostic figure: 4 rows x 2 columns of loss-analysis plots
fig, axes = plt.subplots(4, 2, figsize=(16, 14))

# Top-left: hourly loss factors over a single summer week
summer_week = combined_losses[slice('2023-06-15', '2023-06-22')]
ax_ts = axes[0, 0]
for column, label in (('temp_factor', 'Temperature'),
                      ('spectral_factor', 'Spectral'),
                      ('iam_factor', 'IAM')):
    ax_ts.plot(summer_week.index, summer_week[column],
               label=label, linewidth=2)
ax_ts.set_ylabel('Loss Factor')
ax_ts.set_title('Time-Varying Loss Factors (Summer Week)')
ax_ts.legend()
ax_ts.grid(True)
# Top-right: month-by-month means of the main loss factors
monthly_factors = combined_losses.resample('M').mean()
month_numbers = monthly_factors.index.month
ax_monthly = axes[0, 1]
for column, label, fmt in (('temp_factor', 'Temperature', 'ro-'),
                           ('spectral_factor', 'Spectral', 'bs-'),
                           ('iam_factor', 'IAM', 'g^-')):
    ax_monthly.plot(month_numbers, monthly_factors[column], fmt,
                    label=label, linewidth=2)
ax_monthly.set_xlabel('Month')
ax_monthly.set_ylabel('Monthly Avg Factor')
ax_monthly.set_title('Seasonal Loss Factor Variations')
ax_monthly.legend()
ax_monthly.grid(True)
# Middle-left: soiling factor through the year, with rain events marked
ax_soil = axes[1, 0]
ax_soil.plot(combined_losses.index, combined_losses['soiling_factor'],
             'brown', linewidth=1)
ax_soil.set_ylabel('Soiling Factor')
ax_soil.set_title('Soiling Losses Throughout Year')
ax_soil.grid(True)
# Mark hours with >3 mm precipitation (likely cleaning events)
rainy = weather_variations['precipitation'] > 3
ax_soil.scatter(weather_variations.index[rainy],
                [0.95] * rainy.sum(),
                c='blue', alpha=0.6, s=10, label='Rain Events')
ax_soil.legend()
# Middle-right: bar chart of each loss category's annual impact.
# (The previous version also computed np.cumsum for a waterfall chart
# that was never drawn — dead code, removed.)
factors = list(loss_impacts.keys())
impacts = list(loss_impacts.values())
bars = axes[1, 1].bar(range(len(factors)), impacts,
                      color='lightcoral', alpha=0.7)
axes[1, 1].set_xticks(range(len(factors)))
axes[1, 1].set_xticklabels([f.replace('_', '\n') for f in factors],
                           rotation=45, ha='right')
axes[1, 1].set_ylabel('Loss Impact (%)')
axes[1, 1].set_title('Individual Loss Impacts')
axes[1, 1].grid(True, axis='y')
# Annotate each bar with its percentage value
for bar, impact in zip(bars, impacts):
    height = bar.get_height()
    axes[1, 1].text(bar.get_x() + bar.get_width()/2., height + 0.1,
                    f'{impact:.1f}%', ha='center', va='bottom', fontsize=9)
# Row 3, left: simulated temperature factor vs binned cell temperature,
# overlaid with the linear model it was generated from.
temp_bins = np.arange(-10, 41, 5)
binned_temperature = pd.cut(cell_temp, temp_bins)
mean_by_temp = combined_losses.groupby(binned_temperature)['temp_factor'].mean()
bin_centers = mean_by_temp.index.map(lambda interval: interval.mid)
ax_temp = axes[2, 0]
ax_temp.plot(bin_centers, mean_by_temp, 'ro-', linewidth=2, markersize=6)
ax_temp.set_xlabel('Cell Temperature (°C)')
ax_temp.set_ylabel('Temperature Factor')
ax_temp.set_title('Temperature Loss Relationship')
ax_temp.grid(True)
# Theoretical straight line from the temperature coefficient
temp_range = np.arange(-5, 40, 1)
theoretical = 1 + temp_coeff * (temp_range - 25)
ax_temp.plot(temp_range, theoretical, 'b--', linewidth=2, label='Theoretical')
ax_temp.legend()
# Row 3, right: mean IAM factor vs binned angle of incidence
aoi_bins = np.arange(0, 91, 10)
binned_aoi = pd.cut(aoi, aoi_bins)
mean_by_aoi = combined_losses.groupby(binned_aoi)['iam_factor'].mean()
aoi_centers = mean_by_aoi.index.map(lambda interval: interval.mid)
ax_aoi = axes[2, 1]
ax_aoi.plot(aoi_centers, mean_by_aoi, 'go-', linewidth=2, markersize=6)
ax_aoi.set_xlabel('Angle of Incidence (degrees)')
ax_aoi.set_ylabel('IAM Factor')
ax_aoi.set_title('Incidence Angle Loss Relationship')
ax_aoi.grid(True)
# Bottom-left: grouped bars of monthly energy, baseline vs with losses
monthly_energy = pd.DataFrame({
    'baseline': combined_losses['baseline_poa'].resample('M').sum() / 1000,
    'with_losses': combined_losses['effective_irradiance'].resample('M').sum() / 1000,
})
monthly_energy['loss_pct'] = (
    1 - monthly_energy['with_losses'] / monthly_energy['baseline']
) * 100
x_months = range(1, 13)
width = 0.35
left_positions = [m - width / 2 for m in x_months]
right_positions = [m + width / 2 for m in x_months]
bars1 = axes[3, 0].bar(left_positions, monthly_energy['baseline'],
                       width, label='Baseline', alpha=0.7)
bars2 = axes[3, 0].bar(right_positions, monthly_energy['with_losses'],
                       width, label='With Losses', alpha=0.7)
axes[3, 0].set_xlabel('Month')
axes[3, 0].set_ylabel('Monthly Energy (kWh/m²)')
axes[3, 0].set_title('Monthly Energy: Baseline vs With Losses')
axes[3, 0].set_xticks(x_months)
axes[3, 0].legend()
axes[3, 0].grid(True, axis='y')
# System-level annual totals: scale per-m² energy by nameplate power.
# NOTE(review): assumes system_params['system_power'] is in watts — the
# /1000 converts to kW-scaled energy; confirm against the params block.
system_baseline_kwh = baseline_energy * system_params['system_power'] / 1000
system_actual_kwh = effective_energy * system_params['system_power'] / 1000
system_loss_kwh = system_baseline_kwh - system_actual_kwh
# Rough economic impact at an assumed $0.12/kWh tariff
economic_impact = system_loss_kwh * 0.12
# Bottom-right: pie chart of the annual loss breakdown.
# Categories contributing <= 0.5% are lumped into a single 'Other' slice.
loss_breakdown = {
    'Temperature': loss_impacts['temp_factor'],
    'PVWatts Losses': loss_impacts['pvwatts_losses'],
    'Soiling': loss_impacts['soiling_factor'],
    'Spectral': loss_impacts['spectral_factor'],
    'IAM': loss_impacts['iam_factor'],
    'DC Ohmic': loss_impacts['dc_ohmic_factor'],
    'Degradation': loss_impacts['degradation'],
}
significant_losses = {}
other_losses = 0
for label, value in loss_breakdown.items():
    if value > 0.5:
        significant_losses[label] = value
    else:
        other_losses += value
if other_losses > 0:
    significant_losses['Other'] = other_losses
axes[3, 1].pie(significant_losses.values(), labels=significant_losses.keys(),
               autopct='%1.1f%%', startangle=90)
axes[3, 1].set_title('Annual Loss Breakdown')
plt.tight_layout()
plt.show()
# Print comprehensive summary of system-level performance.
# (Also removes stray non-Python text that was fused onto the last line.)
print(f"\nSystem Performance Summary:")
print("="*40)
print(f"System size: {system_params['system_power']/1000:.1f} kW")
print(f"Baseline annual energy: {system_baseline_kwh:.0f} kWh")
print(f"Actual annual energy: {system_actual_kwh:.0f} kWh")
print(f"Annual energy loss: {system_loss_kwh:.0f} kWh ({total_loss_pct:.1f}%)")
print(f"Economic impact: ${economic_impact:.0f}/year")
print(f"\nPerformance Ratio: {system_actual_kwh/system_baseline_kwh:.3f}")
# Capacity factor is a fraction of nameplate-hours; multiply by 100 before
# printing with a '%' suffix — the old code printed the raw fraction
# (e.g. "0.2%" instead of "18.5%").
capacity_factor_pct = (
    system_actual_kwh / (system_params['system_power'] * 8760 / 1000) * 100
)
print(f"Capacity Factor: {capacity_factor_pct:.1f}%")
print(f"\nTop Loss Contributors:")
top_losses = sorted(loss_impacts.items(), key=lambda x: x[1], reverse=True)[:5]
for i, (factor, impact) in enumerate(top_losses, 1):
    print(f"{i}. {factor.replace('_', ' ').title()}: {impact:.2f}%")
# Install with the Tessl CLI: npx tessl i tessl/pypi-pvlib