Vega-Altair: A declarative statistical visualization library for Python.

Quality: Pending — does it follow best practices?
Impact: Pending — no eval scenarios have been run.
Data transformation operations that can be applied to datasets within chart specifications, including filtering, aggregation, binning, and calculations. Transformations allow data preprocessing without modifying the original dataset.
Transform methods for filtering data based on predicates and conditions.
def transform_filter(self, predicate) -> "Chart":
    """
    Add a filter transform that keeps only rows matching *predicate*.

    Args:
        predicate: Filter expression — a Vega expression string
            (e.g. ``'datum.value > 100'``), a predicate dict, or a
            Predicate object such as an ``alt.datum`` comparison.

    Returns:
        Chart: A new chart with the filter transform applied.
    """
def transform_sample(self, sample: int = 1000) -> "Chart":
    """
    Add a sample transform that keeps a random subset of rows.

    Args:
        sample: Maximum number of rows to retain (default 1000).

    Returns:
        Chart: A new chart with the sample transform applied.
    """


# Transform methods for aggregating and summarizing data.
def transform_aggregate(self, aggregate=None, groupby=None, **kwargs) -> "Chart":
    """
    Add an aggregate transform that summarizes the data.

    Args:
        aggregate: List of aggregation specifications.
        groupby: List of field names to group by before aggregating.
        **kwargs: Shorthand aggregations as ``name='op(field)'`` pairs,
            e.g. ``mean_value='mean(value)'``.

    Returns:
        Chart: A new chart with the aggregate transform applied.
    """
def transform_joinaggregate(self, joinaggregate=None, groupby=None, **kwargs) -> "Chart":
    """
    Add a joinaggregate transform: compute aggregates and join the
    results back onto each individual input record.

    Args:
        joinaggregate: List of aggregate calculation specifications.
        groupby: List of field names to group by.
        **kwargs: Shorthand aggregations as ``name='op(field)'`` pairs.

    Returns:
        Chart: A new chart with the joinaggregate transform applied.
    """
def transform_window(
    self,
    window=None,
    frame=None,
    groupby=None,
    sort=None,
    ignorePeers=None,
    **kwargs
) -> "Chart":
    """
    Add a window transform for sliding-window calculations.

    Args:
        window: List of window operation specifications.
        frame: Two-element window frame specification, e.g. ``[None, 0]``.
        groupby: List of field names to partition the window by.
        sort: Sort specification applied within each window.
        ignorePeers: Whether to ignore peer (tied) values.
        **kwargs: Shorthand operations as ``name='op(field)'`` pairs,
            e.g. ``cumulative_sum='sum(value)'``.

    Returns:
        Chart: A new chart with the window transform applied.
    """


# Transform methods for reshaping and restructuring data.
def transform_fold(self, fold, as_=None, **kwargs) -> "Chart":
    """
    Add a fold transform that gathers the listed fields into
    key/value pairs (wide-to-long reshaping).

    Args:
        fold: List of field names to fold.
        as_: Output field names, e.g. ``['key', 'value']``.

    Returns:
        Chart: A new chart with the fold transform applied.
    """
def transform_pivot(
    self,
    pivot,
    value,
    groupby=None,
    limit=None,
    op=None,
    **kwargs
) -> "Chart":
    """
    Add a pivot transform (long-to-wide reshaping).

    Args:
        pivot: Field whose unique values become new column names.
        value: Field providing the values for the new columns.
        groupby: Optional list of fields to group by.
        limit: Maximum number of pivoted columns.
        op: Aggregation operation applied to duplicate values.

    Returns:
        Chart: A new chart with the pivot transform applied.
    """
def transform_flatten(self, flatten, as_=None, **kwargs) -> "Chart":
    """
    Add a flatten transform that expands array-valued fields into
    separate records, one per array element.

    Args:
        flatten: List of array-valued field names to flatten.
        as_: Output field names for the flattened values.

    Returns:
        Chart: A new chart with the flatten transform applied.
    """


# Transform methods for creating calculated fields and derived data.
def transform_calculate(self, calculate=None, as_=None, **kwargs) -> "Chart":
    """
    Add a calculate transform that derives new fields from expressions.

    Args:
        calculate: Vega expression string (or list of calculations)
            evaluated per row, e.g. ``'datum.y * 2 + 10'``.
        as_: Name(s) of the output field(s).
        **kwargs: Shorthand calculations as ``name='expression'`` pairs.

    Returns:
        Chart: A new chart with the calculate transform applied.
    """
def transform_timeunit(self, timeUnit=None, field=None, as_=None, **kwargs) -> "Chart":
    """
    Add a timeUnit transform that discretizes a temporal field.

    Args:
        timeUnit: Time unit specification.
        field: Input temporal field.
        as_: Output field name.

    Returns:
        Chart: A new chart with the timeunit transform applied.
    """


# Transform methods for binning continuous data into discrete groups.
def transform_bin(
    self,
    field=None,
    as_=None,
    bin=None,
    anchor=None,
    base=None,
    divide=None,
    extent=None,
    maxbins=None,
    minstep=None,
    nice=None,
    step=None,
    steps=None,
    **kwargs
) -> "Chart":
    """
    Add a bin transform that discretizes a continuous field.

    Args:
        field: Field to bin.
        as_: Output field names for the bin ``[start, end]``.
        bin: Binning parameters (``True`` or a BinParams-style mapping).
        anchor: Anchor value for bin boundaries.
        base: Base value for logarithmic binning.
        divide: Division factors used to produce "nice" bins.
        extent: ``[min, max]`` data extent to bin over.
        maxbins: Maximum number of bins.
        minstep: Minimum allowed step size.
        nice: Whether to snap boundaries to "nice" values.
        step: Explicit step size.
        steps: List of allowable step sizes.

    Returns:
        Chart: A new chart with the bin transform applied.
    """


# Transform methods for enriching data through lookups and joins.
def transform_lookup(
    self,
    lookup=None,
    from_=None,
    as_=None,
    default=None,
    **kwargs
) -> "Chart":
    """
    Add a lookup transform that joins fields from an external data source.

    Args:
        lookup: Key field in the current data used for the lookup.
        from_: Specification of the external data source to join from.
        as_: Output field name(s) for the joined values.
        default: Value used when no match is found.

    Returns:
        Chart: A new chart with the lookup transform applied.
    """
def transform_impute(
    self,
    impute=None,
    key=None,
    value=None,
    method=None,
    frame=None,
    groupby=None,
    keyvals=None,
    **kwargs
) -> "Chart":
    """
    Add an impute transform that fills in missing values.

    Args:
        impute: Field whose missing values are imputed.
        key: Key field for imputation.
        value: Value to impute.
        method: Imputation method: one of 'value', 'mean', 'median',
            'max', 'min'.
        frame: Window frame over which imputation statistics apply.
        groupby: Fields to group by before imputing.
        keyvals: Explicit key values for imputation.

    Returns:
        Chart: A new chart with the impute transform applied.
    """


# Transform methods for statistical analysis and modeling.
def transform_regression(
    self,
    regression=None,
    on=None,
    method=None,
    order=None,
    extent=None,
    params=None,
    as_=None,
    **kwargs
) -> "Chart":
    """
    Add a regression transform that fits a model of *regression* on *on*.

    Args:
        regression: Dependent (y) field for the regression.
        on: Independent (x) field for the regression.
        method: One of 'linear', 'log', 'exp', 'pow', 'quad', 'poly'.
        order: Polynomial order, used with ``method='poly'``.
        extent: X extent over which predictions are generated.
        params: Whether to output the fitted model parameters.
        as_: Output field names.

    Returns:
        Chart: A new chart with the regression transform applied.
    """
def transform_loess(
    self,
    loess=None,
    on=None,
    bandwidth=None,
    as_=None,
    **kwargs
) -> "Chart":
    """
    Add a LOESS transform (locally-estimated scatterplot smoothing).

    Args:
        loess: Dependent (y) field to smooth.
        on: Independent (x) field.
        bandwidth: LOESS bandwidth parameter, e.g. ``0.3``.
        as_: Output field names.

    Returns:
        Chart: A new chart with the loess transform applied.
    """
def transform_quantile(
    self,
    quantile=None,
    probs=None,
    step=None,
    as_=None,
    **kwargs
) -> "Chart":
    """
    Add a quantile transform that computes quantile values of a field.

    Args:
        quantile: Field for which quantiles are calculated.
        probs: Explicit list of quantile probabilities.
        step: Step size used to generate a probability sequence
            when *probs* is not given.
        as_: Output field names.

    Returns:
        Chart: A new chart with the quantile transform applied.
    """
def transform_density(
    self,
    density=None,
    bandwidth=None,
    extent=None,
    as_=None,
    counts=None,
    cumulative=None,
    **kwargs
) -> "Chart":
    """
    Add a density transform that estimates a probability density function.

    Args:
        density: Field whose distribution is estimated.
        bandwidth: Kernel bandwidth.
        extent: Data extent over which to estimate.
        as_: Output field names.
        counts: Whether to output counts instead of densities.
        cumulative: Whether to output the cumulative distribution.

    Returns:
        Chart: A new chart with the density transform applied.
    """


# Transform methods for spatial and geographic data processing.
def transform_extent(self, extent=None, param=None, **kwargs) -> "Chart":
    """
    Add an extent transform that computes the extent of a field and
    stores it in a named parameter.

    Args:
        extent: Field whose extent is computed.
            NOTE(review): upstream text calls this a geographic field —
            confirm against the Vega-Lite extent transform docs.
        param: Name of the parameter that receives the extent.

    Returns:
        Chart: A new chart with the extent transform applied.
    """


# Transform methods for arranging and stacking data for specific visualizations.
def transform_stack(
    self,
    stack=None,
    groupby=None,
    sort=None,
    offset=None,
    as_=None,
    **kwargs
) -> "Chart":
    """
    Add a stack transform that computes stacked positions for
    cumulative visualizations.

    Args:
        stack: Field whose values are stacked.
        groupby: Fields to group by.
        sort: Sort specification applied within each stack.
        offset: Stack offset mode: 'zero', 'center', or 'normalize'.
        as_: Output field names for the stack ``[start, end]``.

    Returns:
        Chart: A new chart with the stack transform applied.
    """


# ---------------------------------------------------------------------------
# Example usage
# ---------------------------------------------------------------------------
import altair as alt
# NOTE(review): these snippets assume `data` (a DataFrame or data source)
# is defined by the caller; they are illustrative, not runnable as-is.

# Filter with expression string
chart = alt.Chart(data).mark_point().encode(
    x='x:Q',
    y='y:Q'
).transform_filter(
    'datum.value > 100'
)

# Filter with field predicate
chart = alt.Chart(data).mark_point().encode(
    x='x:Q',
    y='y:Q'
).transform_filter(
    alt.datum.category == 'A'
)

# Basic aggregation
chart = alt.Chart(data).mark_bar().encode(
    x='category:N',
    y='mean_value:Q'
).transform_aggregate(
    mean_value='mean(value)',
    groupby=['category']
)

# Window calculation
chart = alt.Chart(data).mark_line().encode(
    x='date:T',
    y='cumulative_sum:Q'
).transform_window(
    cumulative_sum='sum(value)',
    frame=[None, 0]
)

# Fold wide data to long format
chart = alt.Chart(data).mark_bar().encode(
    x='key:N',
    y='value:Q',
    color='key:N'
).transform_fold(
    ['col1', 'col2', 'col3'],
    as_=['key', 'value']
)

# Pivot long data to wide format
chart = alt.Chart(data).mark_rect().encode(
    x='category:N',
    y='metric:N',
    color='value:Q'
).transform_pivot(
    'metric',
    value='value',
    groupby=['category']
)

# Simple calculation
chart = alt.Chart(data).mark_point().encode(
    x='x:Q',
    y='calculated_y:Q'
).transform_calculate(
    calculated_y='datum.y * 2 + 10'
)

# Multiple calculations
chart = alt.Chart(data).mark_point().encode(
    x='x:Q',
    y='ratio:Q',
    size='total:Q'
).transform_calculate(
    ratio='datum.numerator / datum.denominator',
    total='datum.numerator + datum.denominator'
)

# Linear regression
chart = alt.Chart(data).mark_line().encode(
    x='x:Q',
    y='y:Q'
).transform_regression(
    'y', 'x',
    method='linear'
)

# LOESS smoothing
chart = alt.Chart(data).mark_line().encode(
    x='x:Q',
    y='y:Q'
).transform_loess(
    'y', 'x',
    bandwidth=0.3
)

from typing import Union, Dict, Any, Optional, List
# Type aliases for transform specifications.
# Fixed-string option sets use typing.Literal (PEP 586); the original
# Union['max', ...] form is invalid — Union takes types, not constants.
from typing import Any, Dict, Literal, Union

# Predicate: a Vega expression string or a predicate dict.
Predicate = Union[str, Dict[str, Any]]

# Aggregation operation names.
AggregateOp = Literal[
    'argmax', 'argmin', 'average', 'count', 'distinct', 'max', 'mean',
    'median', 'min', 'missing', 'q1', 'q3', 'ci0', 'ci1', 'stderr',
    'stdev', 'stdevp', 'sum', 'valid', 'values', 'variance', 'variancep'
]

# Window operations: any aggregate op plus window-only ops.
WindowOp = Union[
    AggregateOp,
    Literal[
        'row_number', 'rank', 'dense_rank', 'percent_rank', 'cume_dist',
        'ntile', 'lag', 'lead', 'first_value', 'last_value', 'nth_value'
    ],
]

# Imputation method
ImputeMethod = Literal['value', 'mean', 'median', 'max', 'min']

# Stack offset
StackOffset = Literal['zero', 'center', 'normalize']

# Regression method
RegressionMethod = Literal['linear', 'log', 'exp', 'pow', 'quad', 'poly']

# Transform specifications (loosely-typed dict payloads)
AggregateTransform = Dict[str, Any]
FilterTransform = Dict[str, Any]
CalculateTransform = Dict[str, Any]
BinTransform = Dict[str, Any]
WindowTransform = Dict[str, Any]

# Install with Tessl CLI:
#   npx tessl i tessl/pypi-altair