JavaScript client library for the Arduino IoT Cloud REST API enabling programmatic management of devices, things, properties, and time-series data
Advanced time-series data querying with batch operations, sampling, and historical data analysis. The SeriesV2Api provides powerful capabilities for retrieving and analyzing IoT sensor data over time, including high-performance batch querying across multiple properties and time ranges simultaneously.
/**
 * V2 time-series API: aggregated, raw, and sampled batch queries, batched
 * last-value reads, and historic data export requests.
 * NOTE(review): every method takes an untyped `opts?: any`; per the method
 * docs it can carry an organization ID — a dedicated options interface
 * would be safer. Confirm accepted keys against the REST API reference.
 */
class SeriesV2Api {
/**
 * Execute batch query for multiple time series with aggregation
 * @param batchQueryRequestsMediaV1 - Batch query configuration with multiple requests
 * @param opts - Optional parameters including organization ID
 * @returns Promise<ArduinoSeriesResponse> - Aggregated time series data
 */
seriesV2BatchQuery(batchQueryRequestsMediaV1: BatchQueryRequestsMediaV1, opts?: any): Promise<ArduinoSeriesResponse>;
/**
 * Execute batch query for raw (non-aggregated) time series data
 * @param batchQueryRawRequestsMediaV1 - Raw batch query configuration
 * @param opts - Optional parameters including organization ID
 * @returns Promise<ArduinoSeriesRawResponse> - Raw time series data
 */
seriesV2BatchQueryRaw(batchQueryRawRequestsMediaV1: BatchQueryRawRequestsMediaV1, opts?: any): Promise<ArduinoSeriesRawResponse>;
/**
 * Get the last value for multiple properties in a single batch operation
 * @param batchLastValueRequestsMediaV1 - Batch last value request configuration
 * @param opts - Optional parameters including organization ID
 * @returns Promise<ArduinoSeriesRawLastValueResponse> - Latest values for requested properties
 */
seriesV2BatchQueryRawLastValue(batchLastValueRequestsMediaV1: BatchLastValueRequestsMediaV1, opts?: any): Promise<ArduinoSeriesRawLastValueResponse>;
/**
 * Execute batch query with statistical sampling for large datasets
 * @param batchQuerySampledRequestsMediaV1 - Sampled batch query configuration
 * @param opts - Optional parameters including organization ID
 * @returns Promise<ArduinoSeriesSampledResponse> - Statistically sampled data
 */
seriesV2BatchQuerySampling(batchQuerySampledRequestsMediaV1: BatchQuerySampledRequestsMediaV1, opts?: any): Promise<ArduinoSeriesSampledResponse>;
/**
 * Request historic data export for long-term analysis
 * @param historicDataRequest - Historic data export request configuration
 * @param opts - Optional parameters including organization ID
 * @returns Promise<ArduinoSeriesResponse> - Historic data export information
 */
seriesV2HistoricData(historicDataRequest: HistoricDataRequest, opts?: any): Promise<ArduinoSeriesResponse>;
}

Legacy time series functionality for backward compatibility. Use SeriesV2Api for new integrations.
/**
 * Legacy V1 time-series API, kept for backward compatibility.
 * Offers only aggregated and raw batch queries (no sampling, last-value,
 * or historic export); prefer SeriesV2Api for new integrations.
 */
class SeriesV1Api {
/**
 * Execute batch query for time-series data (legacy)
 * @param batchQueryRequestsMediaV1 - Batch query requests
 * @returns Promise<ArduinoSeriesBatch> - Batch of time-series data
 */
seriesV1BatchQuery(batchQueryRequestsMediaV1: BatchQueryRequestsMediaV1): Promise<ArduinoSeriesBatch>;
/**
 * Execute batch query for raw time-series data (legacy)
 * @param batchQueryRawRequestsMediaV1 - Raw batch query requests
 * @returns Promise<ArduinoSeriesRawBatch> - Raw batch of time-series data
 */
seriesV1BatchQueryRaw(batchQueryRawRequestsMediaV1: BatchQueryRawRequestsMediaV1): Promise<ArduinoSeriesRawBatch>;
}

Migration Note: V1 API has limited functionality compared to V2. V2 provides enhanced querying capabilities, better performance, statistical sampling, and improved data structures.
/** Envelope for aggregated batch queries (seriesV2BatchQuery / seriesV1BatchQuery). */
interface BatchQueryRequestsMediaV1 {
/** One entry per time series to query. */
requests?: BatchQueryRequestMediaV1[];
}
/** A single aggregated time-series query. */
interface BatchQueryRequestMediaV1 {
/** Aggregation function; the examples in this file use 'AVG' and 'MAX'. */
aggregation?: string;
/** Start of the time window. */
from?: Date;
/** Query expression, e.g. `property.id = "<id>" AND thing.id = "<id>"` (see examples below). */
q?: string;
/** Limit on returned series — presumably caps matched series/points; confirm against the API reference. */
series_limit?: number;
/** End of the time window. */
to?: Date;
}
/** Envelope for raw (non-aggregated) batch queries. */
interface BatchQueryRawRequestsMediaV1 {
/** One entry per time series to query. */
requests?: BatchQueryRawRequestMediaV1[];
}
/** A single raw time-series query — same shape as the aggregated request, minus `aggregation`. */
interface BatchQueryRawRequestMediaV1 {
/** Start of the time window. */
from?: Date;
/** Query expression, e.g. `property.id = "<id>" AND thing.id = "<id>"`. */
q?: string;
/** Limit on returned series — confirm exact semantics against the API reference. */
series_limit?: number;
/** End of the time window. */
to?: Date;
}
/** Envelope for batched last-value reads (seriesV2BatchQueryRawLastValue). */
interface BatchLastValueRequestsMediaV1 {
/** One entry per property whose latest value is wanted. */
requests?: BatchQueryRawLastValueRequestMediaV1[];
}
/** Selector for one last-value read; only a query expression is needed. */
interface BatchQueryRawLastValueRequestMediaV1 {
/** Query expression, e.g. `property.id = "<id>"` (see dashboard example below). */
q?: string;
}
/** Envelope for sampled batch queries (seriesV2BatchQuerySampling). */
interface BatchQuerySampledRequestsMediaV1 {
/** One entry per time series to sample. */
requests?: BatchQuerySampledRequestMediaV1[];
}
/** A single sampled time-series query: an aggregated query plus a sampling interval. */
interface BatchQuerySampledRequestMediaV1 {
/** Aggregation function applied per sample bucket, e.g. 'AVG'. */
aggregation?: string;
/** Start of the time window. */
from?: Date;
/** Sampling interval — the example below passes 3600 for 1-hour buckets, so presumably seconds; confirm unit. */
interval?: number;
/** Query expression, e.g. `property.id = "<id>" AND thing.id = "<id>"`. */
q?: string;
/** Limit on returned series — confirm exact semantics against the API reference. */
series_limit?: number;
/** End of the time window. */
to?: Date;
}
/** Parameters for a historic data export (seriesV2HistoricData). */
interface HistoricDataRequest {
/** Start of the export period. */
from?: Date;
/** Property IDs to include in the export. */
properties?: string[];
/** Thing IDs to include in the export. */
thingIds?: string[];
/** End of the export period. */
to?: Date;
}
/**
 * Response for aggregated queries and historic exports. Echoes the request
 * parameters alongside the data.
 */
interface ArduinoSeriesResponse {
/** Aggregation function that was applied. */
aggregation?: string;
/** Number of values returned — presumably length of `values`/`data`; confirm. */
countValues?: number;
/** Timestamped data points. */
data?: TimeseriesDataPoint[];
/** Start of the queried window. */
fromDate?: Date;
interval?: number;
/** The query expression that produced this series. */
query?: string;
/** Series limit echoed back (note the API's spelling: `serieLimit`, not `seriesLimit`). */
serieLimit?: number;
/** Outcome of this query within the batch. */
status?: string;
thingId?: string;
/** End of the queried window. */
toDate?: Date;
/** Raw values — relationship to `data` is not shown here; confirm against the API reference. */
values?: any[];
}
/** Response for raw (non-aggregated) queries. */
interface ArduinoSeriesRawResponse {
/** Timestamped raw data points. */
data?: TimeseriesDataPoint[];
/** The query expression that produced this series. */
query?: string;
/** Outcome of this query within the batch. */
status?: string;
}
/** Response for sampled queries: raw-style data plus the sampling parameters used. */
interface ArduinoSeriesSampledResponse {
/** Aggregation function applied per sample bucket. */
aggregation?: string;
/** Sampled, timestamped data points. */
data?: TimeseriesDataPoint[];
/** Sampling interval that was applied (see BatchQuerySampledRequestMediaV1.interval). */
interval?: number;
/** The query expression that produced this series. */
query?: string;
/** Outcome of this query within the batch. */
status?: string;
}
/** Response for batched last-value reads: one entry per requested property. */
interface ArduinoSeriesRawLastValueResponse {
/** Latest value per matched property. */
data?: ArduinoSeriesRawBatchLastvalue[];
/** The query expression that produced this result. */
query?: string;
/** Outcome of this query within the batch. */
status?: string;
}
/** One sample: a timestamp and its recorded value (type depends on the property). */
interface TimeseriesDataPoint {
timestamp?: Date;
value?: any;
}
/** Latest observed value for a single property. */
interface ArduinoSeriesRawBatchLastvalue {
/** Most recent recorded value (type depends on the property). */
lastValue?: any;
/** When the last value was recorded. */
lastValueUpdatedAt?: Date;
/** Property the value belongs to. */
propertyId?: string;
/** Thing the property belongs to. */
thingId?: string;
}

Comprehensive Time Series Analytics Examples:
import ArduinoIotClient from '@arduino/arduino-iot-client';
// Shared V2 client instance used by every example function below.
const seriesApi = new ArduinoIotClient.SeriesV2Api();
// Multi-property batch analysis
/**
 * Run one aggregated batch query covering temperature, humidity and
 * pressure for the first thing in `thingIds`.
 *
 * @param thingIds - array of thing IDs (only thingIds[0] is queried)
 * @param propertyIds - { temperature, humidity, pressure } property IDs
 * @param timeRange - { from, to } Date window
 * @returns the ArduinoSeriesResponse from seriesV2BatchQuery
 * @throws rethrows any API error after logging it
 */
async function analyzeSensorData(thingIds, propertyIds, timeRange) {
  try {
    const { from, to } = timeRange;
    const primaryThing = thingIds[0];
    const selector = (propertyId) =>
      `property.id in ["${propertyId}"] AND thing.id in ["${primaryThing}"]`;

    // One request per channel; temperature/humidity use hourly-style AVG
    // with a generous point budget, pressure uses MAX with a smaller one.
    const channels = [
      { propertyId: propertyIds.temperature, aggregation: 'AVG', series_limit: 1000 },
      { propertyId: propertyIds.humidity, aggregation: 'AVG', series_limit: 1000 },
      { propertyId: propertyIds.pressure, aggregation: 'MAX', series_limit: 100 }
    ];

    const batchRequest = {
      requests: channels.map(({ propertyId, aggregation, series_limit }) => ({
        q: selector(propertyId),
        from: from,
        to: to,
        aggregation,
        series_limit
      }))
    };

    const results = await seriesApi.seriesV2BatchQuery(batchRequest);
    console.log(`Batch query returned ${results.data?.length || 0} data points`);
    return results;
  } catch (error) {
    console.error('Batch query failed:', error);
    throw error;
  }
}
// Real-time dashboard data
/**
 * Collect dashboard data for a list of property IDs: the latest value of
 * each property plus an AVG-aggregated trend over the trailing four hours.
 *
 * @param propertyIds - property IDs to read
 * @returns { lastValues, trendData } arrays from the two batch queries
 * @throws rethrows any API error after logging it
 */
async function getDashboardData(propertyIds) {
  try {
    const selectors = propertyIds.map(pid => `property.id = "${pid}"`);

    // Latest reading for every dashboard property in one batch call.
    const lastValues = await seriesApi.seriesV2BatchQueryRawLastValue({
      requests: selectors.map(q => ({ q }))
    });
    console.log('Latest property values:');
    lastValues.data?.forEach((item, index) => {
      console.log(`Property ${propertyIds[index]}: ${item.lastValue}`);
    });

    // Averaged trend over the last 4 hours, for chart rendering.
    const windowStart = new Date(Date.now() - 4 * 3600000);
    const windowEnd = new Date();
    const trendData = await seriesApi.seriesV2BatchQuery({
      requests: selectors.map(q => ({
        q,
        from: windowStart,
        to: windowEnd,
        aggregation: 'AVG'
      }))
    });

    return {
      lastValues: lastValues.data,
      trendData: trendData.data
    };
  } catch (error) {
    console.error('Dashboard data query failed:', error);
    throw error;
  }
}
// Statistical analysis with sampling
/**
 * Pull hourly AVG samples for one property over the last `days` days and
 * compute summary statistics (count, min/max, mean, median, quartiles,
 * population standard deviation).
 *
 * Fixes vs. previous revision:
 *  - samples that do not parse to a finite number are discarded;
 *    previously a single NaN from parseFloat poisoned every statistic;
 *  - the median now averages the two middle values for even-length data
 *    instead of taking only the upper-middle element.
 *
 * @param thingId - thing to query
 * @param propertyId - property to query
 * @param days - lookback window in days (default 30)
 * @returns stats object, or null when no usable data came back
 * @throws rethrows any API error after logging it
 */
async function performStatisticalAnalysis(thingId, propertyId, days = 30) {
  try {
    const from = new Date(Date.now() - days * 24 * 3600000);
    const to = new Date();
    const sampledRequest = {
      requests: [{
        q: `property.id = "${propertyId}" AND thing.id = "${thingId}"`,
        from: from,
        to: to,
        aggregation: 'AVG',
        interval: 3600, // 1-hour intervals
        series_limit: days * 24 // Max one point per hour
      }]
    };
    const sampledData = await seriesApi.seriesV2BatchQuerySampling(sampledRequest);
    // Keep only values that parse to a finite number.
    const values = (sampledData.data || [])
      .map(point => parseFloat(point.value))
      .filter(Number.isFinite);
    if (values.length === 0) {
      return null;
    }
    const sortedValues = [...values].sort((a, b) => a - b);
    // Proper median: average the two middle elements when count is even.
    const mid = Math.floor(sortedValues.length / 2);
    const median = sortedValues.length % 2 === 0
      ? (sortedValues[mid - 1] + sortedValues[mid]) / 2
      : sortedValues[mid];
    const mean = values.reduce((sum, val) => sum + val, 0) / values.length;
    // Population variance (divide by N, matching the original formula).
    const variance = values.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / values.length;
    const stats = {
      count: values.length,
      min: sortedValues[0],
      max: sortedValues[sortedValues.length - 1],
      mean: mean,
      median: median,
      q25: sortedValues[Math.floor(sortedValues.length * 0.25)],
      q75: sortedValues[Math.floor(sortedValues.length * 0.75)],
      stdDev: Math.sqrt(variance)
    };
    console.log('Statistical Analysis Results:');
    console.log(`Data points: ${stats.count}`);
    console.log(`Range: ${stats.min.toFixed(2)} - ${stats.max.toFixed(2)}`);
    console.log(`Mean: ${stats.mean.toFixed(2)} ± ${stats.stdDev.toFixed(2)}`);
    console.log(`Median: ${stats.median.toFixed(2)}`);
    console.log(`IQR: ${stats.q25.toFixed(2)} - ${stats.q75.toFixed(2)}`);
    return stats;
  } catch (error) {
    console.error('Statistical analysis failed:', error);
    throw error;
  }
}
// Anomaly detection
/**
 * Flag z-score outliers (|z| > 2 against a trailing moving window) in the
 * raw data of one property over the last `hours` hours.
 *
 * Fixes vs. previous revision:
 *  - with fewer than 4 points the window size was 0, making every window
 *    empty (mean = 0/0 = NaN) and silently disabling detection; the window
 *    is now clamped to >= 2 and short datasets return [] explicitly;
 *  - points whose value does not parse to a finite number are dropped,
 *    with timestamps kept aligned to their values;
 *  - a flat window (stdDev === 0) is handled explicitly: any deviation
 *    from it is reported as an anomaly instead of relying on 1/0 math.
 *
 * @param thingId - thing to query
 * @param propertyId - property to query
 * @param hours - lookback window in hours (default 24)
 * @returns array of { timestamp, value, zScore, expected } anomalies
 * @throws rethrows any API error after logging it
 */
async function detectAnomalies(thingId, propertyId, hours = 24) {
  try {
    const from = new Date(Date.now() - hours * 3600000);
    const to = new Date();
    const rawRequest = {
      requests: [{
        q: `property.id = "${propertyId}" AND thing.id = "${thingId}"`,
        from: from,
        to: to,
        series_limit: 1000
      }]
    };
    const rawData = await seriesApi.seriesV2BatchQueryRaw(rawRequest);
    // Parse and filter as value/timestamp pairs so they stay aligned.
    const points = (rawData.data || [])
      .map(point => ({
        value: parseFloat(point.value),
        timestamp: new Date(point.timestamp)
      }))
      .filter(p => Number.isFinite(p.value));
    if (points.length === 0) {
      return [];
    }
    // Trailing window of up to 10 points, never fewer than 2.
    const windowSize = Math.min(10, Math.max(2, Math.floor(points.length / 4)));
    const anomalies = [];
    for (let i = windowSize; i < points.length; i++) {
      const window = points.slice(i - windowSize, i).map(p => p.value);
      const mean = window.reduce((sum, val) => sum + val, 0) / window.length;
      const stdDev = Math.sqrt(
        window.reduce((sum, val) => sum + Math.pow(val - mean, 2), 0) / window.length
      );
      const currentValue = points[i].value;
      if (stdDev === 0) {
        // Flat window: z is undefined, so any deviation is an anomaly.
        if (currentValue !== mean) {
          anomalies.push({
            timestamp: points[i].timestamp,
            value: currentValue,
            zScore: 'Infinity',
            expected: mean.toFixed(2)
          });
        }
        continue;
      }
      const zScore = Math.abs((currentValue - mean) / stdDev);
      // Detect outliers (z-score > 2)
      if (zScore > 2) {
        anomalies.push({
          timestamp: points[i].timestamp,
          value: currentValue,
          zScore: zScore.toFixed(2),
          expected: mean.toFixed(2)
        });
      }
    }
    console.log(`Detected ${anomalies.length} anomalies in the last ${hours} hours`);
    anomalies.forEach(anomaly => {
      console.log(`${anomaly.timestamp.toISOString()}: ${anomaly.value} (z-score: ${anomaly.zScore})`);
    });
    return anomalies;
  } catch (error) {
    console.error('Anomaly detection failed:', error);
    throw error;
  }
}
// Historic data export
/**
 * Request a server-side historic data export for the given things and
 * properties over the supplied period.
 *
 * @param thingIds - thing IDs to include
 * @param propertyIds - property IDs to include
 * @param exportPeriod - { from, to } Date window
 * @returns the ArduinoSeriesResponse describing the export
 * @throws rethrows any API error after logging it
 */
async function exportHistoricData(thingIds, propertyIds, exportPeriod) {
  try {
    const { from, to } = exportPeriod;
    const exportInfo = await seriesApi.seriesV2HistoricData({
      thingIds: thingIds,
      properties: propertyIds,
      from: from,
      to: to
    });
    console.log('Historic data export initiated:');
    console.log(`Status: ${exportInfo.status}`);
    console.log(`Time range: ${from.toISOString()} to ${to.toISOString()}`);
    console.log(`Properties: ${propertyIds.length}`);
    console.log(`Things: ${thingIds.length}`);
    return exportInfo;
  } catch (error) {
    console.error('Historic data export failed:', error);
    throw error;
  }
}
// Comprehensive analytics dashboard
/**
 * Run the full analytics pipeline: real-time status, last-hour trend
 * analysis, per-property daily statistics and anomaly scans, then a
 * weekly historic export. Errors are classified and logged, not rethrown.
 *
 * Fixes vs. previous revision: removed unused locals (`lastDay`, and the
 * never-read `realtimeData`/`hourlyTrends` bindings) and replaced the
 * implicit `Date - number` coercion with explicit getTime() arithmetic.
 *
 * @param thingIds - thing IDs (only thingIds[0] is used for stats/anomalies)
 * @param propertyIds - property IDs; indices 0-2 map to temperature,
 *                      humidity and pressure for the trend analysis
 */
async function createAnalyticsDashboard(thingIds, propertyIds) {
  try {
    const now = new Date();
    const nowMs = now.getTime();
    const lastHour = new Date(nowMs - 3600000);
    const lastWeek = new Date(nowMs - 7 * 24 * 3600000);
    console.log('Creating comprehensive analytics dashboard...');
    // 1. Real-time status
    console.log('\n=== Real-time Status ===');
    await getDashboardData(propertyIds);
    // 2. Last hour trends
    console.log('\n=== Last Hour Analysis ===');
    await analyzeSensorData(thingIds, {
      temperature: propertyIds[0],
      humidity: propertyIds[1],
      pressure: propertyIds[2]
    }, { from: lastHour, to: now });
    // 3. Daily statistics
    console.log('\n=== Daily Statistics ===');
    for (const propertyId of propertyIds) {
      await performStatisticalAnalysis(thingIds[0], propertyId, 1);
    }
    // 4. Anomaly detection
    console.log('\n=== Anomaly Detection ===');
    for (const propertyId of propertyIds) {
      await detectAnomalies(thingIds[0], propertyId, 24);
    }
    // 5. Weekly export
    console.log('\n=== Weekly Data Export ===');
    await exportHistoricData(thingIds, propertyIds, {
      from: lastWeek,
      to: now
    });
    console.log('\nAnalytics dashboard completed successfully');
  } catch (error) {
    if (error.status === 400) {
      console.error('Bad request - check query parameters');
    } else if (error.status === 429) {
      console.error('Rate limit exceeded - slow down queries');
    } else {
      console.error('Analytics error:', error.detail || error.message);
    }
  }
}
// Usage example
// End-to-end demo entry point — replace the placeholder IDs with real
// thing and property IDs before running.
async function timeSeriesAnalyticsDemo() {
const thingIds = ['your-thing-id-1', 'your-thing-id-2'];
const propertyIds = ['temp-prop-id', 'humidity-prop-id', 'pressure-prop-id'];
await createAnalyticsDashboard(thingIds, propertyIds);
}

Install with Tessl CLI
npx tessl i tessl/npm-arduino--arduino-iot-client