or run

npx @tessl/cli init
Log in

Version

Tile

Overview

Evals

Files

Files

docs

analytics-targets.md, api-targets.md, cicd-targets.md, compute-targets.md, index.md, messaging-targets.md, orchestration-targets.md, system-targets.md

docs/analytics-targets.md

# Analytics and Logging Targets

Targets for data streaming, analytics, and logging services that can receive and process EventBridge events.

## Capabilities

### Kinesis Stream Target

Send events to Amazon Kinesis Data Streams for real-time processing.

```typescript { .api }
/**
 * Use a Kinesis stream as a target for Amazon EventBridge rules
 */
class KinesisStream implements events.IRuleTarget {
  constructor(stream: kinesis.IStream, props?: KinesisStreamProps);

  /**
   * Returns a RuleTarget that can be used to put records to this Kinesis stream
   * as a result from an EventBridge event
   */
  bind(rule: events.IRule, id?: string): events.RuleTargetConfig;
}

interface KinesisStreamProps {
  /**
   * Partition key path for the Kinesis record
   * The partition key determines which shard the record goes to
   * @default event ID from EventBridge
   */
  readonly partitionKeyPath?: string;

  /**
   * The message to send to the Kinesis stream
   * @default the entire EventBridge event
   */
  readonly message?: events.RuleTargetInput;
}
```

**Usage Example:**

```typescript
import * as kinesis from "@aws-cdk/aws-kinesis";
import * as events from "@aws-cdk/aws-events";
import * as targets from "@aws-cdk/aws-events-targets";

// Create Kinesis stream
const stream = new kinesis.Stream(this, "EventStream", {
  streamName: "application-events",
  shardCount: 3,
  retentionPeriod: Duration.days(7),
});

// Rule for user activity events
const userActivityRule = new events.Rule(this, "UserActivityRule", {
  eventPattern: {
    source: ["myapp.users"],
    detailType: ["User Login", "User Logout", "Page View", "Action Performed"],
  },
});

// Send to Kinesis with user ID as partition key
userActivityRule.addTarget(new targets.KinesisStream(stream, {
  partitionKeyPath: "$.detail.userId",
  message: events.RuleTargetInput.fromObject({
    eventId: events.EventField.fromPath("$.id"),
    timestamp: events.EventField.fromPath("$.time"),
    eventType: events.EventField.fromPath("$.detail-type"),
    userId: events.EventField.fromPath("$.detail.userId"),
    sessionId: events.EventField.fromPath("$.detail.sessionId"),
    metadata: events.EventField.fromPath("$.detail.metadata"),
    source: events.EventField.fromPath("$.source"),
    region: events.EventField.fromPath("$.region"),
  }),
}));

// Rule for system metrics with different partition strategy
const metricsRule = new events.Rule(this, "MetricsRule", {
  eventPattern: {
    source: ["myapp.metrics"],
    detailType: ["Performance Metric", "Error Count", "Transaction Volume"],
  },
});

metricsRule.addTarget(new targets.KinesisStream(stream, {
  partitionKeyPath: "$.detail.metricType",
  message: events.RuleTargetInput.fromPath("$.detail"),
}));

// Send entire event for audit logging
const auditRule = new events.Rule(this, "AuditRule", {
  eventPattern: {
    source: ["myapp.security"],
  },
});

auditRule.addTarget(new targets.KinesisStream(stream, {
  partitionKeyPath: "$.detail.resourceId",
  // Default message is the entire event
}));
```

### CloudWatch LogGroup Target

Send events to Amazon CloudWatch Logs for logging and monitoring.

```typescript { .api }
/**
 * Use a CloudWatch LogGroup as a target for Amazon EventBridge rules
 */
class CloudWatchLogGroup implements events.IRuleTarget {
  constructor(logGroup: logs.ILogGroup, props?: LogGroupProps);

  /**
   * Returns a RuleTarget that can be used to log events to this CloudWatch LogGroup
   * as a result from an EventBridge event
   */
  bind(rule: events.IRule, id?: string): events.RuleTargetConfig;
}

interface LogGroupProps extends TargetBaseProps {
  /**
   * The event to send to the CloudWatch LogGroup
   * @default the entire EventBridge event
   */
  readonly event?: events.RuleTargetInput;
}
```

**Usage Example:**

```typescript
import * as logs from "@aws-cdk/aws-logs";
import * as events from "@aws-cdk/aws-events";
import * as targets from "@aws-cdk/aws-events-targets";
import * as sqs from "@aws-cdk/aws-sqs";

// Create CloudWatch Log Groups
const applicationLogGroup = new logs.LogGroup(this, "ApplicationLogs", {
  logGroupName: "/aws/events/application",
  retention: logs.RetentionDays.ONE_MONTH,
});

const securityLogGroup = new logs.LogGroup(this, "SecurityLogs", {
  logGroupName: "/aws/events/security",
  retention: logs.RetentionDays.ONE_YEAR,
});

const errorLogGroup = new logs.LogGroup(this, "ErrorLogs", {
  logGroupName: "/aws/events/errors",
  retention: logs.RetentionDays.THREE_MONTHS,
});

// Create dead letter queue for failed log deliveries
const logDlq = new sqs.Queue(this, "LogDeadLetterQueue", {
  queueName: "log-delivery-failures",
});

// Rule for application events with structured logging
const appEventsRule = new events.Rule(this, "ApplicationEventsRule", {
  eventPattern: {
    source: ["myapp"],
    detailType: [
      "Order Created",
      "Payment Processed",
      "User Registration",
      "Data Export",
    ],
  },
});

appEventsRule.addTarget(new targets.CloudWatchLogGroup(applicationLogGroup, {
  deadLetterQueue: logDlq,
  retryAttempts: 3,
  maxEventAge: Duration.hours(1),
  event: events.RuleTargetInput.fromObject({
    timestamp: events.EventField.fromPath("$.time"),
    level: "INFO",
    eventType: events.EventField.fromPath("$.detail-type"),
    source: events.EventField.fromPath("$.source"),
    account: events.EventField.fromPath("$.account"),
    region: events.EventField.fromPath("$.region"),
    details: events.EventField.fromPath("$.detail"),
    eventId: events.EventField.fromPath("$.id"),
  }),
}));

// Rule for security events with detailed logging
const securityRule = new events.Rule(this, "SecurityEventsRule", {
  eventPattern: {
    source: ["aws.guardduty", "aws.securityhub", "myapp.security"],
    detailType: [
      "GuardDuty Finding",
      "Security Hub Findings - Imported",
      "Authentication Failure",
      "Privilege Escalation",
    ],
  },
});

securityRule.addTarget(new targets.CloudWatchLogGroup(securityLogGroup, {
  event: events.RuleTargetInput.fromObject({
    "@timestamp": events.EventField.fromPath("$.time"),
    level: "WARN",
    eventType: events.EventField.fromPath("$.detail-type"),
    source: events.EventField.fromPath("$.source"),
    account: events.EventField.fromPath("$.account"),
    region: events.EventField.fromPath("$.region"),
    severity: events.EventField.fromPath("$.detail.severity"),
    finding: events.EventField.fromPath("$.detail"),
    eventId: events.EventField.fromPath("$.id"),
  }),
}));

// Rule for error events
const errorRule = new events.Rule(this, "ErrorEventsRule", {
  eventPattern: {
    source: ["myapp"],
    detailType: ["Error Occurred", "Exception Thrown", "Service Failure"],
  },
});

errorRule.addTarget(new targets.CloudWatchLogGroup(errorLogGroup, {
  event: events.RuleTargetInput.fromObject({
    "@timestamp": events.EventField.fromPath("$.time"),
    level: "ERROR",
    eventType: events.EventField.fromPath("$.detail-type"),
    source: events.EventField.fromPath("$.source"),
    errorMessage: events.EventField.fromPath("$.detail.error.message"),
    errorCode: events.EventField.fromPath("$.detail.error.code"),
    stackTrace: events.EventField.fromPath("$.detail.error.stackTrace"),
    context: events.EventField.fromPath("$.detail.context"),
    eventId: events.EventField.fromPath("$.id"),
  }),
}));

// Simple logging for AWS service events
const awsEventsRule = new events.Rule(this, "AWSEventsRule", {
  eventPattern: {
    source: [{ prefix: "aws." }],
  },
});

awsEventsRule.addTarget(new targets.CloudWatchLogGroup(applicationLogGroup));
```

### Kinesis Data Firehose Target

Send events to Amazon Kinesis Data Firehose for data delivery to analytics services.

```typescript { .api }
/**
 * Use a Kinesis Data Firehose stream as a target for Amazon EventBridge rules
 */
class KinesisFirehoseStream implements events.IRuleTarget {
  constructor(stream: firehose.CfnDeliveryStream, props?: KinesisFirehoseStreamProps);

  /**
   * Returns a RuleTarget that can be used to put records to this Kinesis Data Firehose stream
   * as a result from an EventBridge event
   */
  bind(rule: events.IRule, id?: string): events.RuleTargetConfig;
}

interface KinesisFirehoseStreamProps {
  /**
   * The message to send to the Kinesis Data Firehose stream
   * @default the entire EventBridge event
   */
  readonly message?: events.RuleTargetInput;
}
```

**Usage Example:**

```typescript
import * as firehose from "@aws-cdk/aws-kinesisfirehose";
import * as s3 from "@aws-cdk/aws-s3";
import * as iam from "@aws-cdk/aws-iam";
import * as events from "@aws-cdk/aws-events";
import * as targets from "@aws-cdk/aws-events-targets";

// Create S3 bucket for data storage
const dataLakeBucket = new s3.Bucket(this, "DataLakeBucket", {
  bucketName: "my-analytics-data-lake",
  lifecycleRules: [{
    id: "archive-old-data",
    transitions: [{
      storageClass: s3.StorageClass.GLACIER,
      transitionAfter: Duration.days(90),
    }],
  }],
});

// Create IAM role for Firehose
const firehoseRole = new iam.Role(this, "FirehoseRole", {
  assumedBy: new iam.ServicePrincipal("firehose.amazonaws.com"),
  inlinePolicies: {
    S3Access: new iam.PolicyDocument({
      statements: [
        new iam.PolicyStatement({
          actions: [
            "s3:AbortMultipartUpload",
            "s3:GetBucketLocation",
            "s3:GetObject",
            "s3:ListBucket",
            "s3:ListBucketMultipartUploads",
            "s3:PutObject",
          ],
          resources: [
            dataLakeBucket.bucketArn,
            `${dataLakeBucket.bucketArn}/*`,
          ],
        }),
      ],
    }),
  },
});

// Create Kinesis Data Firehose delivery stream
const deliveryStream = new firehose.CfnDeliveryStream(this, "EventDeliveryStream", {
  deliveryStreamName: "event-analytics-stream",
  deliveryStreamType: "DirectPut",
  extendedS3DestinationConfiguration: {
    bucketArn: dataLakeBucket.bucketArn,
    roleArn: firehoseRole.roleArn,
    prefix: "events/year=!{timestamp:yyyy}/month=!{timestamp:MM}/day=!{timestamp:dd}/hour=!{timestamp:HH}/",
    errorOutputPrefix: "errors/",
    bufferingHints: {
      sizeInMBs: 5,
      intervalInSeconds: 300,
    },
    compressionFormat: "GZIP",
    dataFormatConversionConfiguration: {
      enabled: true,
      outputFormatConfiguration: {
        serializer: {
          parquetSerDe: {},
        },
      },
    },
  },
});

// Rule for analytics events
const analyticsRule = new events.Rule(this, "AnalyticsRule", {
  eventPattern: {
    source: ["myapp"],
    detailType: [
      "User Activity",
      "Transaction",
      "Performance Metric",
      "Business Event",
    ],
  },
});

// Send structured data to Firehose
analyticsRule.addTarget(new targets.KinesisFirehoseStream(deliveryStream, {
  message: events.RuleTargetInput.fromObject({
    eventId: events.EventField.fromPath("$.id"),
    timestamp: events.EventField.fromPath("$.time"),
    eventType: events.EventField.fromPath("$.detail-type"),
    source: events.EventField.fromPath("$.source"),
    account: events.EventField.fromPath("$.account"),
    region: events.EventField.fromPath("$.region"),
    // Flatten the detail for easier querying
    userId: events.EventField.fromPath("$.detail.userId"),
    sessionId: events.EventField.fromPath("$.detail.sessionId"),
    deviceType: events.EventField.fromPath("$.detail.deviceType"),
    userAgent: events.EventField.fromPath("$.detail.userAgent"),
    ipAddress: events.EventField.fromPath("$.detail.ipAddress"),
    // Include full detail as JSON string for complex analysis
    detailJson: events.EventField.fromPath("$.detail"),
  }),
}));

// Send raw events for comprehensive analysis
const rawEventsRule = new events.Rule(this, "RawEventsRule", {
  eventPattern: {
    source: [{ prefix: "myapp." }],
  },
});

rawEventsRule.addTarget(new targets.KinesisFirehoseStream(deliveryStream));
```

## Analytics Patterns

### Stream Processing Architecture

```typescript
// Multi-stream setup for different data types
const realTimeStream = new kinesis.Stream(this, "RealTimeStream", {
  shardCount: 5,
  retentionPeriod: Duration.hours(24),
});

const batchStream = new kinesis.Stream(this, "BatchStream", {
  shardCount: 2,
  retentionPeriod: Duration.days(7),
});

// High-frequency events go to real-time stream
const realtimeRule = new events.Rule(this, "RealtimeRule", {
  eventPattern: {
    source: ["myapp.realtime"],
    detailType: ["Click", "Page View", "API Call"],
  },
});

realtimeRule.addTarget(new targets.KinesisStream(realTimeStream, {
  partitionKeyPath: "$.detail.sessionId",
}));

// Lower-frequency events go to batch stream
const batchRule = new events.Rule(this, "BatchRule", {
  eventPattern: {
    source: ["myapp.batch"],
    detailType: ["Daily Report", "Weekly Summary", "Monthly Metrics"],
  },
});

batchRule.addTarget(new targets.KinesisStream(batchStream, {
  partitionKeyPath: "$.detail.reportType",
}));
```

### Log Aggregation Patterns

```typescript
// Centralized logging with different retention policies
const debugLogGroup = new logs.LogGroup(this, "DebugLogs", {
  retention: logs.RetentionDays.THREE_DAYS,
});

const infoLogGroup = new logs.LogGroup(this, "InfoLogs", {
  retention: logs.RetentionDays.ONE_WEEK,
});

const errorLogGroup = new logs.LogGroup(this, "ErrorLogs", {
  retention: logs.RetentionDays.SIX_MONTHS,
});

// Route events by severity
const debugRule = new events.Rule(this, "DebugRule", {
  eventPattern: {
    detail: { level: ["DEBUG"] },
  },
});

const infoRule = new events.Rule(this, "InfoRule", {
  eventPattern: {
    detail: { level: ["INFO"] },
  },
});

const errorRule = new events.Rule(this, "ErrorRule", {
  eventPattern: {
    detail: { level: ["ERROR", "FATAL"] },
  },
});

debugRule.addTarget(new targets.CloudWatchLogGroup(debugLogGroup));
infoRule.addTarget(new targets.CloudWatchLogGroup(infoLogGroup));
errorRule.addTarget(new targets.CloudWatchLogGroup(errorLogGroup));
```