# Database Dialects

Database-specific query builders providing specialized features and syntax for different SQL databases. Each dialect extends the base Query functionality with database-specific optimizations, functions, and SQL syntax variations.

## Capabilities

### MySQL Dialect

MySQL-specific query builder with support for MySQL syntax and features.

```python { .api }
class MySQLQuery(Query):
    """MySQL-specific query builder."""

    def on_duplicate_key_update(self, **kwargs) -> MySQLQuery:
        """Add ON DUPLICATE KEY UPDATE clause for INSERT statements."""

    def on_duplicate_key_ignore(self) -> MySQLQuery:
        """Add ON DUPLICATE KEY IGNORE clause for INSERT statements."""
```

**Usage Examples:**

```python
from pypika import MySQLQuery, Table

users = Table('users')

# ON DUPLICATE KEY UPDATE
query = (MySQLQuery.into(users)
         .columns(users.id, users.name, users.email)
         .insert(1, 'John Doe', 'john@example.com')
         .on_duplicate_key_update(
             name='John Doe Updated',
             email='john.updated@example.com'
         ))

# ON DUPLICATE KEY IGNORE
query = (MySQLQuery.into(users)
         .columns(users.name, users.email)
         .insert('Jane Doe', 'jane@example.com')
         .on_duplicate_key_ignore())

# MySQL-specific functions
from pypika.functions import Concat
query = MySQLQuery.from_(users).select(
    Concat(users.first_name, ' ', users.last_name).as_('full_name')
)
```

### PostgreSQL Dialect

PostgreSQL-specific query builder with advanced PostgreSQL features.

```python { .api }
class PostgreSQLQuery(Query):
    """PostgreSQL-specific query builder."""

    def on_conflict(self, *target_fields) -> OnConflictQueryBuilder:
        """Add ON CONFLICT clause for INSERT statements."""

    def returning(self, *terms) -> PostgreSQLQuery:
        """Add RETURNING clause."""

    def distinct_on(self, *terms) -> PostgreSQLQuery:
        """Add DISTINCT ON clause."""

class OnConflictQueryBuilder:
    def do_nothing(self) -> PostgreSQLQuery:
        """Add DO NOTHING to ON CONFLICT."""

    def do_update(self, **kwargs) -> PostgreSQLQuery:
        """Add DO UPDATE to ON CONFLICT."""
```

**Usage Examples:**

```python
from pypika import PostgreSQLQuery, Table

users = Table('users')
orders = Table('orders')

# ON CONFLICT DO NOTHING
query = (PostgreSQLQuery.into(users)
         .columns(users.email, users.name)
         .insert('john@example.com', 'John Doe')
         .on_conflict(users.email)
         .do_nothing())

# ON CONFLICT DO UPDATE
query = (PostgreSQLQuery.into(users)
         .columns(users.email, users.name, users.last_login)
         .insert('john@example.com', 'John Doe', 'NOW()')
         .on_conflict(users.email)
         .do_update(
             name='John Doe Updated',
             last_login='NOW()'
         ))

# RETURNING clause
query = (PostgreSQLQuery.into(users)
         .columns(users.name, users.email)
         .insert('John Doe', 'john@example.com')
         .returning(users.id, users.created_at))

# UPDATE with RETURNING
query = (PostgreSQLQuery.update(users)
         .set(users.last_login, 'NOW()')
         .where(users.id == 1)
         .returning(users.id, users.last_login))

# DISTINCT ON
query = (PostgreSQLQuery.from_(orders)
         .distinct_on(orders.customer_id)
         .select(orders.customer_id, orders.order_date, orders.total)
         .orderby(orders.customer_id, orders.order_date.desc()))

# JSON operations (PostgreSQL specific)
from pypika import JSON
profile = users.profile  # JSON field
query = (PostgreSQLQuery.from_(users)
         .select(
             users.id,
             profile.get_text_value('name').as_('profile_name'),
             profile.get_path_json_value(['address', 'city']).as_('city')
         )
         .where(profile.has_key('email')))
```

### Oracle Dialect

Oracle Database-specific query builder with Oracle SQL features.

```python { .api }
class OracleQuery(Query):
    """Oracle Database-specific query builder."""
```

**Usage Examples:**

```python
from pypika import OracleQuery, Table
from pypika.pseudocolumns import RowNum, SysDate

users = Table('users')

# Oracle-specific pseudocolumns
query = (OracleQuery.from_(users)
         .select(users.name, users.email, RowNum.as_('row_number'))
         .where(RowNum <= 10))

# Using SYSDATE
query = (OracleQuery.from_(users)
         .select('*')
         .where(users.created_at > SysDate - 30))

# Oracle date formatting
from pypika.functions import ToChar
query = (OracleQuery.from_(users)
         .select(
             users.name,
             ToChar(users.created_at, 'YYYY-MM-DD').as_('created_date')
         ))
```

### Microsoft SQL Server Dialect

MSSQL-specific query builder with SQL Server features.

```python { .api }
class MSSQLQuery(Query):
    """Microsoft SQL Server-specific query builder."""

    def top(self, limit) -> MSSQLQuery:
        """Add TOP clause."""
```

**Usage Examples:**

```python
from pypika import MSSQLQuery, Table
from pypika.functions import Count

users = Table('users')
orders = Table('orders')

# TOP clause (SQL Server style)
query = (MSSQLQuery.from_(users)
         .top(10)
         .select('*')
         .orderby(users.created_at.desc()))

# WITH clause and CTE
user_stats = (MSSQLQuery.from_(orders)
              .select(orders.user_id, Count('*').as_('order_count'))
              .groupby(orders.user_id))

query = (MSSQLQuery.with_(user_stats, 'user_stats')
         .from_(users)
         .join('user_stats').on(users.id == 'user_stats.user_id')
         .select(users.name, 'user_stats.order_count'))
```

### ClickHouse Dialect

Yandex ClickHouse-specific query builder with ClickHouse optimizations.

```python { .api }
class ClickHouseQuery(Query):
    """ClickHouse-specific query builder."""
```

**Usage Examples:**

```python
from pypika import ClickHouseQuery, Table

events = Table('events')

# ClickHouse-specific functions
from pypika.clickhouse.dates_and_times import ToYYYYMM, AddDays
from pypika.clickhouse.array import HasAny, Length
from pypika.clickhouse.type_conversion import ToString, ToInt64

# Date/time functions
query = (ClickHouseQuery.from_(events)
         .select(
             ToYYYYMM(events.created_at).as_('year_month'),
             AddDays(events.created_at, 30).as_('plus_30_days')
         ))

# Array functions
query = (ClickHouseQuery.from_(events)
         .select(
             events.user_id,
             Length(events.tags).as_('tag_count')
         )
         .where(HasAny(events.tags, ['important', 'urgent'])))

# Type conversion functions
query = (ClickHouseQuery.from_(events)
         .select(
             ToString(events.user_id).as_('user_id_str'),
             ToInt64(events.score).as_('score_int')
         ))

# Conditional functions
from pypika.clickhouse.condition import If, MultiIf
query = (ClickHouseQuery.from_(events)
         .select(
             events.user_id,
             If(events.score > 100, 'high', 'low').as_('score_category'),
             MultiIf(
                 events.score > 200, 'very_high',
                 events.score > 100, 'high',
                 events.score > 50, 'medium',
                 'low'
             ).as_('detailed_category')
         ))
```

### SQLite Dialect

SQLite-specific query builder with SQLite features and limitations.

```python { .api }
class SQLLiteQuery(Query):
    """SQLite-specific query builder."""
```

**Usage Examples:**

```python
from pypika import SQLLiteQuery, Table

users = Table('users')

# SQLite-specific date handling
from pypika.functions import Date, DateTime
query = (SQLLiteQuery.from_(users)
         .select(
             users.name,
             Date(users.created_at).as_('created_date'),
             DateTime(users.created_at, '+1 month').as_('next_month')
         ))

# SQLite LIMIT/OFFSET
query = (SQLLiteQuery.from_(users)
         .select('*')
         .orderby(users.id)
         .limit(20)
         .offset(100))

# SQLite aggregate functions
from pypika.functions import Count, Sum, Avg
query = (SQLLiteQuery.from_(users)
         .select(
             Count('*').as_('total_users'),
             Avg(users.age).as_('avg_age')
         ))
```

### Redshift Dialect

Amazon Redshift-specific query builder with Redshift optimizations.

```python { .api }
class RedshiftQuery(Query):
    """Amazon Redshift-specific query builder."""
```

**Usage Examples:**

```python
from pypika import RedshiftQuery, Table

events = Table('events')
users = Table('users')

# Redshift-specific window functions
from pypika.analytics import RowNumber, Rank
query = (RedshiftQuery.from_(events)
         .select(
             events.user_id,
             events.event_time,
             RowNumber().over(events.user_id).orderby(events.event_time).as_('event_sequence'),
             Rank().over().orderby(events.score.desc()).as_('global_rank')
         ))

# Redshift date functions
from pypika.functions import DateDiff, DateAdd
query = (RedshiftQuery.from_(users)
         .select(
             users.id,
             DateDiff('day', users.created_at, 'CURRENT_DATE').as_('account_age_days'),
             DateAdd('month', 1, users.created_at).as_('one_month_later')
         ))
```

### Vertica Dialect

Vertica-specific query builder with Vertica analytical features.

```python { .api }
class VerticaQuery(Query):
    """Vertica-specific query builder."""
```

**Usage Examples:**

```python
from pypika import VerticaQuery, Table

sales = Table('sales')
products = Table('products')

# Vertica analytical functions
from pypika.analytics import Sum, Avg, FirstValue, LastValue
query = (VerticaQuery.from_(sales)
         .select(
             sales.product_id,
             sales.sale_date,
             sales.amount,
             Sum(sales.amount).over(sales.product_id).orderby(sales.sale_date).as_('running_total'),
             Avg(sales.amount).over().orderby(sales.sale_date).rows_between(-6, 0).as_('7_day_avg'),
             FirstValue(sales.amount).over(sales.product_id).orderby(sales.sale_date).as_('first_sale')
         ))

# Vertica time series functions
from pypika.functions import TimestampAdd
query = (VerticaQuery.from_(sales)
         .select(
             sales.product_id,
             TimestampAdd('hour', 1, sales.sale_timestamp).as_('plus_one_hour')
         ))
```

### Snowflake Dialect

Snowflake-specific query builder with cloud data warehouse optimizations.

```python { .api }
class SnowflakeQuery(Query):
    """Snowflake-specific query builder."""
```

**Usage Examples:**

```python
from pypika import SnowflakeQuery, Table

users = Table('users')
orders = Table('orders')

# Basic Snowflake query
query = (SnowflakeQuery.from_(users)
         .select(users.id, users.name, users.email)
         .where(users.active == True))

# Snowflake-specific date functions
from pypika.functions import DateDiff, DateAdd
query = (SnowflakeQuery.from_(users)
         .select(
             users.id,
             DateDiff('day', users.created_at, 'CURRENT_DATE').as_('account_age_days'),
             DateAdd('month', 1, users.created_at).as_('one_month_later')
         ))

# Window functions with Snowflake syntax
from pypika.analytics import RowNumber, Avg
query = (SnowflakeQuery.from_(orders)
         .select(
             orders.customer_id,
             orders.order_date,
             orders.amount,
             RowNumber().over(orders.customer_id).orderby(orders.order_date).as_('order_sequence'),
             Avg(orders.amount).over().orderby(orders.order_date).rows_between(-2, 0).as_('3_order_avg')
         ))
```

### Dialect Selection

Choosing the appropriate dialect based on your database system.

**Usage Examples:**

```python
from pypika import (
    MySQLQuery, PostgreSQLQuery, OracleQuery, MSSQLQuery,
    ClickHouseQuery, RedshiftQuery, SQLLiteQuery, VerticaQuery,
    Query, Table, Column, Dialects
)

# Dialect-specific table creation
def create_user_table(dialect='postgresql'):
    users = Table('users')

    if dialect == 'postgresql':
        return (PostgreSQLQuery.create_table(users)
                .columns(
                    Column('id', 'SERIAL', primary_key=True),
                    Column('email', 'VARCHAR(255)', nullable=False, unique=True),
                    Column('profile', 'JSONB'),
                    Column('created_at', 'TIMESTAMP', default='NOW()')
                ))

    elif dialect == 'mysql':
        return (MySQLQuery.create_table(users)
                .columns(
                    Column('id', 'INT AUTO_INCREMENT', primary_key=True),
                    Column('email', 'VARCHAR(255)', nullable=False, unique=True),
                    Column('profile', 'JSON'),
                    Column('created_at', 'TIMESTAMP', default='CURRENT_TIMESTAMP')
                ))

    elif dialect == 'sqlite':
        return (SQLLiteQuery.create_table(users)
                .columns(
                    Column('id', 'INTEGER', primary_key=True, autoincrement=True),
                    Column('email', 'TEXT', nullable=False, unique=True),
                    Column('profile', 'TEXT'),
                    Column('created_at', 'DATETIME', default='CURRENT_TIMESTAMP')
                ))

# Dynamic dialect selection
def get_query_class(database_type):
    dialect_map = {
        'postgresql': PostgreSQLQuery,
        'mysql': MySQLQuery,
        'oracle': OracleQuery,
        'mssql': MSSQLQuery,
        'clickhouse': ClickHouseQuery,
        'redshift': RedshiftQuery,
        'sqlite': SQLLiteQuery,
        'vertica': VerticaQuery
    }
    return dialect_map.get(database_type, Query)

# Usage with dynamic selection
database_type = 'postgresql'  # From configuration
QueryClass = get_query_class(database_type)
users = Table('users')

query = QueryClass.from_(users).select('*').where(users.active == True)
```

### Dialect Enumerations

```python { .api }
class Dialects:
    VERTICA: str
    CLICKHOUSE: str
    ORACLE: str
    MSSQL: str
    MYSQL: str
    POSTGRESQL: str
    REDSHIFT: str
    SQLLITE: str
    SNOWFLAKE: str
```

**Usage Examples:**

```python
from pypika import Dialects, Interval

# Using dialect enumeration
current_dialect = Dialects.POSTGRESQL

# Dialect-specific interval formatting
if current_dialect == Dialects.POSTGRESQL:
    interval = Interval(days=30, dialect=Dialects.POSTGRESQL)
elif current_dialect == Dialects.MYSQL:
    interval = Interval(days=30, dialect=Dialects.MYSQL)

# Dialect-aware function selection
def get_date_function(dialect):
    if dialect == Dialects.POSTGRESQL:
        return 'NOW()'
    elif dialect == Dialects.MYSQL:
        return 'NOW()'
    elif dialect == Dialects.ORACLE:
        return 'SYSDATE'
    elif dialect == Dialects.MSSQL:
        return 'GETDATE()'
    else:
        return 'CURRENT_TIMESTAMP'
```