# S3 Operations

Async S3 operations including file transfers, object operations, and enhanced functionality for working with Amazon S3. aioboto3 provides async versions of boto3's S3 transfer methods, with progress callbacks and advanced configuration options.

## Capabilities

### File Upload Operations

Async file upload operations with progress tracking and transfer configuration options.

```python { .api }
async def upload_file(
    Filename: str,
    Bucket: str,
    Key: str,
    Callback=None,
    Config=None
):
    """
    Upload a file to S3.

    Parameters:
    - Filename: Local file path to upload
    - Bucket: S3 bucket name
    - Key: S3 object key (path within bucket)
    - Callback: Optional progress callback function
    - Config: boto3.s3.transfer.TransferConfig for advanced transfer settings
    """

async def upload_fileobj(
    Fileobj,
    Bucket: str,
    Key: str,
    Callback=None,
    Config=None
):
    """
    Upload a file-like object to S3.

    Parameters:
    - Fileobj: File-like object to upload (must support read())
    - Bucket: S3 bucket name
    - Key: S3 object key
    - Callback: Optional progress callback function
    - Config: boto3.s3.transfer.TransferConfig for advanced transfer settings
    """
```

### File Download Operations

Async file download operations with progress tracking and error handling.

```python { .api }
async def download_file(
    Bucket: str,
    Key: str,
    Filename: str,
    Callback=None,
    Config=None
):
    """
    Download a file from S3.

    Parameters:
    - Bucket: S3 bucket name
    - Key: S3 object key to download
    - Filename: Local file path to save to
    - Callback: Optional progress callback function
    - Config: boto3.s3.transfer.TransferConfig for advanced transfer settings
    """

async def download_fileobj(
    Bucket: str,
    Key: str,
    Fileobj,
    Callback=None,
    Config=None
):
    """
    Download an S3 object to a file-like object.

    Parameters:
    - Bucket: S3 bucket name
    - Key: S3 object key to download
    - Fileobj: File-like object to write to (must support write())
    - Callback: Optional progress callback function
    - Config: boto3.s3.transfer.TransferConfig for advanced transfer settings
    """
```

### Copy Operations

Async S3 object copy operations within and between buckets.

```python { .api }
async def copy(
    CopySource: dict,
    Bucket: str,
    Key: str,
    Callback=None,
    Config=None
):
    """
    Copy an S3 object from one location to another.

    Parameters:
    - CopySource: Dictionary specifying the source bucket and key,
      e.g. {'Bucket': 'source-bucket', 'Key': 'source-key'}
    - Bucket: Destination bucket name
    - Key: Destination object key
    - Callback: Optional progress callback function
    - Config: boto3.s3.transfer.TransferConfig for advanced transfer settings
    """
```

### Enhanced S3 Resource Methods

Extended functionality for S3 bucket and object resources with async support.

```python { .api }
class S3Bucket:
    async def upload_file(self, Filename: str, Key: str, **kwargs):
        """Upload a local file to this bucket."""

    async def download_file(self, Key: str, Filename: str, **kwargs):
        """Download an object from this bucket to a local file."""

    async def upload_fileobj(self, Fileobj, Key: str, **kwargs):
        """Upload a file-like object to this bucket."""

    async def download_fileobj(self, Key: str, Fileobj, **kwargs):
        """Download an object from this bucket to a file-like object."""

    async def copy(self, CopySource: dict, Key: str, **kwargs):
        """Copy an object into this bucket."""

    @property
    def objects(self) -> S3ObjectCollection:
        """Collection of all objects in this bucket."""

class S3Object:
    async def upload_file(self, Filename: str, **kwargs):
        """Upload a local file to this object."""

    async def download_file(self, Filename: str, **kwargs):
        """Download this object to a local file."""

    async def upload_fileobj(self, Fileobj, **kwargs):
        """Upload a file-like object to this object."""

    async def download_fileobj(self, Fileobj, **kwargs):
        """Download this object to a file-like object."""

class S3ObjectSummary:
    async def load(self):
        """Load object metadata asynchronously."""

class S3ObjectCollection:
    """Async iterable collection of S3 objects."""

    def __aiter__(self):
        """Return an async iterator over the objects."""

    async def __anext__(self):
        """Get the next object in the collection."""

    async def pages(self):
        """Yield pages of objects as an async iterator."""

    def all(self):
        """Return an async iterable of all objects in the collection."""

    def filter(self, **kwargs):
        """Return a filtered collection (e.g. by Prefix)."""

    def limit(self, count: int):
        """Limit the number of objects returned."""

    def page_size(self, count: int):
        """Set the page size for pagination."""
```
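
The collection helpers compose before iteration starts. A minimal sketch of filtering and capping a listing, assuming a bucket named `my-bucket` with objects under a `logs/` prefix (both names are illustrative):

```python
import aioboto3

async def list_some_logs():
    session = aioboto3.Session()

    async with session.resource('s3', region_name='us-east-1') as s3:
        bucket = await s3.Bucket('my-bucket')

        # filter() and limit() build a lazy view of the collection;
        # nothing is fetched until the async iteration begins.
        async for obj_summary in bucket.objects.filter(Prefix='logs/').limit(10):
            print(f"Object: {obj_summary.key}")
```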

### Transfer Configuration

Configuration options for S3 transfer operations using boto3's TransferConfig.

```python { .api }
from boto3.s3.transfer import TransferConfig

# A TransferConfig can be passed to any transfer method via its Config parameter
config = TransferConfig(
    multipart_threshold=25 * 1024 * 1024,  # 25 MB
    max_concurrency=10,
    multipart_chunksize=25 * 1024 * 1024,  # 25 MB
    use_threads=True
)
```

## Usage Examples

### Basic File Operations

```python
import aioboto3
import asyncio

async def basic_s3_operations():
    session = aioboto3.Session()

    async with session.client('s3', region_name='us-east-1') as s3:
        # Upload a file
        await s3.upload_file(
            '/path/to/local/file.txt',
            'my-bucket',
            'uploads/file.txt'
        )

        # Download a file
        await s3.download_file(
            'my-bucket',
            'uploads/file.txt',
            '/path/to/downloaded/file.txt'
        )

        # List objects
        response = await s3.list_objects_v2(Bucket='my-bucket')
        for obj in response.get('Contents', []):
            print(f"Object: {obj['Key']}, Size: {obj['Size']}")

# Run the example
asyncio.run(basic_s3_operations())
```
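
Because each transfer method is a coroutine, independent transfers can run concurrently on a single client. A sketch using `asyncio.gather`; the local paths and keys are illustrative:

```python
import asyncio
import aioboto3

async def concurrent_uploads():
    session = aioboto3.Session()

    async with session.client('s3', region_name='us-east-1') as s3:
        # Schedule several uploads and wait for all of them together;
        # the event loop interleaves the transfers.
        await asyncio.gather(
            s3.upload_file('/tmp/a.txt', 'my-bucket', 'uploads/a.txt'),
            s3.upload_file('/tmp/b.txt', 'my-bucket', 'uploads/b.txt'),
            s3.upload_file('/tmp/c.txt', 'my-bucket', 'uploads/c.txt'),
        )
```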

### File Object Operations

```python
import aioboto3
from io import BytesIO

async def fileobj_operations():
    session = aioboto3.Session()

    async with session.client('s3', region_name='us-east-1') as s3:
        # Upload from memory
        data = b"Hello, World!"
        fileobj = BytesIO(data)

        await s3.upload_fileobj(
            fileobj,
            'my-bucket',
            'data/hello.txt'
        )

        # Download to memory
        download_fileobj = BytesIO()
        await s3.download_fileobj(
            'my-bucket',
            'data/hello.txt',
            download_fileobj
        )

        download_fileobj.seek(0)
        content = download_fileobj.read()
        print(f"Downloaded content: {content}")
```

### Progress Tracking

```python
import aioboto3

async def upload_with_progress():
    session = aioboto3.Session()
    total = 0

    def progress_callback(bytes_transferred):
        # The callback receives the number of bytes transferred since
        # the previous call, so accumulate for a running total.
        nonlocal total
        total += bytes_transferred
        print(f"Transferred: {total} bytes")

    async with session.client('s3', region_name='us-east-1') as s3:
        await s3.upload_file(
            '/path/to/large/file.zip',
            'my-bucket',
            'uploads/large-file.zip',
            Callback=progress_callback
        )
```
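
For percentage reporting, the callback can be a small class that knows the total size up front, in the style of boto3's documented progress examples. A sketch assuming the source file exists locally (the path is illustrative):

```python
import os
import aioboto3

class ProgressPercentage:
    """Accumulates incremental byte counts and prints a percentage."""

    def __init__(self, filename):
        self._size = os.path.getsize(filename)
        self._seen_so_far = 0

    def __call__(self, bytes_transferred):
        self._seen_so_far += bytes_transferred
        percentage = (self._seen_so_far / self._size) * 100
        print(f"{self._seen_so_far} / {self._size} bytes ({percentage:.1f}%)")

async def upload_with_percentage():
    session = aioboto3.Session()

    async with session.client('s3', region_name='us-east-1') as s3:
        await s3.upload_file(
            '/path/to/large/file.zip',
            'my-bucket',
            'uploads/large-file.zip',
            Callback=ProgressPercentage('/path/to/large/file.zip')
        )
```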

### Advanced Transfer Configuration

```python
import aioboto3
from boto3.s3.transfer import TransferConfig

async def configured_transfer():
    session = aioboto3.Session()

    # Configure transfers for large files
    config = TransferConfig(
        multipart_threshold=25 * 1024 * 1024,  # use multipart above 25 MB
        max_concurrency=10,                    # up to 10 concurrent parts
        multipart_chunksize=25 * 1024 * 1024,  # 25 MB parts
        use_threads=True
    )

    async with session.client('s3', region_name='us-east-1') as s3:
        await s3.upload_file(
            '/path/to/very/large/file.zip',
            'my-bucket',
            'uploads/large-file.zip',
            Config=config
        )
```

### Copy Operations

```python
import aioboto3

async def copy_objects():
    session = aioboto3.Session()

    async with session.client('s3', region_name='us-east-1') as s3:
        # Copy an object within the same bucket
        copy_source = {
            'Bucket': 'my-bucket',
            'Key': 'original/file.txt'
        }

        await s3.copy(
            copy_source,
            'my-bucket',
            'copies/file.txt'
        )

        # Copy an object between buckets
        copy_source = {
            'Bucket': 'source-bucket',
            'Key': 'data/file.txt'
        }

        await s3.copy(
            copy_source,
            'destination-bucket',
            'imported/file.txt'
        )
```
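
The copy method accepts the same `Callback` and `Config` parameters as the other transfer methods, which matters for large objects. A sketch raising the multipart threshold; the bucket and key names are illustrative:

```python
import aioboto3
from boto3.s3.transfer import TransferConfig

async def copy_large_object():
    session = aioboto3.Session()

    # Only switch to multipart copying above 100 MB
    config = TransferConfig(multipart_threshold=100 * 1024 * 1024)

    async with session.client('s3', region_name='us-east-1') as s3:
        await s3.copy(
            {'Bucket': 'source-bucket', 'Key': 'big/archive.zip'},
            'destination-bucket',
            'imports/archive.zip',
            Config=config
        )
```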

### Using S3 Resources

```python
import aioboto3

async def s3_resource_operations():
    session = aioboto3.Session()

    async with session.resource('s3', region_name='us-east-1') as s3:
        bucket = await s3.Bucket('my-bucket')

        # Upload using the bucket resource
        await bucket.upload_file(
            '/path/to/file.txt',
            'uploads/file.txt'
        )

        # Work with an object resource
        obj = await s3.Object('my-bucket', 'uploads/file.txt')
        await obj.download_file('/path/to/downloaded/file.txt')

        # List objects in the bucket
        async for obj_summary in bucket.objects.all():
            print(f"Object: {obj_summary.key}")
```

### Error Handling

```python
import aioboto3
import botocore.exceptions

async def handle_s3_errors():
    session = aioboto3.Session()

    try:
        async with session.client('s3', region_name='us-east-1') as s3:
            await s3.upload_file(
                '/nonexistent/file.txt',
                'my-bucket',
                'uploads/file.txt'
            )

    except FileNotFoundError:
        print("Local file not found")

    except botocore.exceptions.ClientError as e:
        error_code = e.response['Error']['Code']

        if error_code == 'NoSuchBucket':
            print("Bucket does not exist")
        elif error_code == 'AccessDenied':
            print("Access denied to bucket or object")
        elif error_code == 'NoSuchKey':
            print("Object key does not exist")
        else:
            print(f"S3 error: {error_code}")

    except Exception as e:
        print(f"Unexpected error: {e}")
```
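
A common variant is probing for an object before acting on it. `head_object` fetches metadata only and raises a `ClientError` with a `404` code when the key is absent (head responses carry no body, so the code is `404` rather than `NoSuchKey`). A minimal sketch, with illustrative names:

```python
import aioboto3
import botocore.exceptions

async def object_exists(bucket: str, key: str) -> bool:
    session = aioboto3.Session()

    async with session.client('s3', region_name='us-east-1') as s3:
        try:
            # A cheap existence probe: metadata only, no object body
            await s3.head_object(Bucket=bucket, Key=key)
            return True
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == '404':
                return False
            raise  # other errors (e.g. AccessDenied) should propagate
```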