0
# File Storage
1
2
Complete file upload, download, and management operations with support for resumable uploads, batch operations, and comprehensive bucket management.
3
4
## Capabilities
5
6
### File Upload Operations
7
8
High-level upload functions supporting various data sources with resumable upload capabilities.
9
10
```python { .api }
11
def put_file_v2(up_token: str, key: str, file_path: str, params: dict = None, mime_type: str = 'application/octet-stream', check_crc: bool = False, progress_handler = None, upload_progress_recorder = None, keep_last_modified: bool = False, part_size: int = None, version: str = 'v2', bucket_name: str = None, metadata: dict = None, regions = None, accelerate_uploading: bool = False) -> tuple:
12
"""
13
Upload file from local path (current version - recommended).
14
15
Args:
16
up_token: Upload token from Auth.upload_token()
17
key: File key in bucket
18
file_path: Local file path
19
params: Custom upload parameters
20
mime_type: File MIME type
21
check_crc: Enable CRC32 checksum verification
22
progress_handler: Callback for upload progress (func(uploaded, total))
23
upload_progress_recorder: UploadProgressRecorder for resumable uploads
24
keep_last_modified: Preserve original file modification time
25
part_size: Chunk size for resumable upload (default: 4MB)
26
version: Upload version ('v1' or 'v2')
27
bucket_name: Target bucket name
28
metadata: Custom metadata dictionary
29
regions: Region configuration
30
accelerate_uploading: Enable upload acceleration
31
32
Returns:
33
(dict, ResponseInfo): Upload result and response info
34
"""
35
36
def put_data(up_token: str, key: str, data: bytes, params: dict = None, mime_type: str = 'application/octet-stream', check_crc: bool = False, progress_handler = None, fname: str = None, hostscache_dir: str = None, metadata: dict = None, regions = None, accelerate_uploading: bool = False) -> tuple:
37
"""
38
Upload binary data directly.
39
40
Args:
41
up_token: Upload token
42
key: File key in bucket
43
data: Binary data to upload
44
params: Custom upload parameters
45
mime_type: Data MIME type
46
check_crc: Enable CRC32 verification
47
progress_handler: Progress callback function
48
fname: Original filename for metadata
49
hostscache_dir: Host cache directory
50
metadata: Custom metadata
51
regions: Region configuration
52
accelerate_uploading: Enable acceleration
53
54
Returns:
55
(dict, ResponseInfo): Upload result and response info
56
"""
57
58
def put_stream_v2(up_token: str, key: str, input_stream, file_name: str, data_size: int, params: dict = None, mime_type: str = None, progress_handler = None, upload_progress_recorder = None, modify_time: int = None, keep_last_modified: bool = False, part_size: int = None, version: str = 'v2', bucket_name: str = None, metadata: dict = None, regions = None, accelerate_uploading: bool = False) -> tuple:
59
"""
60
Upload from input stream (current version - recommended).
61
62
Args:
63
up_token: Upload token
64
key: File key in bucket
65
input_stream: Input stream object (file-like)
66
file_name: Original filename
67
data_size: Total data size in bytes
68
params: Custom upload parameters
69
mime_type: Stream MIME type
70
progress_handler: Progress callback function
71
upload_progress_recorder: Progress recorder for resumable uploads
72
modify_time: File modification timestamp
73
keep_last_modified: Preserve modification time
74
part_size: Chunk size for upload
75
version: Upload version
76
bucket_name: Target bucket name
77
metadata: Custom metadata
78
regions: Region configuration
79
accelerate_uploading: Enable acceleration
80
81
Returns:
82
(dict, ResponseInfo): Upload result and response info
83
"""
84
85
def put_file(up_token: str, key: str, file_path: str, **kwargs) -> tuple:
86
"""
87
Upload file from local path (deprecated - use put_file_v2).
88
89
Args:
90
up_token: Upload token
91
key: File key
92
file_path: Local file path
93
**kwargs: Additional parameters
94
95
Returns:
96
(dict, ResponseInfo): Upload result and response info
97
"""
98
99
def put_stream(up_token: str, key: str, input_stream, file_name: str, data_size: int, **kwargs) -> tuple:
100
"""
101
Upload from input stream (deprecated - use put_stream_v2).
102
103
Args:
104
up_token: Upload token
105
key: File key
106
input_stream: Input stream
107
file_name: Original filename
108
data_size: Data size
109
**kwargs: Additional parameters
110
111
Returns:
112
(dict, ResponseInfo): Upload result and response info
113
"""
114
```
115
116
### Bucket and File Management
117
118
Comprehensive bucket and file management operations through the `BucketManager` class.
119
120
```python { .api }
121
class BucketManager:
122
def __init__(self, auth: Auth, zone = None, regions = None, query_regions_endpoints = None, preferred_scheme: str = 'http'):
123
"""
124
Initialize bucket manager.
125
126
Args:
127
auth: Auth instance for authentication
128
zone: Upload zone configuration (deprecated, use regions)
129
regions: Region configuration
130
query_regions_endpoints: Custom region query endpoints
131
preferred_scheme: Preferred URL scheme ('http' or 'https')
132
"""
133
134
def list(self, bucket: str, prefix: str = None, marker: str = None, limit: int = None, delimiter: str = None) -> tuple:
135
"""
136
List files in bucket.
137
138
Args:
139
bucket: Bucket name
140
prefix: Key prefix filter
141
marker: Pagination marker (from previous response)
142
limit: Maximum number of files to return
143
delimiter: Delimiter for hierarchical listing
144
145
Returns:
146
(dict, bool, ResponseInfo): (ret, eof, response_info) — ret contains 'items' (list of file-info dicts) and, when eof is False, 'marker' (the pagination marker for the next call)
147
"""
148
149
def stat(self, bucket: str, key: str) -> tuple:
150
"""
151
Get file information.
152
153
Args:
154
bucket: Bucket name
155
key: File key
156
157
Returns:
158
(dict, ResponseInfo): File info and response info
159
"""
160
161
def delete(self, bucket: str, key: str) -> tuple:
162
"""
163
Delete file.
164
165
Args:
166
bucket: Bucket name
167
key: File key
168
169
Returns:
170
(dict, ResponseInfo): Result and response info
171
"""
172
173
def rename(self, bucket: str, key: str, key_to: str, force: str = 'false') -> tuple:
174
"""
175
Rename file within bucket.
176
177
Args:
178
bucket: Bucket name
179
key: Current file key
180
key_to: New file key
181
force: Force overwrite if target exists (string 'true' or 'false')
182
183
Returns:
184
(dict, ResponseInfo): Result and response info
185
"""
186
187
def move(self, bucket: str, key: str, bucket_to: str, key_to: str, force: str = 'false') -> tuple:
188
"""
189
Move file to different bucket/key.
190
191
Args:
192
bucket: Source bucket name
193
key: Source file key
194
bucket_to: Destination bucket name
195
key_to: Destination file key
196
force: Force overwrite if target exists (string 'true' or 'false')
197
198
Returns:
199
(dict, ResponseInfo): Result and response info
200
"""
201
202
def copy(self, bucket: str, key: str, bucket_to: str, key_to: str, force: str = 'false') -> tuple:
203
"""
204
Copy file to different bucket/key.
205
206
Args:
207
bucket: Source bucket name
208
key: Source file key
209
bucket_to: Destination bucket name
210
key_to: Destination file key
211
force: Force overwrite if target exists (string 'true' or 'false')
212
213
Returns:
214
(dict, ResponseInfo): Result and response info
215
"""
216
217
def fetch(self, url: str, bucket: str, key: str = None, hostscache_dir: str = None) -> tuple:
218
"""
219
Fetch file from external URL.
220
221
Args:
222
url: External file URL
223
bucket: Target bucket name
224
key: Target file key (auto-generated if None)
225
hostscache_dir: Host cache directory
226
227
Returns:
228
(dict, ResponseInfo): Result and response info
229
"""
230
231
def prefetch(self, bucket: str, key: str, hostscache_dir: str = None) -> tuple:
232
"""
233
Prefetch file from origin (mirror storage).
234
235
Args:
236
bucket: Bucket name
237
key: File key
238
hostscache_dir: Host cache directory
239
240
Returns:
241
(dict, ResponseInfo): Result and response info
242
"""
243
244
def change_mime(self, bucket: str, key: str, mime: str) -> tuple:
245
"""
246
Change file MIME type.
247
248
Args:
249
bucket: Bucket name
250
key: File key
251
mime: New MIME type
252
253
Returns:
254
(dict, ResponseInfo): Result and response info
255
"""
256
257
def change_type(self, bucket: str, key: str, storage_type: int) -> tuple:
258
"""
259
Change file storage type.
260
261
Args:
262
bucket: Bucket name
263
key: File key
264
storage_type: Storage type (0=Standard, 1=IA, 2=Archive, 3=Deep Archive)
265
266
Returns:
267
(dict, ResponseInfo): Result and response info
268
"""
269
270
def change_status(self, bucket: str, key: str, status: int, cond: dict = None) -> tuple:
271
"""
272
Change file status.
273
274
Args:
275
bucket: Bucket name
276
key: File key
277
status: New status
278
cond: Conditional parameters
279
280
Returns:
281
(dict, ResponseInfo): Result and response info
282
"""
283
284
def delete_after_days(self, bucket: str, key: str, days: int) -> tuple:
285
"""
286
Set file deletion schedule.
287
288
Args:
289
bucket: Bucket name
290
key: File key
291
days: Days until deletion
292
293
Returns:
294
(dict, ResponseInfo): Result and response info
295
"""
296
297
def restore_ar(self, bucket: str, key: str, freeze_after_days: int) -> tuple:
298
"""
299
Restore archived file.
300
301
Args:
302
bucket: Bucket name
303
key: File key
304
freeze_after_days: Days to keep the file restored before re-archiving (valid range 1-7 per the Qiniu restoreAr API)
305
306
Returns:
307
(dict, ResponseInfo): Result and response info
308
"""
309
310
def buckets(self) -> tuple:
311
"""
312
List all accessible buckets.
313
314
Returns:
315
(list, ResponseInfo): Bucket list and response info
316
"""
317
318
def mkbucketv3(self, bucket_name: str, region: str) -> tuple:
319
"""
320
Create new bucket.
321
322
Args:
323
bucket_name: Name for new bucket
324
region: Target region identifier
325
326
Returns:
327
(dict, ResponseInfo): Result and response info
328
"""
329
330
def bucket_info(self, bucket_name: str) -> tuple:
331
"""
332
Get bucket information.
333
334
Args:
335
bucket_name: Bucket name
336
337
Returns:
338
(dict, ResponseInfo): Bucket info and response info
339
"""
340
341
def list_domains(self, bucket: str) -> tuple:
342
"""
343
List bucket domains.
344
345
Args:
346
bucket: Bucket name
347
348
Returns:
349
(list, ResponseInfo): Domain list and response info
350
"""
351
352
def batch(self, operations: list) -> tuple:
353
"""
354
Execute batch operations.
355
356
Args:
357
operations: List of operation commands
358
359
Returns:
360
(list, ResponseInfo): Results list and response info
361
"""
362
```
363
364
### Batch Operations
365
366
Helper functions to build batch operation commands for efficient bulk operations.
367
368
```python { .api }
369
def build_batch_copy(source_bucket: str, key_pairs: list, target_bucket: str, force: str = 'false') -> list:
370
"""
371
Build batch copy operations.
372
373
Args:
374
source_bucket: Source bucket name
375
key_pairs: List of (source_key, target_key) tuples
376
target_bucket: Target bucket name
377
force: Force overwrite existing files
378
379
Returns:
380
List of batch operation commands
381
"""
382
383
def build_batch_rename(bucket: str, key_pairs: list, force: str = 'false') -> list:
384
"""
385
Build batch rename operations.
386
387
Args:
388
bucket: Bucket name
389
key_pairs: List of (old_key, new_key) tuples
390
force: Force overwrite existing files
391
392
Returns:
393
List of batch operation commands
394
"""
395
396
def build_batch_move(source_bucket: str, key_pairs: list, target_bucket: str, force: str = 'false') -> list:
397
"""
398
Build batch move operations.
399
400
Args:
401
source_bucket: Source bucket name
402
key_pairs: List of (source_key, target_key) tuples
403
target_bucket: Target bucket name
404
force: Force overwrite existing files
405
406
Returns:
407
List of batch operation commands
408
"""
409
410
def build_batch_stat(bucket: str, keys: list) -> list:
411
"""
412
Build batch stat operations.
413
414
Args:
415
bucket: Bucket name
416
keys: List of file keys
417
418
Returns:
419
List of batch operation commands
420
"""
421
422
def build_batch_delete(bucket: str, keys: list) -> list:
423
"""
424
Build batch delete operations.
425
426
Args:
427
bucket: Bucket name
428
keys: List of file keys to delete
429
430
Returns:
431
List of batch operation commands
432
"""
433
434
def build_batch_restore_ar(bucket: str, keys: list) -> list:
435
"""
436
Build batch restore archive operations.
437
438
Args:
439
bucket: Bucket name
440
keys: List of archived file keys
441
442
Returns:
443
List of batch operation commands
444
"""
445
```
446
447
### Upload Progress Recording
448
449
Persistent progress tracking for resumable uploads.
450
451
```python { .api }
452
class UploadProgressRecorder:
453
def __init__(self, record_folder: str = None):
454
"""
455
Initialize upload progress recorder.
456
457
Args:
458
record_folder: Directory to store progress records (default: temp dir)
459
"""
460
461
def has_upload_record(self, file_name: str, key: str) -> bool:
462
"""
463
Check if upload record exists.
464
465
Args:
466
file_name: Local file name
467
key: Upload key
468
469
Returns:
470
True if record exists
471
"""
472
473
def get_upload_record(self, file_name: str, key: str) -> dict:
474
"""
475
Get upload progress record.
476
477
Args:
478
file_name: Local file name
479
key: Upload key
480
481
Returns:
482
Progress record dictionary
483
"""
484
485
def set_upload_record(self, file_name: str, key: str, data: dict):
486
"""
487
Save upload progress record.
488
489
Args:
490
file_name: Local file name
491
key: Upload key
492
data: Progress data to save
493
"""
494
495
def delete_upload_record(self, file_name: str, key: str):
496
"""
497
Delete upload progress record.
498
499
Args:
500
file_name: Local file name
501
key: Upload key
502
"""
503
```
504
505
## Usage Examples
506
507
### Basic File Upload
508
509
```python
510
from qiniu import Auth, put_file_v2
511
512
# Initialize authentication
513
auth = Auth(access_key, secret_key)
514
token = auth.upload_token('my-bucket', 'my-file.jpg')
515
516
# Upload file
517
ret, info = put_file_v2(token, 'my-file.jpg', './local/path/image.jpg')
518
519
if info.ok():
520
print(f"Upload successful: {ret['key']} -> {ret['hash']}")
521
else:
522
print(f"Upload failed: {info.error}")
523
```
524
525
### Resumable Upload with Progress
526
527
```python
528
from qiniu import Auth, put_file_v2, UploadProgressRecorder
529
530
def progress_handler(uploaded_bytes, total_bytes):
531
progress = uploaded_bytes / total_bytes * 100
532
print(f"Upload progress: {progress:.2f}%")
533
534
auth = Auth(access_key, secret_key)
535
token = auth.upload_token('my-bucket')
536
537
# Enable resumable upload with progress tracking
538
recorder = UploadProgressRecorder()
539
ret, info = put_file_v2(
540
token,
541
'large-file.zip',
542
'./large-file.zip',
543
progress_handler=progress_handler,
544
upload_progress_recorder=recorder,
545
part_size=8 * 1024 * 1024 # 8MB chunks
546
)
547
```
548
549
### Upload Binary Data
550
551
```python
552
from qiniu import Auth, put_data
553
554
auth = Auth(access_key, secret_key)
555
token = auth.upload_token('my-bucket', 'data.json')
556
557
# Upload JSON data
558
import json
559
data = json.dumps({"message": "Hello Qiniu"}).encode('utf-8')
560
ret, info = put_data(token, 'data.json', data, mime_type='application/json')
561
```
562
563
### File Management Operations
564
565
```python
566
from qiniu import Auth, BucketManager
567
568
auth = Auth(access_key, secret_key)
569
bucket_manager = BucketManager(auth)
570
571
# Get file info
572
ret, info = bucket_manager.stat('my-bucket', 'my-file.jpg')
573
if info.ok():
574
print(f"File size: {ret['fsize']} bytes")
575
print(f"MIME type: {ret['mimeType']}")
576
577
# List files with prefix
578
ret, eof, info = bucket_manager.list('my-bucket', prefix='images/', limit=100)
579
for item in ret['items']:
580
print(f"File: {item['key']}, Size: {item['fsize']}")
581
582
# Copy file
583
ret, info = bucket_manager.copy('my-bucket', 'original.jpg', 'my-bucket', 'backup.jpg')
584
585
# Delete file
586
ret, info = bucket_manager.delete('my-bucket', 'old-file.jpg')
587
```
588
589
### Batch Operations
590
591
```python
592
from qiniu import Auth, BucketManager, build_batch_delete, build_batch_copy
593
594
auth = Auth(access_key, secret_key)
595
bucket_manager = BucketManager(auth)
596
597
# Batch delete multiple files
598
keys_to_delete = ['file1.jpg', 'file2.png', 'file3.pdf']
599
delete_ops = build_batch_delete('my-bucket', keys_to_delete)
600
results, info = bucket_manager.batch(delete_ops)
601
602
# Batch copy files
603
key_pairs = [('src1.jpg', 'dst1.jpg'), ('src2.png', 'dst2.png')]
604
copy_ops = build_batch_copy('source-bucket', key_pairs, 'target-bucket')
605
results, info = bucket_manager.batch(copy_ops)
606
607
# Check results
608
for i, result in enumerate(results):
609
if result['code'] == 200:
610
print(f"Operation {i} succeeded")
611
else:
612
print(f"Operation {i} failed: {result}")
613
```
614
615
### Fetch External Files
616
617
```python
618
from qiniu import Auth, BucketManager
619
620
auth = Auth(access_key, secret_key)
621
bucket_manager = BucketManager(auth)
622
623
# Fetch file from external URL
624
external_url = 'https://example.com/image.jpg'
625
ret, info = bucket_manager.fetch(external_url, 'my-bucket', 'fetched-image.jpg')
626
627
if info.ok():
628
print(f"Fetched file: {ret['key']}")
629
print(f"File size: {ret['fsize']} bytes")
630
```