# Storage

Storage abstractions and context management for persisting indices, documents, and vector stores with support for various backends. The storage system provides comprehensive persistence capabilities for maintaining index state, document collections, and retrieval contexts across application sessions.

## Capabilities

### Storage Context Management

Central storage context for coordinating persistence across different storage backends and maintaining consistency.
```python { .api }
class StorageContext:
    """
    Storage context for managing persistence of indices, documents, and vector stores.

    Parameters:
    - docstore: Optional[BaseDocumentStore], document storage backend
    - index_store: Optional[BaseIndexStore], index metadata storage
    - vector_store: Optional[VectorStore], vector embedding storage
    - property_graph_store: Optional[PropertyGraphStore], property graph storage
    - graph_store: Optional[GraphStore], knowledge graph storage (deprecated)
    - image_store: Optional[BaseImageStore], image storage backend
    - persist_dir: Optional[str], directory for persistence
    """
    def __init__(
        self,
        docstore: Optional[BaseDocumentStore] = None,
        index_store: Optional[BaseIndexStore] = None,
        vector_store: Optional[VectorStore] = None,
        property_graph_store: Optional[PropertyGraphStore] = None,
        graph_store: Optional[GraphStore] = None,
        image_store: Optional[BaseImageStore] = None,
        persist_dir: Optional[str] = None,
        **kwargs
    ): ...

    @classmethod
    def from_defaults(
        cls,
        docstore: Optional[BaseDocumentStore] = None,
        index_store: Optional[BaseIndexStore] = None,
        vector_store: Optional[VectorStore] = None,
        property_graph_store: Optional[PropertyGraphStore] = None,
        graph_store: Optional[GraphStore] = None,
        image_store: Optional[BaseImageStore] = None,
        persist_dir: Optional[str] = None,
        **kwargs
    ) -> "StorageContext":
        """
        Create StorageContext with default storage implementations.

        Parameters:
        - docstore: Optional[BaseDocumentStore], document storage backend
        - index_store: Optional[BaseIndexStore], index storage backend
        - vector_store: Optional[VectorStore], vector storage backend
        - property_graph_store: Optional[PropertyGraphStore], property graph storage
        - graph_store: Optional[GraphStore], knowledge graph storage
        - image_store: Optional[BaseImageStore], image storage backend
        - persist_dir: Optional[str], persistence directory path

        Returns:
        - StorageContext, configured storage context with default implementations
        """

    def persist(self, persist_dir: Optional[str] = None) -> None:
        """
        Persist all storage components to disk.

        Parameters:
        - persist_dir: Optional[str], directory path for persistence
        """

    @classmethod
    def from_persist_dir(
        cls,
        persist_dir: str,
        docstore_fname: str = "docstore.json",
        index_store_fname: str = "index_store.json",
        vector_store_fname: str = "vector_store.json",
        property_graph_store_fname: str = "property_graph_store.json",
        graph_store_fname: str = "graph_store.json",
        image_store_fname: str = "image_store.json",
        **kwargs
    ) -> "StorageContext":
        """
        Load StorageContext from persisted directory.

        Parameters:
        - persist_dir: str, directory containing persisted storage files
        - docstore_fname: str, filename for document store data
        - index_store_fname: str, filename for index store data
        - vector_store_fname: str, filename for vector store data
        - property_graph_store_fname: str, filename for property graph data
        - graph_store_fname: str, filename for graph store data
        - image_store_fname: str, filename for image store data

        Returns:
        - StorageContext, loaded storage context from disk
        """

    def to_dict(self) -> dict:
        """Convert StorageContext to dictionary representation."""

    @classmethod
    def from_dict(cls, config_dict: dict) -> "StorageContext":
        """Create StorageContext from dictionary configuration."""
```

### Document Storage

Persistent storage for document collections with metadata and relationship management.
```python { .api }
class BaseDocumentStore:
    """
    Base interface for document storage implementations.

    Document stores persist document objects and their metadata, supporting
    efficient retrieval by document ID and batch operations.
    """
    def add_documents(
        self,
        docs: Sequence[BaseNode],
        allow_update: bool = True,
        batch_size: Optional[int] = None,
        store_text: bool = True
    ) -> None:
        """
        Add documents to the store.

        Parameters:
        - docs: Sequence[BaseNode], documents to store
        - allow_update: bool, whether to allow updating existing documents
        - batch_size: Optional[int], batch size for bulk operations
        - store_text: bool, whether to store document text content
        """

    def get_document(
        self,
        doc_id: str,
        raise_error: bool = True
    ) -> Optional[BaseNode]:
        """
        Retrieve document by ID.

        Parameters:
        - doc_id: str, document identifier
        - raise_error: bool, whether to raise error if document not found

        Returns:
        - Optional[BaseNode], retrieved document or None if not found
        """

    def get_documents(
        self,
        doc_ids: List[str],
        raise_error: bool = True
    ) -> List[BaseNode]:
        """
        Retrieve multiple documents by IDs.

        Parameters:
        - doc_ids: List[str], list of document identifiers
        - raise_error: bool, whether to raise error for missing documents

        Returns:
        - List[BaseNode], list of retrieved documents
        """

    def delete_document(self, doc_id: str, raise_error: bool = True) -> None:
        """
        Delete document from store.

        Parameters:
        - doc_id: str, document identifier to delete
        - raise_error: bool, whether to raise error if document not found
        """

    def delete_documents(
        self,
        doc_ids: List[str],
        raise_error: bool = True
    ) -> None:
        """
        Delete multiple documents from store.

        Parameters:
        - doc_ids: List[str], list of document identifiers to delete
        - raise_error: bool, whether to raise error for missing documents
        """

    def set_document_hash(self, doc_id: str, doc_hash: str) -> None:
        """Set hash for document to track changes."""

    def get_document_hash(self, doc_id: str) -> Optional[str]:
        """Get hash for document."""

    def get_all_document_hashes(self) -> Dict[str, str]:
        """Get hashes for all documents."""

    @property
    def docs(self) -> Dict[str, BaseNode]:
        """Get all documents as dictionary."""

    def persist(
        self,
        persist_path: str,
        fs: Optional[AbstractFileSystem] = None
    ) -> None:
        """
        Persist document store to disk.

        Parameters:
        - persist_path: str, path for persistence
        - fs: Optional[AbstractFileSystem], filesystem interface
        """
class SimpleDocumentStore(BaseDocumentStore):
    """
    Simple in-memory document store implementation.

    Parameters:
    - simple_file_store: Optional[SimpleFileStore], file store backend
    """
    def __init__(self, simple_file_store: Optional[SimpleFileStore] = None): ...

    @classmethod
    def from_persist_dir(
        cls,
        persist_dir: str,
        namespace: Optional[str] = None,
        fs: Optional[AbstractFileSystem] = None
    ) -> "SimpleDocumentStore":
        """Load SimpleDocumentStore from persisted directory."""

    @classmethod
    def from_persist_path(
        cls,
        persist_path: str,
        fs: Optional[AbstractFileSystem] = None
    ) -> "SimpleDocumentStore":
        """Load SimpleDocumentStore from persist path."""
```

### Index Storage

Metadata storage for index structures and configurations with support for multiple index types.
```python { .api }
class BaseIndexStore:
    """
    Base interface for index metadata storage.

    Index stores maintain metadata about index structures, their configuration,
    and relationships without storing the actual index data.
    """
    def add_index_struct(self, index_struct: IndexStruct) -> None:
        """
        Add index structure metadata to store.

        Parameters:
        - index_struct: IndexStruct, index structure metadata to store
        """

    def delete_index_struct(self, key: str) -> None:
        """
        Delete index structure from store.

        Parameters:
        - key: str, index structure key to delete
        """

    def get_index_struct(self, struct_id: Optional[str] = None) -> Optional[IndexStruct]:
        """
        Retrieve index structure by ID.

        Parameters:
        - struct_id: Optional[str], index structure identifier

        Returns:
        - Optional[IndexStruct], retrieved index structure or None
        """

    def get_index_structs(self) -> List[IndexStruct]:
        """
        Get all index structures.

        Returns:
        - List[IndexStruct], list of all stored index structures
        """

    @property
    def index_structs(self) -> Dict[str, IndexStruct]:
        """Get all index structures as dictionary."""
class SimpleIndexStore(BaseIndexStore):
    """
    Simple in-memory index store implementation.

    Parameters:
    - simple_file_store: Optional[SimpleFileStore], file store backend
    """
    def __init__(self, simple_file_store: Optional[SimpleFileStore] = None): ...

    def persist(
        self,
        persist_path: str,
        fs: Optional[AbstractFileSystem] = None
    ) -> None:
        """Persist index store to disk."""

    @classmethod
    def from_persist_dir(
        cls,
        persist_dir: str,
        namespace: Optional[str] = None,
        fs: Optional[AbstractFileSystem] = None
    ) -> "SimpleIndexStore":
        """Load SimpleIndexStore from persisted directory."""
```

### Vector Storage

Vector store interfaces and implementations for embedding storage and similarity search.
```python { .api }
class VectorStore:
    """
    Base interface for vector storage implementations.

    Vector stores handle embedding storage, similarity search, and metadata filtering
    for semantic retrieval operations.
    """
    def add(self, nodes: List[BaseNode]) -> List[str]:
        """
        Add nodes with embeddings to vector store.

        Parameters:
        - nodes: List[BaseNode], nodes with embeddings to add

        Returns:
        - List[str], list of node IDs added to store
        """

    def delete(self, ref_doc_id: str, **delete_kwargs: Any) -> None:
        """
        Delete vectors by reference document ID.

        Parameters:
        - ref_doc_id: str, reference document ID to delete
        - **delete_kwargs: additional deletion parameters
        """

    def query(self, query: VectorStoreQuery, **kwargs: Any) -> VectorStoreQueryResult:
        """
        Query vector store for similar vectors.

        Parameters:
        - query: VectorStoreQuery, query specification with embedding and filters
        - **kwargs: additional query parameters

        Returns:
        - VectorStoreQueryResult, query results with nodes and similarities
        """

    def persist(
        self,
        persist_path: str,
        fs: Optional[AbstractFileSystem] = None
    ) -> None:
        """
        Persist vector store to disk.

        Parameters:
        - persist_path: str, path for persistence
        - fs: Optional[AbstractFileSystem], filesystem interface
        """
class SimpleVectorStore(VectorStore):
    """
    Simple in-memory vector store implementation.

    Parameters:
    - data: Optional[SimpleVectorStoreData], vector store data
    - simple_file_store: Optional[SimpleFileStore], file store backend
    """
    def __init__(
        self,
        data: Optional[SimpleVectorStoreData] = None,
        simple_file_store: Optional[SimpleFileStore] = None,
        **kwargs
    ): ...

    @classmethod
    def from_persist_dir(
        cls,
        persist_dir: str,
        namespace: Optional[str] = None,
        fs: Optional[AbstractFileSystem] = None
    ) -> "SimpleVectorStore":
        """Load SimpleVectorStore from persisted directory."""

    def get(self, text_id: str) -> List[float]:
        """Get embedding vector by text ID."""

    def get_nodes(self, node_ids: Optional[List[str]] = None) -> List[BaseNode]:
        """Get nodes by IDs or all nodes if none specified."""
```

### Property Graph Storage

Storage for property graph structures with nodes, relationships, and properties.
```python { .api }
class PropertyGraphStore:
    """
    Base interface for property graph storage implementations.

    Property graph stores handle graph structures with typed nodes and relationships,
    supporting complex graph queries and traversal operations.
    """
    def get(
        self,
        properties: Optional[dict] = None,
        ids: Optional[List[str]] = None
    ) -> List[LabelledNode]:
        """
        Retrieve nodes by properties or IDs.

        Parameters:
        - properties: Optional[dict], property filters for node retrieval
        - ids: Optional[List[str]], specific node IDs to retrieve

        Returns:
        - List[LabelledNode], matching nodes from the graph
        """

    def get_rel_map(
        self,
        subjs: Optional[List[str]] = None,
        depth: int = 2,
        limit: int = 30
    ) -> List[Triplet]:
        """
        Get relationship map starting from subject nodes.

        Parameters:
        - subjs: Optional[List[str]], starting subject node IDs
        - depth: int, maximum traversal depth
        - limit: int, maximum number of relationships to return

        Returns:
        - List[Triplet], relationship triplets in the graph
        """

    def upsert_nodes(self, nodes: List[LabelledNode]) -> None:
        """
        Insert or update nodes in the graph.

        Parameters:
        - nodes: List[LabelledNode], nodes to insert or update
        """

    def upsert_relations(self, relations: List[Relation]) -> None:
        """
        Insert or update relationships in the graph.

        Parameters:
        - relations: List[Relation], relationships to insert or update
        """

    def delete(
        self,
        entity_names: Optional[List[str]] = None,
        relation_names: Optional[List[str]] = None,
        properties: Optional[dict] = None,
        ids: Optional[List[str]] = None
    ) -> None:
        """
        Delete nodes and relationships from graph.

        Parameters:
        - entity_names: Optional[List[str]], entity names to delete
        - relation_names: Optional[List[str]], relation types to delete
        - properties: Optional[dict], property-based deletion filters
        - ids: Optional[List[str]], specific IDs to delete
        """
class SimplePropertyGraphStore(PropertyGraphStore):
    """
    Simple in-memory property graph store implementation.

    Parameters:
    - simple_file_store: Optional[SimpleFileStore], file store backend
    """
    def __init__(self, simple_file_store: Optional[SimpleFileStore] = None): ...

    def persist(
        self,
        persist_path: str,
        fs: Optional[AbstractFileSystem] = None
    ) -> None:
        """Persist property graph store to disk."""

    @classmethod
    def from_persist_dir(
        cls,
        persist_dir: str,
        namespace: Optional[str] = None,
        fs: Optional[AbstractFileSystem] = None
    ) -> "SimplePropertyGraphStore":
        """Load SimplePropertyGraphStore from persisted directory."""
```

### Graph Storage (Legacy)

Legacy graph storage interface for knowledge graphs and relationship management.
```python { .api }
class GraphStore:
    """
    Legacy graph storage interface for knowledge graphs.

    Note: This interface is deprecated in favor of PropertyGraphStore
    for new applications.
    """
    def get(self, subj: str) -> List[List[str]]:
        """Get relationships for a subject."""

    def get_rel_map(
        self,
        subjs: Optional[List[str]] = None,
        depth: int = 2
    ) -> Dict[str, List[List[str]]]:
        """Get relationship map for subjects."""

    def upsert_triplet(self, subj: str, rel: str, obj: str) -> None:
        """Insert or update a relationship triplet."""

    def delete(self, subj: str, rel: str, obj: str) -> None:
        """Delete a relationship triplet."""
class SimpleGraphStore(GraphStore):
    """Simple in-memory graph store implementation (legacy)."""
    def __init__(self, simple_file_store: Optional[SimpleFileStore] = None): ...
```

### Image Storage

Storage interface for image data and multi-modal content management.
```python { .api }
class BaseImageStore:
    """
    Base interface for image storage implementations.

    Image stores handle storage and retrieval of image data with metadata
    and support for different image formats and storage backends.
    """
    def add_image(self, image_id: str, image_data: bytes) -> None:
        """
        Add image to store.

        Parameters:
        - image_id: str, unique identifier for the image
        - image_data: bytes, raw image data
        """

    def get_image(self, image_id: str) -> Optional[bytes]:
        """
        Retrieve image by ID.

        Parameters:
        - image_id: str, image identifier

        Returns:
        - Optional[bytes], image data or None if not found
        """

    def delete_image(self, image_id: str) -> None:
        """
        Delete image from store.

        Parameters:
        - image_id: str, image identifier to delete
        """
class SimpleImageStore(BaseImageStore):
    """Simple in-memory image store implementation."""
    def __init__(self, simple_file_store: Optional[SimpleFileStore] = None): ...
```

### Loading Functions

Utility functions for loading indices and data structures from persistent storage.
```python { .api }
def load_index_from_storage(
    storage_context: StorageContext,
    index_id: Optional[str] = None,
    **kwargs
) -> BaseIndex:
    """
    Load a single index from storage context.

    Parameters:
    - storage_context: StorageContext, storage context containing index data
    - index_id: Optional[str], specific index ID to load
    - **kwargs: additional parameters for index construction

    Returns:
    - BaseIndex, loaded index instance
    """
def load_indices_from_storage(
    storage_context: StorageContext,
    index_ids: Optional[List[str]] = None,
    **kwargs
) -> List[BaseIndex]:
    """
    Load multiple indices from storage context.

    Parameters:
    - storage_context: StorageContext, storage context containing index data
    - index_ids: Optional[List[str]], specific index IDs to load
    - **kwargs: additional parameters for index construction

    Returns:
    - List[BaseIndex], list of loaded index instances
    """
def load_graph_from_storage(
    storage_context: StorageContext,
    root_id: str,
    **kwargs
) -> ComposableGraph:
    """
    Load composable graph from storage context.

    Parameters:
    - storage_context: StorageContext, storage context containing graph data
    - root_id: str, root node ID for the graph
    - **kwargs: additional parameters for graph construction

    Returns:
    - ComposableGraph, loaded graph instance
    """
```

## Usage Examples

### Basic Storage Context Setup
```python
from llama_index.core.storage import StorageContext
from llama_index.core import VectorStoreIndex, Document

# Create documents
documents = [
    Document(text="Machine learning is a subset of artificial intelligence."),
    Document(text="Deep learning uses neural networks with multiple layers."),
    Document(text="Natural language processing helps computers understand text.")
]

# Create storage context with default implementations
storage_context = StorageContext.from_defaults(persist_dir="./storage")

# Create index with storage context
index = VectorStoreIndex.from_documents(
    documents,
    storage_context=storage_context
)

# Persist to disk
storage_context.persist()
print("Index and storage persisted to ./storage")
```

### Loading from Persistent Storage
```python
# Load storage context from disk
loaded_storage_context = StorageContext.from_persist_dir("./storage")

# Load index from storage
loaded_index = load_index_from_storage(loaded_storage_context)

# Use loaded index
query_engine = loaded_index.as_query_engine()
response = query_engine.query("What is machine learning?")
print(response.response)
```

### Custom Storage Configuration
```python
from llama_index.core.storage.docstore import SimpleDocumentStore
from llama_index.core.storage.index_store import SimpleIndexStore
from llama_index.core.vector_stores import SimpleVectorStore

# Create custom storage components
docstore = SimpleDocumentStore()
index_store = SimpleIndexStore()
vector_store = SimpleVectorStore()

# Create custom storage context
custom_storage_context = StorageContext.from_defaults(
    docstore=docstore,
    index_store=index_store,
    vector_store=vector_store,
    persist_dir="./custom_storage"
)

# Create index with custom storage
custom_index = VectorStoreIndex.from_documents(
    documents,
    storage_context=custom_storage_context
)

# Persist custom configuration
custom_storage_context.persist()
```

### Document Store Operations
```python
from llama_index.core.storage.docstore import SimpleDocumentStore
from llama_index.core.schema import TextNode

# Create document store
docstore = SimpleDocumentStore()

# Create and add nodes
nodes = [
    TextNode(text="First document content", node_id="doc_1"),
    TextNode(text="Second document content", node_id="doc_2"),
    TextNode(text="Third document content", node_id="doc_3")
]

docstore.add_documents(nodes)

# Retrieve documents
doc_1 = docstore.get_document("doc_1")
print(f"Retrieved: {doc_1.text}")

# Retrieve multiple documents
docs = docstore.get_documents(["doc_1", "doc_2"])
print(f"Retrieved {len(docs)} documents")

# Get all documents
all_docs = docstore.docs
print(f"Total documents in store: {len(all_docs)}")

# Delete document
docstore.delete_document("doc_3")
print(f"Remaining documents: {len(docstore.docs)}")
```

### Vector Store Operations
```python
from llama_index.core.vector_stores import SimpleVectorStore
from llama_index.core.vector_stores.types import VectorStoreQuery

# Create vector store
vector_store = SimpleVectorStore()

# Add nodes with embeddings (embeddings would be computed by embedding model)
import random
nodes_with_embeddings = []
for i, node in enumerate(nodes):
    # Mock embeddings for demonstration
    node.embedding = [random.random() for _ in range(384)]
    nodes_with_embeddings.append(node)

# Add to vector store
vector_store.add(nodes_with_embeddings)

# Query vector store
query_embedding = [random.random() for _ in range(384)]
query = VectorStoreQuery(
    query_embedding=query_embedding,
    similarity_top_k=2
)

results = vector_store.query(query)
print(f"Found {len(results.nodes)} similar nodes")
for node in results.nodes:
    print(f"Node ID: {node.node_id}, Text: {node.text[:50]}...")
```

### Property Graph Storage
```python
from llama_index.core.graph_stores import SimplePropertyGraphStore
from llama_index.core.graph_stores.types import LabelledNode, Relation

# Create property graph store
graph_store = SimplePropertyGraphStore()

# Create nodes with labels and properties
nodes = [
    LabelledNode(
        id="ml",
        label="Topic",
        properties={"name": "Machine Learning", "category": "AI"}
    ),
    LabelledNode(
        id="dl",
        label="Topic",
        properties={"name": "Deep Learning", "category": "AI"}
    ),
    LabelledNode(
        id="nn",
        label="Concept",
        properties={"name": "Neural Networks", "type": "algorithm"}
    )
]

# Create relationships
relations = [
    Relation(
        source_id="dl",
        target_id="ml",
        label="IS_SUBSET_OF",
        properties={"strength": 0.9}
    ),
    Relation(
        source_id="dl",
        target_id="nn",
        label="USES",
        properties={"importance": "high"}
    )
]

# Add to graph store
graph_store.upsert_nodes(nodes)
graph_store.upsert_relations(relations)

# Query graph
ml_related = graph_store.get(properties={"category": "AI"})
print(f"AI-related nodes: {len(ml_related)}")

# Get relationship map
rel_map = graph_store.get_rel_map(subjs=["dl"], depth=2)
print(f"Deep learning relationships: {len(rel_map)}")
```

### Multiple Index Management
```python
# Create multiple indices with shared storage
storage_context = StorageContext.from_defaults(persist_dir="./multi_index")

# Create different index types
vector_index = VectorStoreIndex.from_documents(
    documents[:2],
    storage_context=storage_context
)

from llama_index.core import SummaryIndex
summary_index = SummaryIndex.from_documents(
    documents[2:],
    storage_context=storage_context
)

# Persist all indices
storage_context.persist()

# Load all indices
loaded_storage = StorageContext.from_persist_dir("./multi_index")
loaded_indices = load_indices_from_storage(loaded_storage)

print(f"Loaded {len(loaded_indices)} indices")
for i, index in enumerate(loaded_indices):
    print(f"Index {i}: {type(index).__name__}")
```

### Storage Context Serialization
```python
# Create and configure storage context
storage_context = StorageContext.from_defaults()

# Convert to dictionary
storage_dict = storage_context.to_dict()
print("Storage context configuration:")
for key, value in storage_dict.items():
    print(f"  {key}: {type(value).__name__}")

# Recreate from dictionary
restored_context = StorageContext.from_dict(storage_dict)
print("Storage context restored from configuration")
```

## Configuration & Types
```python { .api }
# Storage configuration constants
DEFAULT_PERSIST_DIR = "./storage"
DEFAULT_DOCSTORE_FNAME = "docstore.json"
DEFAULT_INDEX_STORE_FNAME = "index_store.json"
DEFAULT_VECTOR_STORE_FNAME = "vector_store.json"
DEFAULT_GRAPH_STORE_FNAME = "graph_store.json"
DEFAULT_PROPERTY_GRAPH_STORE_FNAME = "property_graph_store.json"
DEFAULT_IMAGE_STORE_FNAME = "image_store.json"

# File system types
from fsspec import AbstractFileSystem

# Storage backend types
StorageBackend = Union[str, AbstractFileSystem]

# Index structure types
class IndexStruct:
    """Base class for index structure metadata."""
    pass
```