The Firebase Admin Python SDK enables server-side Python developers to integrate Firebase services into their applications from privileged environments.
Firebase ML model management for deploying and managing custom machine learning models in Firebase projects. Supports model lifecycle management including creation, publishing, and version control.
Create, update, retrieve, and manage ML models in Firebase projects with comprehensive model lifecycle support.
def create_model(model, app=None):
    """Create a new ML model in the Firebase project.

    Args:
        model: A ``Model`` instance holding the configuration of the
            model to create.
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Returns:
        Model: The created model instance, populated with the
        Firebase-assigned model ID.
    """
def update_model(model, app=None):
    """Update an existing ML model.

    Args:
        model: A ``Model`` instance carrying the updated configuration.
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Returns:
        Model: The updated model instance.
    """
def get_model(model_id, app=None):
    """Get an ML model by its ID.

    Args:
        model_id: The model ID string.
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Returns:
        Model: The model instance for the given ID.

    Raises:
        NotFoundError: If no model exists with the given ID.
    """
def delete_model(model_id, app=None):
    """Delete an ML model from Firebase.

    Args:
        model_id: The ID string of the model to delete.
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Raises:
        NotFoundError: If no model exists with the given ID.
    """

List and paginate through ML models in the Firebase project with filtering options.
def list_models(list_filter=None, page_size=None, page_token=None, app=None):
    """List ML models in the project.

    Args:
        list_filter: Filter string used to select models (optional).
        page_size: Maximum number of models to return per page (optional).
        page_token: Token identifying the page to fetch, for pagination
            (optional).
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Returns:
        ListModelsPage: A page of model results with pagination info.
    """

Publish and unpublish models to control their availability for client applications.
def publish_model(model_id, app=None):
    """Publish an ML model, making it available to client apps.

    Args:
        model_id: The ID string of the model to publish.
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Returns:
        Model: The published model instance.

    Raises:
        NotFoundError: If no model exists with the given ID.
    """
def unpublish_model(model_id, app=None):
    """Unpublish an ML model, making it unavailable to client apps.

    Args:
        model_id: The ID string of the model to unpublish.
        app: A Firebase app instance (optional; the default app is used
            when omitted).

    Returns:
        Model: The unpublished model instance.

    Raises:
        NotFoundError: If no model exists with the given ID.
    """

class Model:
    """Represents a Firebase ML model."""

    def __init__(self, display_name=None, tags=None, model_format=None):
        """Initialize a model.

        Args:
            display_name: Human-readable model name (optional).
            tags: List of tags used for model organization (optional).
            model_format: A ``ModelFormat`` instance specifying the model
                type (optional).
        """

    @property
    def model_id(self):
        """The Firebase-assigned model ID."""

    @property
    def display_name(self):
        """The human-readable model name."""

    @property
    def tags(self):
        """List of tags associated with the model."""

    @property
    def model_format(self):
        """The model format specification."""

    @property
    def state(self):
        """The current state of the model."""

    @property
    def create_time(self):
        """When the model was created."""

    @property
    def update_time(self):
        """When the model was last updated."""

    @property
    def validation_error(self):
        """Validation error message if the model is invalid."""

    @property
    def etag(self):
        """Entity tag used for optimistic locking."""
class ListModelsPage:
    """A page of model results with pagination support."""

    @property
    def models(self):
        """List of ``Model`` instances in this page."""

    @property
    def next_page_token(self):
        """Token for the next page (``None`` if there are no more pages)."""

    @property
    def has_next_page(self):
        """Whether more pages of results are available."""

    def get_next_page(self):
        """Get the next page of results."""

class TFLiteFormat:
    """TensorFlow Lite model format."""

    def __init__(self, model_source):
        """Initialize a TFLite format specification.

        Args:
            model_source: A TFLite source instance specifying the model
                location.
        """
class TFLiteGCSModelSource:
    """TensorFlow Lite model stored in Google Cloud Storage."""

    def __init__(self, gcs_tflite_uri):
        """Initialize a GCS model source.

        Args:
            gcs_tflite_uri: GCS URI pointing to the ``.tflite`` model file.
        """
class TFLiteAutoMLSource:
    """TensorFlow Lite model from AutoML."""

    def __init__(self, automl_model):
        """Initialize an AutoML model source.

        Args:
            automl_model: AutoML model resource name.
        """

from firebase_admin import ml

# Create a TensorFlow Lite model from Cloud Storage
model_source = ml.TFLiteGCSModelSource('gs://my-bucket/model.tflite')
model_format = ml.TFLiteFormat(model_source=model_source)

# Create model with configuration
model = ml.Model(
    display_name='Image Classification Model',
    tags=['image', 'classification', 'v1.0'],
    model_format=model_format
)

# Create the model in Firebase
created_model = ml.create_model(model)
print(f'Created model: {created_model.model_id}')

# Publish the model to make it available to client apps
published_model = ml.publish_model(created_model.model_id)
print(f'Published model: {published_model.model_id}')

# Get a specific model
model = ml.get_model('model_123')
print(f'Model: {model.display_name}')
print(f'State: {model.state}')
print(f'Tags: {model.tags}')

# Update model metadata
model.display_name = 'Updated Classification Model'
model.tags = ['image', 'classification', 'v1.1', 'production']
updated_model = ml.update_model(model)
print(f'Updated model: {updated_model.display_name}')

# Unpublish model (make unavailable to clients)
unpublished_model = ml.unpublish_model(model.model_id)
print(f'Unpublished model state: {unpublished_model.state}')

# List all models
models_page = ml.list_models()
for model in models_page.models:
    print(f'Model: {model.display_name} (ID: {model.model_id})')

# List with pagination
models_page = ml.list_models(page_size=10)
while models_page:
    for model in models_page.models:
        print(f'Model: {model.display_name}')
    if models_page.has_next_page:
        models_page = models_page.get_next_page()
    else:
        break

# Filter models by tags or other criteria
filtered_models = ml.list_models(list_filter='tags:production')
for model in filtered_models.models:
    print(f'Production model: {model.display_name}')

# Create model from AutoML
automl_source = ml.TFLiteAutoMLSource(
    automl_model='projects/my-project/locations/us-central1/models/my-automl-model'
)
automl_format = ml.TFLiteFormat(model_source=automl_source)
automl_model = ml.Model(
    display_name='AutoML Classification Model',
    tags=['automl', 'classification'],
    model_format=automl_format
)
created_automl_model = ml.create_model(automl_model)
print(f'Created AutoML model: {created_automl_model.model_id}')

from firebase_admin.exceptions import NotFoundError, InvalidArgumentError
try:
    # Attempt to get a model
    model = ml.get_model('nonexistent_model')
except NotFoundError:
    print('Model not found')

try:
    # Create model with invalid configuration
    invalid_model = ml.Model(
        display_name='',  # Empty display name might be invalid
        model_format=None  # Missing model format
    )
    ml.create_model(invalid_model)
except InvalidArgumentError as e:
    print(f'Invalid model configuration: {e}')

# Check for validation errors after creation
model = ml.get_model('model_123')
if model.validation_error:
    print(f'Model validation error: {model.validation_error}')

# Check model state before operations
model = ml.get_model('model_123')
if model.state == 'PUBLISHED':
    print('Model is published and available to clients')
elif model.state == 'UNPUBLISHED':
    print('Model exists but is not available to clients')
elif model.state == 'INVALID':
    print(f'Model is invalid: {model.validation_error}')

# Conditional publishing based on state
if model.state == 'UNPUBLISHED':
    published_model = ml.publish_model(model.model_id)
    print(f'Model published: {published_model.state}')

# Batch create multiple models
models_to_create = [
    {
        'display_name': 'Model A',
        'gcs_uri': 'gs://my-bucket/model_a.tflite',
        'tags': ['experimental', 'version_a']
    },
    {
        'display_name': 'Model B',
        'gcs_uri': 'gs://my-bucket/model_b.tflite',
        'tags': ['experimental', 'version_b']
    }
]
created_model_ids = []
for model_config in models_to_create:
    source = ml.TFLiteGCSModelSource(model_config['gcs_uri'])
    format_spec = ml.TFLiteFormat(model_source=source)
    model = ml.Model(
        display_name=model_config['display_name'],
        tags=model_config['tags'],
        model_format=format_spec
    )
    created_model = ml.create_model(model)
    created_model_ids.append(created_model.model_id)
print(f'Created {len(created_model_ids)} models')

# Batch publish models
for model_id in created_model_ids:
    ml.publish_model(model_id)
    print(f'Published model: {model_id}')

Models created and published through the Admin SDK become available to client applications:
// iOS client code to download and use the model
let conditions = ModelDownloadConditions(
    allowsCellularAccess: false,
    allowsBackgroundDownloading: true
)
let downloadProgress = ModelManager.modelManager().download(
    remoteModel,
    conditions: conditions
)

// Android client code to download and use the model
FirebaseModelDownloader.getInstance()
    .getModel("model_123", DownloadType.LOCAL_MODEL, conditions)
    .addOnSuccessListener(model -> {
        // Use the downloaded model
        File modelFile = model.getFile();
    });

class Model:
"""Represents a Firebase ML model."""
@property
def model_id(self):
"""The Firebase-assigned model ID."""
@property
def display_name(self):
"""The human-readable model name."""
@property
def tags(self):
"""List of tags associated with the model."""
@property
def model_format(self):
"""The model format specification."""
@property
def state(self):
"""The current state of the model (PUBLISHED, UNPUBLISHED, INVALID)."""
@property
def create_time(self):
"""When the model was created."""
@property
def update_time(self):
"""When the model was last updated."""
class ListModelsPage:
"""Page of model results with pagination support."""
@property
def models(self):
"""List of Model instances in this page."""
@property
def next_page_token(self):
"""Token for the next page."""
def get_next_page(self):
"""Get the next page of results."""
class TFLiteFormat:
"""TensorFlow Lite model format specification."""
class TFLiteGCSModelSource:
"""TensorFlow Lite model stored in Google Cloud Storage."""
class TFLiteAutoMLSource:
"""TensorFlow Lite model from AutoML."""Install with Tessl CLI
npx tessl i tessl/pypi-firebase-admin