How to save Keras models directly to AWS S3

How to make sharing model files among data science teams simpler

First, build a minimal Keras model and save it with model.save(). With TensorFlow 2.x Keras, saving to a path with no file extension uses the SavedModel format, which writes out a whole directory of files rather than a single file:

import numpy as np
from tensorflow import keras

# Build and compile a tiny single-layer model
inputs = keras.Input(shape=(32,))
outputs = keras.layers.Dense(1)(inputs)
model = keras.Model(inputs, outputs)
model.compile(optimizer="adam", loss="mean_squared_error")

# Writes the SavedModel directory "my_model" into the current working directory
model.save("my_model")
(Figure: the folder structure created by model.save(), diagram generated using https://tree.nathanfriend.io/)
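The original diagram is not reproduced here, but on TensorFlow 2.x the SavedModel directory typically looks something like this (the exact files vary by TensorFlow version):

my_model/
├── assets/
├── variables/
│   ├── variables.data-00000-of-00001
│   └── variables.index
└── saved_model.pb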
Loading it back from the local folder is a one-liner:

model = keras.models.load_model("my_model")
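As a purely illustrative smoke test, the reloaded model should accept a batch of 32-dimensional inputs and return one output per row:

# Sanity check on the reloaded model (illustrative only)
dummy = np.random.random((1, 32)).astype("float32")
print(model.predict(dummy).shape)  # (1, 1)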
Storing the model on S3 is where it gets awkward: because the SavedModel output is a directory full of files rather than a single object, the simplest approach is to save into a temporary directory, zip that folder, and upload one archive. Start from the same toy model:

inputs = keras.Input(shape=(32,))
outputs = keras.layers.Dense(1)(inputs)
model = keras.Model(inputs, outputs)
model.compile(optimizer="adam", loss="mean_squared_error")
The upload side needs three pieces: a zipdir() hook that walks the saved folder and writes each file into the archive without its parent path, a get_s3fs() helper that builds the S3 client (and is reused by the download function below), and the save routine itself, wrapped here as s3_save_keras_model() to mirror the download helper. The upload has to happen while the temporary directory still exists. AWS_ACCESS_KEY, AWS_SECRET_KEY and BUCKET_NAME are placeholders for your own credentials and bucket.

import os
import tempfile
import zipfile

import s3fs

def zipdir(path, ziph):
    # Zipfile hook to zip up model folders
    length = len(path)
    for root, dirs, files in os.walk(path):
        folder = root[length:]  # Stop zipping parent folders
        for file in files:
            ziph.write(os.path.join(root, file), os.path.join(folder, file))

def get_s3fs():
    return s3fs.S3FileSystem(key=AWS_ACCESS_KEY, secret=AWS_SECRET_KEY)

def s3_save_keras_model(model: keras.Model, model_name: str):
    with tempfile.TemporaryDirectory() as tempdir:
        # Save the model into the temporary directory
        model.save(f"{tempdir}/{model_name}")
        # Zip up the SavedModel folder into a single archive
        zipf = zipfile.ZipFile(f"{tempdir}/{model_name}.zip", "w", zipfile.ZIP_STORED)
        zipdir(f"{tempdir}/{model_name}", zipf)
        zipf.close()
        # Upload the archive before the temporary directory is cleaned up
        s3fs_client = get_s3fs()
        s3fs_client.put(f"{tempdir}/{model_name}.zip", f"{BUCKET_NAME}/{model_name}.zip")
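With the placeholders filled in with real credentials and a writable bucket, uploading the toy model would look something like this (hypothetical values):

# Pushes my_model.zip to the configured bucket
s3_save_keras_model(model, "my_model")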
Getting the model back is the reverse: download the archive into a temporary directory, extract it, and point keras.models.load_model() at the extracted folder.

def s3_get_keras_model(model_name: str) -> keras.Model:
    with tempfile.TemporaryDirectory() as tempdir:
        s3fs_client = get_s3fs()
        # Fetch and save the zip file to the temporary directory
        s3fs_client.get(f"{BUCKET_NAME}/{model_name}.zip", f"{tempdir}/{model_name}.zip")
        # Extract the model zip file within the temporary directory
        with zipfile.ZipFile(f"{tempdir}/{model_name}.zip") as zip_ref:
            zip_ref.extractall(f"{tempdir}/{model_name}")
        # Load the keras model from the temporary directory
        return keras.models.load_model(f"{tempdir}/{model_name}")
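On the consuming side, anyone with access to the bucket can pull the model down and use it immediately; the names here are the same hypothetical placeholders as above:

# Round trip: fetch the model from S3 and run a prediction
model = s3_get_keras_model("my_model")
print(model.predict(np.zeros((1, 32), dtype="float32")).shape)  # (1, 1)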
