Commit 298d55aa authored by Matthieu Dorier's avatar Matthieu Dorier

PFS-based backend working

parents
Please see [http://www.mcs.anl.gov/research/projects/mochi/contributing/] for details.
\ No newline at end of file
Copyright (c) 2018, UChicago Argonne, LLC
All Rights Reserved
SDS TOOLS (ANL-SF-16-009)
OPEN SOURCE LICENSE
Under the terms of Contract No. DE-AC02-06CH11357 with UChicago Argonne,
LLC, the U.S. Government retains certain rights in this software.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the names of UChicago Argonne, LLC or the Department of
Energy nor the names of its contributors may be used to endorse or
promote products derived from this software without specific prior
written permission.
******************************************************************************
DISCLAIMER
THE SOFTWARE IS SUPPLIED "AS IS" WITHOUT WARRANTY OF ANY KIND.
NEITHER THE UNITED STATES GOVERNMENT, NOR THE UNITED STATES DEPARTMENT
OF ENERGY, NOR UCHICAGO ARGONNE, LLC, NOR ANY OF THEIR EMPLOYEES,
MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LEGAL LIABILITY
OR RESPONSIBILITY FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY
INFORMATION, DATA, APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS
THAT ITS USE WOULD NOT INFRINGE PRIVATELY OWNED RIGHTS.
******************************************************************************
from workspace import Workspace
import os
import sys
import numpy as np
import json
class FileSystemBackend():
    """Backend that stores Keras models on a (parallel) file system.

    Layout under the root directory:
        <path>/<model-name>/config.json        -- model metadata (JSON)
        <path>/<model-name>/layer<i>/array<j>  -- raw weight arrays (binary)
    """
    def __init__(self, path='.'):
        """
        Args:
            path (str): Root directory under which models are stored.
        """
        self._path = path
    def store_layer_data(self, name, i, layer):
        """
        Stores a Keras layer for a given model.
        Args:
            name (str): Name of the model.
            i (int): Index of the layer in the model.
            layer (keras Layer): Layer to store.
        Returns:
            A list of metadata dictionaries describing the
            stored weight arrays.
        """
        weights = layer.get_weights()
        metadata = []
        basename = self._path+"/"+name+"/layer"+str(i)
        # Tolerate a pre-existing directory (e.g. when a model is
        # re-stored), consistent with store_model_metadata below.
        try:
            os.makedirs(basename)
        except OSError:
            pass
        for j, w in enumerate(weights):
            filename = basename+"/array"+str(j)
            location = { 'type' : 'file',
                         'path' : filename }
            md = { 'location' : location,
                   'dtype' : str(w.dtype),
                   'shape' : w.shape }
            metadata.append(md)
            # ndarray.tofile writes raw bytes, so the file must be
            # opened in *binary* mode ("w+" text mode fails on Python 3
            # and corrupts data on platforms with newline translation).
            with open(filename,"wb") as f:
                w.tofile(f)
        return metadata
    def store_model_metadata(self, name, metadata):
        """
        Stores the metadata associated with a given model.
        Args:
            name (str): Name of the model.
            metadata (dict): Dictionary of metadata to store.
        Returns:
            True if the model was stored, False otherwise.
        """
        basename = self._path+"/"+name
        # The model directory may already exist; that is not an error.
        try:
            os.makedirs(basename)
        except OSError:
            pass
        filename = basename+"/config.json"
        with open(filename,"w+") as f:
            f.write(json.dumps(metadata, indent=2, sort_keys=True))
        return True
    def load_layer_data(self, name, i, layer, config):
        """
        Loads the data of a given layer for a given model.
        Args:
            name (str): Name of the model.
            i (int): Index of the layer to load.
            layer (keras Layer): Reference to the layer to load.
            config: List of weight metadata (as produced by
                store_layer_data, possibly JSON round-tripped).
        Returns:
            Nothing. This function sets the weights in the provided
            layer object.
        """
        weights = []
        for md in config:
            location = md['location']
            filename = location['path']
            # md['dtype'] is the string form of a numpy dtype
            # (e.g. 'float32'), resolved back to the numpy scalar type.
            dtype = np.dtype(getattr(np, md['dtype']))
            w = np.fromfile(filename, dtype=dtype)
            # 'shape' may be a tuple or (after JSON) a list; reshape
            # accepts both.
            w = w.reshape(md['shape'])
            weights.append(w)
        layer.set_weights(weights)
    def load_model_metadata(self, name):
        """
        Loads the metadata associated with a given model.
        Args:
            name (str): Name of the model.
        Returns:
            A metadata dictionary corresponding to the model,
            or None if the model does not exist.
        """
        filename = self._path+"/"+name+"/config.json"
        # Honor the documented contract: a missing model yields None
        # instead of propagating the file-system error.
        try:
            with open(filename) as f:
                return json.loads(f.read())
        except (IOError, OSError):
            return None
from keras.models import model_from_json
from pfs import FileSystemBackend
import json
class Workspace():
    """Front-end for storing and loading Keras models through a
    FileSystemBackend rooted at a given directory."""
    def __init__(self, path='.'):
        """
        Args:
            path (str): Root directory handed to the backend.
        """
        self._backend = FileSystemBackend(path)
    def store(self, name, model):
        """Stores a Keras model under the given name."""
        self.__store_model(name, model)
    def load(self, name):
        """Loads and returns the Keras model stored under the given name."""
        return self.__load_model(name)
    def __store_model(self, name, model):
        # to_json gives the architecture; weights are stored per-layer
        # and their locations recorded back into the metadata.
        metadata = json.loads(model.to_json())
        # (removed a dead `metadata['config']` expression statement that
        # had no effect)
        for i, l in enumerate(model.layers):
            layer_md = self._backend.store_layer_data(name, i, l)
            # NOTE(review): assumes old-style Keras JSON where 'config'
            # is a list of per-layer dicts -- confirm against the Keras
            # version in use.
            metadata['config'][i]['data'] = layer_md
        self._backend.store_model_metadata(name, metadata)
    def __load_model(self, name):
        metadata = self._backend.load_model_metadata(name)
        # Rebuild the architecture from JSON, then restore each
        # layer's weights from the recorded locations.
        model = model_from_json(json.dumps(metadata))
        for i, l in enumerate(model.layers):
            self._backend.load_layer_data(name, i, l, metadata['config'][i]['data'])
        return model
# Packaging script for the flamestore package (distutils-based).
from distutils.core import setup

setup(
    name='flamestore',
    version='0.1',
    author='Matthieu Dorier',
    description="""Storage system for Keras models in the CANDLE workflow""",
    packages=['flamestore'],
)
# Demo script (Python 2): builds a small CNN on MNIST, stores it through a
# flamestore Workspace, reloads it, and checks that both copies evaluate
# to the same score. Requires keras and the flamestore package.
# Import libraries and modules
import json
import numpy as np
np.random.seed(123) # for reproducibility
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras.datasets import mnist
from flamestore import Workspace
# Initialize workspace
# Models are stored under the current directory.
ws = Workspace(".")
# Load pre-shuffled MNIST data into train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
# Preprocess input data
# Reshape to (samples, channels, rows, cols) -- channels-first layout.
X_train = X_train.reshape(X_train.shape[0], 1, 28, 28)
X_test = X_test.reshape(X_test.shape[0], 1, 28, 28)
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
# Scale pixel values from [0, 255] to [0, 1].
X_train /= 255
X_test /= 255
# Preprocess class labels
# One-hot encode the 10 digit classes.
Y_train = np_utils.to_categorical(y_train, 10)
Y_test = np_utils.to_categorical(y_test, 10)
# Define model architecture
model = Sequential()
model.add(Convolution2D(32, 3, 3, activation='relu', input_shape=(1,28,28)))
model.add(Convolution2D(32, 3, 3, activation='relu'))
model.add(MaxPooling2D(pool_size=(2,2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(128, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(10, activation='softmax'))
# Compile model
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
# NOTE: training is deliberately disabled (commented out via the bare
# string below), so the scores printed here are for an *untrained*
# model; the point of the demo is the store/reload round-trip, not
# accuracy.
"""
# Fit model on training data
model.fit(X_train, Y_train,
batch_size=32, nb_epoch=10, verbose=1)
"""
# Evaluate model on test data
score = model.evaluate(X_test, Y_test, verbose=0)
print "Score of trained model: "+str(score)
# Store the model
ws.store("mymodel", model)
# Reload the model
reloaded_model = ws.load("mymodel")
# Recompile the loaded model
# (compilation state is not stored by the backend, only architecture
# and weights, so the reloaded model must be compiled again.)
reloaded_model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
# Evaluate the reloaded model
# If the round-trip is correct, this score matches the one above.
score = reloaded_model.evaluate(X_test, Y_test, verbose=0)
print "Score of reloaded model: "+str(score)
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment