1010from muvr .converters import neon2iosmlp
1111from muvr .training .default_models import generate_default_activity_model
1212from muvr .training .default_models import generate_default_exercise_model
13+ from muvr .dataset .labelmappers import generate_activity_labelmapper
14+ from muvr .dataset .labelmappers import generate_exercise_labelmapper
15+ from muvr .visualization .datastats import dataset_statistics
1316from pylab import *
1417
1518
@@ -51,11 +54,14 @@ def label_of_example(index):
5154 savefig (output_image )
5255
5356
54- def learn_model_from_data (dataset , working_directory , model_name , epoch ):
57+ def learn_model_from_data (dataset , working_directory , model_name , epoch , layer_filename ):
5558 """Use MLP to train the dataset and generate result in working_directory"""
5659 model_trainer = MLPMeasurementModelTrainer (working_directory , max_epochs = epoch )
5760
58- if model_name == "slacking" :
61+ if layer_filename :
62+ layers = neon2iosmlp .parsing_layer (read_file (layer_filename ))
63+ model = Model (layers = layers )
64+ elif model_name == "slacking" :
5965 print "Using slacking model"
6066 model = generate_default_activity_model (dataset .num_labels )
6167 else :
@@ -111,8 +117,24 @@ def show_evaluation(model, dataset):
111117 table [i + 1 ][0 ] = s
112118 table [0 ][0 ] = "actual \ predicted"
113119
120+ # Add 3 more last column: Total | Accuracy (%) | ExerciseId
121+ table [0 ].extend (["Total" , "Accuracy (%)" , "Exercise" ])
122+ exerId = 1
123+ while exerId < len (table ):
124+ row = table [exerId ]
125+ total = sum (row [1 :len (row )])
126+ print row [exerId ], " - " , total
127+ accuracy = "%.2f" % (float (row [exerId ]) / float (total ) * 100.0 )
128+ exerName = table [0 ][exerId ]
129+ table [exerId ].extend ([total , accuracy + "%" , exerName ])
130+ exerId += 1
114131 return table
115132
def read_file(filename):
    """Return the first line of *filename* with surrounding whitespace stripped.

    Used to load the single-line layer-shape description passed via the
    ``-shape`` command-line option.

    :param filename: path to a text file whose first line holds the payload
    :return: the first line, stripped of leading/trailing whitespace
    """
    # 'with' guarantees the handle is closed even if readline() raises,
    # unlike the previous manual open()/close() pair.
    with open(filename, 'r') as f:
        return f.readline().strip()
116138
117139def write_to_csv (filename , data ):
118140 """Write csv data to filename"""
@@ -122,25 +144,32 @@ def write_to_csv(filename, data):
122144 csvfile .close ()
123145
124146
125- def main (dataset_directory , working_directory , evaluation_file , visualise_image , model_name , test_directory , is_analysis , epoch ):
147+ def main (dataset_directory , working_directory , evaluation_file , visualise_image , model_name , test_directory , is_analysis , epoch , layer_filename ):
126148 """Main entry point."""
127149
150+ if model_name == "slacking" :
151+ mapping_label = generate_activity_labelmapper ()
152+ else :
153+ mapping_label = generate_exercise_labelmapper ()
154+
128155 # 1/ Load the dataset
129- dataset = CSVAccelerationDataset (dataset_directory , test_directory )
156+ dataset = CSVAccelerationDataset (dataset_directory , test_directory , label_mapper = mapping_label )
130157 print "Number of training examples:" , dataset .num_train_examples
131158 print "Number of test examples:" , dataset .num_test_examples
132159 print "Number of features:" , dataset .num_features
133160 print "Number of labels:" , dataset .num_labels
134161
162+ # 2a/ Write statistic of the dataset (in terms of window samples)
163+ stats = dataset_statistics (dataset )
164+ write_to_csv (os .path .join (working_directory , "dataset_stats.csv" ), stats )
165+
166+ # 2b/ Print statistic in term of csv files
135167 dataset .train_examples .print_statistic ("train" , dataset .label_id_mapping )
136168 dataset .test_examples .print_statistic ("test" , dataset .label_id_mapping )
137169
138- # 2/ Visualise the dataset
139- visualise_dataset (dataset , visualise_image )
140-
141170 if not is_analysis :
142171 # 3/ Train the dataset using MLP
143- mlpmodel , trained_model = learn_model_from_data (dataset , working_directory , model_name , epoch )
172+ mlpmodel , trained_model = learn_model_from_data (dataset , working_directory , model_name , epoch , layer_filename )
144173
145174 # 4/ Evaluate the trained model
146175 table = show_evaluation (trained_model , dataset )
@@ -158,11 +187,12 @@ def main(dataset_directory, working_directory, evaluation_file, visualise_image,
158187 parser .add_argument ('-v' , metavar = 'visualise' , default = './output/visualisation.png' , type = str , help = "visualisation dataset image output" )
159188 parser .add_argument ('-m' , metavar = 'modelname' , default = 'demo' , type = str , help = "prefix name of model" )
160189 parser .add_argument ('-loop' , metavar = 'epoch' , default = 30 , type = int , help = "number of training epoch" )
190+ parser .add_argument ('-shape' , metavar = 'shape' , type = str , help = "filename containing the shape of model" )
161191 parser .add_argument ('-analysis' , action = 'store_true' , default = False )
162192 args = parser .parse_args ()
163193
164194 #
165195 # A good example of command-line params is
166196 # -m core -d ../../muvr-training-data/labelled/core -o ../output/ -v ../output/v.png -e ../output/e.csv
167197 #
168- sys .exit (main (args .d , args .o , args .e , args .v , args .m , args .t , args .analysis , args .loop ))
198+ sys .exit (main (args .d , args .o , args .e , args .v , args .m , args .t , args .analysis , args .loop , args . shape ))
0 commit comments