From 76a72d15eef511f6514c4b6d93300a0696da385f Mon Sep 17 00:00:00 2001
From: Your Name
Date: Wed, 25 Oct 2023 13:17:47 -0500
Subject: [PATCH 1/3] update mnist project based on MLflow 2.7.1

---
 python/keras_tf_mnist/0.json      | 1578 +++++++++++++++++
 python/keras_tf_mnist/MLproject   |   12 +-
 python/keras_tf_mnist/conda.yaml  |    2 +-
 .../convert_png_to_mlflow_json.py |    4 +-
 python/keras_tf_mnist/utils.py    |    2 +-
 5 files changed, 1589 insertions(+), 9 deletions(-)
 create mode 100644 python/keras_tf_mnist/0.json

diff --git a/python/keras_tf_mnist/0.json b/python/keras_tf_mnist/0.json
new file mode 100644
index 0000000..7f454fa
--- /dev/null
+++ b/python/keras_tf_mnist/0.json
@@ -0,0 +1,1578 @@
+{ + "dataframe_split": { + "columns": [ + "col_0", + "col_1", + "col_2", + "col_3", + "col_4", + "col_5", + "col_6", + "col_7", + "col_8", + "col_9", +
"col_10", + "col_11", + "col_12", + "col_13", + "col_14", + "col_15", + "col_16", + "col_17", + "col_18", + "col_19", +
"col_20", + "col_21", + "col_22", + "col_23", + "col_24", + "col_25", + "col_26", + "col_27", + "col_28", + "col_29", +
"col_30", + "col_31", + "col_32", + "col_33", + "col_34", + "col_35", + "col_36", + "col_37", + "col_38", + "col_39", +
"col_40", + "col_41", + "col_42", + "col_43", + "col_44", + "col_45", + "col_46", + "col_47", + "col_48", + "col_49", +
"col_50", + "col_51", + "col_52", + "col_53", + "col_54", + "col_55", + "col_56", + "col_57", + "col_58", + "col_59", +
"col_60", + "col_61", + "col_62", + "col_63", + "col_64", + "col_65", + "col_66", + "col_67", + "col_68", + "col_69", +
"col_70", + "col_71", + "col_72", + "col_73", + "col_74", + "col_75", + "col_76", + "col_77", + "col_78", + "col_79", +
"col_80", + "col_81", + "col_82", + "col_83", + "col_84", + "col_85", + "col_86", + "col_87", + "col_88", + "col_89", +
"col_90", + "col_91", + "col_92", + "col_93", + "col_94", + "col_95", + "col_96", + "col_97", + "col_98", + "col_99", +
"col_100", + "col_101", + "col_102", + "col_103", + "col_104", + "col_105", + "col_106", + "col_107", + "col_108", + "col_109", +
"col_110", + "col_111", + "col_112", + "col_113", + "col_114", + "col_115", + "col_116", + "col_117", + "col_118", + "col_119", +
"col_120", + "col_121", + "col_122", + "col_123", + "col_124", + "col_125", + "col_126", + "col_127", + "col_128", + "col_129", +
"col_130", + "col_131", + "col_132", + "col_133", + "col_134", + "col_135", + "col_136", + "col_137", + "col_138", + "col_139", +
"col_140", + "col_141", + "col_142", + "col_143", + "col_144", + "col_145", + "col_146", + "col_147", + "col_148", + "col_149", +
"col_150", + "col_151", + "col_152", + "col_153", + "col_154", + "col_155", + "col_156", + "col_157", + "col_158", + "col_159", +
"col_160", + "col_161", + "col_162", + "col_163", + "col_164", + "col_165", + "col_166", + "col_167", + "col_168", + "col_169", +
"col_170", + "col_171", + "col_172", + "col_173", + "col_174", + "col_175", + "col_176", + "col_177", + "col_178", + "col_179", +
"col_180", + "col_181", + "col_182", + "col_183", + "col_184", + "col_185", + "col_186", + "col_187", + "col_188", + "col_189", +
"col_190", + "col_191", + "col_192", + "col_193", + "col_194", + "col_195", + "col_196", + "col_197", + "col_198", + "col_199", +
"col_200", + "col_201", + "col_202", + "col_203", + "col_204", + "col_205", + "col_206", + "col_207", + "col_208", + "col_209", +
"col_210", + "col_211", + "col_212", + "col_213", + "col_214", + "col_215", + "col_216", + "col_217", + "col_218", + "col_219", +
"col_220", + "col_221", + "col_222", + "col_223", + "col_224", +
"col_225", + "col_226", + "col_227", + "col_228", + "col_229", + "col_230", + "col_231", + "col_232", + "col_233", + "col_234", + "col_235", + "col_236", + "col_237", + "col_238", + "col_239", + "col_240", + "col_241", + "col_242", + "col_243", + "col_244", + "col_245", + "col_246", + "col_247", + "col_248", + "col_249", + "col_250", + "col_251", + "col_252", + "col_253", + "col_254", + "col_255", + "col_256", + "col_257", + "col_258", + "col_259", + "col_260", + "col_261", + "col_262", + "col_263", + "col_264", + "col_265", + "col_266", + "col_267", + "col_268", + "col_269", + "col_270", + "col_271", + "col_272", + "col_273", + "col_274", + "col_275", + "col_276", + "col_277", + "col_278", + "col_279", + "col_280", + "col_281", + "col_282", + "col_283", + "col_284", + "col_285", + "col_286", + "col_287", + "col_288", + "col_289", + "col_290", + "col_291", + "col_292", + "col_293", + "col_294", + "col_295", + "col_296", + "col_297", + "col_298", + "col_299", + "col_300", + "col_301", + "col_302", + "col_303", + "col_304", + "col_305", + "col_306", + "col_307", + "col_308", + "col_309", + "col_310", + "col_311", + "col_312", + "col_313", + "col_314", + "col_315", + "col_316", + "col_317", + "col_318", + "col_319", + "col_320", + "col_321", + "col_322", + "col_323", + "col_324", + "col_325", + "col_326", + "col_327", + "col_328", + "col_329", + "col_330", + "col_331", + "col_332", + "col_333", + "col_334", + "col_335", + "col_336", + "col_337", + "col_338", + "col_339", + "col_340", + "col_341", + "col_342", + "col_343", + "col_344", + "col_345", + "col_346", + "col_347", + "col_348", + "col_349", + "col_350", + "col_351", + "col_352", + "col_353", + "col_354", + "col_355", + "col_356", + "col_357", + "col_358", + "col_359", + "col_360", + "col_361", + "col_362", + "col_363", + "col_364", + "col_365", + "col_366", + "col_367", + "col_368", + "col_369", + "col_370", + "col_371", + "col_372", + "col_373", + "col_374", + "col_375", + "col_376", + "col_377", + "col_378", + "col_379", + "col_380", + "col_381", + "col_382", + "col_383", + "col_384", + "col_385", + "col_386", + "col_387", + "col_388", + "col_389", + "col_390", + "col_391", + "col_392", + "col_393", + "col_394", + "col_395", + "col_396", + "col_397", + "col_398", + "col_399", + "col_400", + "col_401", + "col_402", + "col_403", + "col_404", + "col_405", + "col_406", + "col_407", + "col_408", + "col_409", + "col_410", + "col_411", + "col_412", + "col_413", + "col_414", + "col_415", + "col_416", + "col_417", + "col_418", + "col_419", + "col_420", + "col_421", + "col_422", + "col_423", + "col_424", + "col_425", + "col_426", + "col_427", + "col_428", + "col_429", + "col_430", + "col_431", + "col_432", + "col_433", + "col_434", + "col_435", + "col_436", + "col_437", + "col_438", + "col_439", + "col_440", + "col_441", + "col_442", + "col_443", + "col_444", + "col_445", + "col_446", + "col_447", + "col_448", + "col_449", + "col_450", + "col_451", + "col_452", + "col_453", + "col_454", + "col_455", + "col_456", + "col_457", + "col_458", + "col_459", + "col_460", + "col_461", + "col_462", + "col_463", + "col_464", + "col_465", + "col_466", + "col_467", + "col_468", + "col_469", + "col_470", + "col_471", + "col_472", + "col_473", + "col_474", + "col_475", + "col_476", + "col_477", + "col_478", + "col_479", + "col_480", + "col_481", + "col_482", + "col_483", + "col_484", + "col_485", + "col_486", + "col_487", + "col_488", + "col_489", + "col_490", + "col_491", + "col_492", + "col_493", + "col_494", + "col_495", + "col_496", + "col_497", + 
"col_498", + "col_499", + "col_500", + "col_501", + "col_502", + "col_503", + "col_504", + "col_505", + "col_506", + "col_507", + "col_508", + "col_509", + "col_510", + "col_511", + "col_512", + "col_513", + "col_514", + "col_515", + "col_516", + "col_517", + "col_518", + "col_519", + "col_520", + "col_521", + "col_522", + "col_523", + "col_524", + "col_525", + "col_526", + "col_527", + "col_528", + "col_529", + "col_530", + "col_531", + "col_532", + "col_533", + "col_534", + "col_535", + "col_536", + "col_537", + "col_538", + "col_539", + "col_540", + "col_541", + "col_542", + "col_543", + "col_544", + "col_545", + "col_546", + "col_547", + "col_548", + "col_549", + "col_550", + "col_551", + "col_552", + "col_553", + "col_554", + "col_555", + "col_556", + "col_557", + "col_558", + "col_559", + "col_560", + "col_561", + "col_562", + "col_563", + "col_564", + "col_565", + "col_566", + "col_567", + "col_568", + "col_569", + "col_570", + "col_571", + "col_572", + "col_573", + "col_574", + "col_575", + "col_576", + "col_577", + "col_578", + "col_579", + "col_580", + "col_581", + "col_582", + "col_583", + "col_584", + "col_585", + "col_586", + "col_587", + "col_588", + "col_589", + "col_590", + "col_591", + "col_592", + "col_593", + "col_594", + "col_595", + "col_596", + "col_597", + "col_598", + "col_599", + "col_600", + "col_601", + "col_602", + "col_603", + "col_604", + "col_605", + "col_606", + "col_607", + "col_608", + "col_609", + "col_610", + "col_611", + "col_612", + "col_613", + "col_614", + "col_615", + "col_616", + "col_617", + "col_618", + "col_619", + "col_620", + "col_621", + "col_622", + "col_623", + "col_624", + "col_625", + "col_626", + "col_627", + "col_628", + "col_629", + "col_630", + "col_631", + "col_632", + "col_633", + "col_634", + "col_635", + "col_636", + "col_637", + "col_638", + "col_639", + "col_640", + "col_641", + "col_642", + "col_643", + "col_644", + "col_645", + "col_646", + "col_647", + "col_648", + "col_649", + "col_650", + "col_651", + "col_652", + "col_653", + "col_654", + "col_655", + "col_656", + "col_657", + "col_658", + "col_659", + "col_660", + "col_661", + "col_662", + "col_663", + "col_664", + "col_665", + "col_666", + "col_667", + "col_668", + "col_669", + "col_670", + "col_671", + "col_672", + "col_673", + "col_674", + "col_675", + "col_676", + "col_677", + "col_678", + "col_679", + "col_680", + "col_681", + "col_682", + "col_683", + "col_684", + "col_685", + "col_686", + "col_687", + "col_688", + "col_689", + "col_690", + "col_691", + "col_692", + "col_693", + "col_694", + "col_695", + "col_696", + "col_697", + "col_698", + "col_699", + "col_700", + "col_701", + "col_702", + "col_703", + "col_704", + "col_705", + "col_706", + "col_707", + "col_708", + "col_709", + "col_710", + "col_711", + "col_712", + "col_713", + "col_714", + "col_715", + "col_716", + "col_717", + "col_718", + "col_719", + "col_720", + "col_721", + "col_722", + "col_723", + "col_724", + "col_725", + "col_726", + "col_727", + "col_728", + "col_729", + "col_730", + "col_731", + "col_732", + "col_733", + "col_734", + "col_735", + "col_736", + "col_737", + "col_738", + "col_739", + "col_740", + "col_741", + "col_742", + "col_743", + "col_744", + "col_745", + "col_746", + "col_747", + "col_748", + "col_749", + "col_750", + "col_751", + "col_752", + "col_753", + "col_754", + "col_755", + "col_756", + "col_757", + "col_758", + "col_759", + "col_760", + "col_761", + "col_762", + "col_763", + "col_764", + "col_765", + "col_766", + "col_767", + "col_768", + "col_769", + "col_770", + 
"col_771", + "col_772", + "col_773", + "col_774", + "col_775", + "col_776", + "col_777", + "col_778", + "col_779", + "col_780", + "col_781", + "col_782", + "col_783" + ], + "data": [ + [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 110, + 109, + 109, + 47, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 21, + 176, + 217, + 253, + 252, + 252, + 232, + 218, + 93, + 21, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 6, + 37, + 182, + 201, + 252, + 252, + 253, + 252, + 252, + 252, + 253, + 252, + 92, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 1, + 42, + 160, + 252, + 253, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 253, + 252, + 215, + 1, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 63, + 170, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 253, + 252, + 252, + 108, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 73, + 237, + 252, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 253, + 231, + 179, + 221, + 253, + 252, + 252, + 232, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 53, + 232, + 252, + 252, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 237, + 71, + 0, + 125, + 253, + 252, + 252, + 252, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 73, + 252, + 252, + 252, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 62, + 0, + 0, + 0, + 253, + 252, + 252, + 168, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 42, + 222, + 253, + 253, + 253, + 253, + 253, + 255, + 222, + 125, + 0, + 0, + 0, + 0, + 63, + 255, + 253, + 237, + 62, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 21, + 201, + 252, + 252, + 252, + 252, + 252, + 253, + 55, + 0, + 0, + 0, + 0, + 0, + 144, + 253, + 252, + 215, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 73, + 252, + 252, + 252, + 252, + 252, + 252, + 253, + 190, + 72, + 0, + 0, + 0, + 6, + 160, + 253, + 252, + 195, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 155, + 252, + 252, + 252, + 252, + 252, + 252, + 253, + 252, + 236, + 62, + 0, + 0, + 120, + 252, + 253, + 210, + 31, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 171, + 253, + 253, + 253, + 253, + 159, + 41, + 0, + 0, + 0, + 0, + 0, + 110, + 150, + 253, + 253, + 255, + 119, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 253, + 252, + 252, + 252, + 252, + 35, + 0, + 0, + 0, + 0, + 11, + 73, + 253, + 252, + 252, + 252, + 222, + 25, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 253, + 252, + 252, + 252, + 252, + 190, + 181, + 181, + 99, + 181, + 191, + 252, + 253, + 252, + 252, + 231, + 41, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 253, + 252, + 252, + 252, + 252, + 252, + 252, + 252, + 253, + 252, + 252, + 252, + 253, + 252, + 231, + 46, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 
0, + 0, + 0, + 0, + 0, + 192, + 253, + 253, + 253, + 253, + 253, + 253, + 253, + 255, + 253, + 253, + 253, + 255, + 119, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 15, + 180, + 252, + 252, + 252, + 252, + 252, + 252, + 253, + 252, + 200, + 179, + 35, + 5, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 21, + 71, + 71, + 71, + 195, + 195, + 71, + 72, + 71, + 20, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ] + ] + } +} diff --git a/python/keras_tf_mnist/MLproject b/python/keras_tf_mnist/MLproject index 690607b..945d647 100644 --- a/python/keras_tf_mnist/MLproject +++ b/python/keras_tf_mnist/MLproject @@ -14,9 +14,9 @@ entry_points: tensorflow_autolog: {type: boolean, default: False} command: "python train.py --epochs {epochs} - --batch_size {batch_size} - --model_name {model_name} - --log_as_onnx {log_as_onnx} - --mlflow_custom_log {mlflow_custom_log} - --keras_autolog {keras_autolog} - --tensorflow_autolog {tensorflow_autolog}" + --batch-size {batch_size} + --model-name {model_name} + --log-as-onnx {log_as_onnx} + --mlflow-custom-log {mlflow_custom_log} + --keras-autolog {keras_autolog} + --tensorflow-autolog {tensorflow_autolog}" diff --git a/python/keras_tf_mnist/conda.yaml b/python/keras_tf_mnist/conda.yaml index 936cbe7..a93c0c9 100644 --- a/python/keras_tf_mnist/conda.yaml +++ b/python/keras_tf_mnist/conda.yaml @@ -4,7 +4,7 @@ channels: dependencies: - python=3.8 - pip: - - mlflow>=1.30.0 + - mlflow #- tensorflow==2.5.0 # Fails: ONNX training on Mac and Debian #- tensorflow==2.4.1 # Fails: ONNX training on Mac and Debian - tensorflow==2.3.0 # OK: ONNX training on Mac and Debian diff --git a/python/keras_tf_mnist/convert_png_to_mlflow_json.py b/python/keras_tf_mnist/convert_png_to_mlflow_json.py index 18c6337..1c61855 100644 --- a/python/keras_tf_mnist/convert_png_to_mlflow_json.py +++ b/python/keras_tf_mnist/convert_png_to_mlflow_json.py @@ -11,7 +11,9 @@ def main(path): data = data.reshape((1, 28 * 28)) columns = [ f"col_{c}" for c in range(0,data[0].shape[0]) ] dct = { "columns" : columns, "data" : [ data[0].tolist()] } - print(json.dumps(dct,indent=2)+"\n") + mlflow_v2_dict = {"dataframe_split":dct} + #print(json.dumps(dct,indent=2)+"\n") + print(json.dumps(mlflow_v2_dict,indent=2)+"\n") if __name__ == "__main__": main(sys.argv[1]) diff --git a/python/keras_tf_mnist/utils.py b/python/keras_tf_mnist/utils.py index a3969d7..f2861ca 100644 --- a/python/keras_tf_mnist/utils.py +++ b/python/keras_tf_mnist/utils.py @@ -83,7 +83,7 @@ def display_versions(): print("Versions:") print(" MLflow Version:", mlflow.__version__) print(" TensorFlow version:", tf.__version__) - print(" Keras version:", keras.__version__) + #print(" Keras version:", keras.__version__) print(" Python Version:", platform.python_version()) print(" Operating System:", platform.system()+" - "+platform.release()) print(" Tracking URI:", mlflow.tracking.get_tracking_uri()) From 
e103773ef218c127d1503c4deaa8e93396b1d604 Mon Sep 17 00:00:00 2001
From: Your Name
Date: Fri, 17 Nov 2023 23:06:43 -0600
Subject: [PATCH 2/3] add example of room recognition

---
 python/keras_room_recogntion/MLproject  |  22 ++++
 python/keras_room_recogntion/conda.yaml |  13 ++
 python/keras_room_recogntion/train.py   | 164 ++++++++++++++++++++++++
 python/keras_room_recogntion/utils.py   |  41 ++++++
 4 files changed, 240 insertions(+)
 create mode 100644 python/keras_room_recogntion/MLproject
 create mode 100644 python/keras_room_recogntion/conda.yaml
 create mode 100644 python/keras_room_recogntion/train.py
 create mode 100644 python/keras_room_recogntion/utils.py

diff --git a/python/keras_room_recogntion/MLproject b/python/keras_room_recogntion/MLproject
new file mode 100644
index 0000000..7f71a38
--- /dev/null
+++ b/python/keras_room_recogntion/MLproject
@@ -0,0 +1,22 @@
+name: mlflow-examples-keras_room_recognition
+
+conda_env: conda.yaml
+
+entry_points:
+  main:
+    parameters:
+      epochs: {type: int, default: 15}
+      batch_size: {type: int, default: 128}
+      model_name: {type: string, default: None}
+      log_as_onnx: {type: boolean, default: False}
+      mlflow_custom_log: {type: boolean, default: True}
+      keras_autolog: {type: boolean, default: False}
+      tensorflow_autolog: {type: boolean, default: False}
+      mlflow_autolog: {type: boolean, default: False}
+    command: "python train.py
+      --epochs {epochs}
+      --batch-size {batch_size}
+      --model-name {model_name}
+      --log-as-onnx {log_as_onnx}
+      --mlflow-custom-log {mlflow_custom_log}
+      --keras-autolog {keras_autolog}
+      --tensorflow-autolog {tensorflow_autolog}
+      --mlflow-autolog {mlflow_autolog}"
diff --git a/python/keras_room_recogntion/conda.yaml b/python/keras_room_recogntion/conda.yaml
new file mode 100644
index 0000000..697435f
--- /dev/null
+++ b/python/keras_room_recogntion/conda.yaml
@@ -0,0 +1,13 @@
+name: mlflow-examples-keras_room_recognition
+channels:
+  - conda-forge
+dependencies:
+  - python=3.8
+  - pip:
+    - mlflow
+    #- tensorflow==2.5.0  # Fails: ONNX training on Mac and Debian
+    #- tensorflow==2.4.1  # Fails: ONNX training on Mac and Debian
+    - tensorflow==2.8.0   # OK: ONNX training on Mac and Debian
+    - Pillow
+    - scikit-image
+    - opencv-python
diff --git a/python/keras_room_recogntion/train.py b/python/keras_room_recogntion/train.py
new file mode 100644
index 0000000..147cb12
--- /dev/null
+++ b/python/keras_room_recogntion/train.py
@@ -0,0 +1,164 @@
+import random
+import numpy as np
+import tensorflow as tf
+import tensorflow.keras as keras
+from keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D
+import mlflow
+import mlflow.keras
+import mlflow.tensorflow
+import click
+import utils
+from utils import create_training_data
+
+np.random.seed(42)
+tf.random.set_seed(42)
+
+datadir = "/home/ascc/LF_Workspace/Bayes_model/IROS23/ADL_HMM_BAYES/room_classifier/early_exit_model/watch_data/labelled/"
+categories = ['bathroom', 'bedroom', 'kitchen', 'livingroom', 'hallway', 'door']
+
+img_size = 299
+input_shape = (299, 299, 3)
+
+def build_model():
+    # Small CNN: one fixed conv block plus `conv_layer` extra conv blocks,
+    # then `dense_layer` fully connected layers and a softmax over the rooms.
+    dense_layer = 4
+    layer_size = 128
+    conv_layer = 2
+
+    model = keras.models.Sequential()
+    model.add(Conv2D(layer_size, (3, 3), input_shape=input_shape))
+    model.add(Activation("relu"))
+    model.add(MaxPooling2D(pool_size=(2, 2)))
+    for _ in range(conv_layer):
+        model.add(Conv2D(layer_size, (3, 3)))
+        model.add(Activation("relu"))
+        model.add(MaxPooling2D(pool_size=(2, 2)))
+    model.add(Flatten())
+    for _ in range(dense_layer):
+        model.add(Dense(layer_size, activation='relu'))
+        model.add(Dropout(0.5))
+    model.add(Dense(len(categories), activation='softmax'))
+
+    model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
+    model.summary()
+    return model
+
+def train(run, model_name, data_path, epochs, batch_size, mlflow_custom_log, log_as_onnx):
+    # data_path is currently unused; images are read from the module-level datadir.
+    channel = 3
+    training_data = []
+    create_training_data(categories, datadir, img_size, training_data)
+    random.shuffle(training_data)
+    X = []
+    y = []
+    for features, label in training_data:
+        X.append(features)
+        y.append(label)
+
+    print(len(X))
+
+    # A Python list cannot be fed to the network directly; -1 lets numpy
+    # infer the sample count for the (height, width, channel) image tensor.
+    X = np.array(X).reshape(-1, img_size, img_size, channel)
+    y_class_num = keras.utils.to_categorical(y, num_classes=len(categories))  # one-hot encode the labels
+
+    print('reshape:')
+    print(len(X))
+    print(X.ndim)
+
+    model = build_model()
+
+    # Note: this compile overrides the 'adam' optimizer set inside build_model().
+    model.compile(
+        optimizer="rmsprop",
+        loss="categorical_crossentropy",
+        metrics=["accuracy"])
+    model.summary()
+    model.fit(X, y_class_num, epochs=epochs, batch_size=batch_size, validation_split=0.2)
+    print("model.type:", type(model))
+
+    # test_loss, test_acc = model.evaluate(x_test, y_test)
+    # print("test_acc:", test_acc)
+    # print("test_loss:", test_loss)
+
+    if mlflow_custom_log:
+        mlflow.log_param("epochs", epochs)
+        mlflow.log_param("batch_size", batch_size)
+
+#        mlflow.log_metric("test_acc", test_acc)
+#        mlflow.log_metric("test_loss", test_loss)
+
+        # Save as TensorFlow SavedModel format (MLflow Keras default)
+        mlflow.keras.log_model(model, "keras-model", registered_model_name=model_name)
+        #mlflow.keras.log_model(model, "keras-model")
+
+        # write model summary
+        summary = []
+        model.summary(print_fn=summary.append)
+        summary = "\n".join(summary)
+        with open("model_summary.txt", "w") as f:
+            f.write(summary)
+        mlflow.log_artifact("model_summary.txt")
+
+    elif model_name:
+        utils.register_model(run, model_name)
+
+    # write model as json file
+    with open("model.json", "w") as f:
+        f.write(model.to_json())
+    mlflow.log_artifact("model.json")
+
+    # MLflow - log onnx model
+    if log_as_onnx:
+        import onnx_utils
+        mname = f"{model_name}_onnx" if model_name else None
+        onnx_utils.log_model(model, "onnx-model", mname)
+
+
+@click.command()
+@click.option("--experiment-name", help="Experiment name", default=None, type=str)
+@click.option("--model-name", help="Registered model name", default=None, type=str)
+@click.option("--data-path", help="Data path", default=None, type=str)
+@click.option("--epochs", help="Epochs", default=5, type=int)
+@click.option("--batch-size", help="Batch size", default=128, type=int)
+@click.option("--mlflow-custom-log", help="Explicitly log params, metrics and model with mlflow.log_", default=True, type=bool)
+@click.option("--keras-autolog", help="Automatically log params, metrics and model with mlflow.keras.autolog", default=False, type=bool)
+@click.option("--tensorflow-autolog", help="Automatically log params, metrics and model with mlflow.tensorflow.autolog", default=False, type=bool)
+@click.option("--mlflow-autolog", help="Automatically log params, metrics and model with mlflow.autolog", default=False, type=bool)
+@click.option("--log-as-onnx", help="Log the model as ONNX as well", default=False, type=bool)
+def main(experiment_name, model_name, data_path, epochs, batch_size, mlflow_autolog, keras_autolog, tensorflow_autolog, mlflow_custom_log, log_as_onnx):
+    print("Options:")
+    for k, v in locals().items():
+        print(f"  {k}: {v}")
+    model_name = None if not model_name or model_name == "None" else model_name
+    if not mlflow_autolog and not keras_autolog and not tensorflow_autolog:
+        mlflow_custom_log = True
+
+    if keras_autolog:
+        mlflow.keras.autolog()
+    if tensorflow_autolog:
+        mlflow.tensorflow.autolog()
+    if mlflow_autolog:
+        mlflow.autolog()
+
+    if experiment_name:
+        mlflow.set_experiment(experiment_name)
+
+    with mlflow.start_run() as run:
+        print("MLflow:")
+        print("  run_id:", run.info.run_id)
+        print("  experiment_id:", run.info.experiment_id)
+        mlflow.set_tag("version.mlflow", mlflow.__version__)
+        mlflow.set_tag("version.keras", keras.__version__)
+        mlflow.set_tag("version.tensorflow", tf.__version__)
+        mlflow.set_tag("mlflow_autolog", mlflow_autolog)
+        mlflow.set_tag("tensorflow_autolog", tensorflow_autolog)
+        mlflow.set_tag("keras_autolog", keras_autolog)
+        mlflow.set_tag("mlflow_custom_log", mlflow_custom_log)
+        train(run, model_name, data_path, epochs, batch_size, mlflow_custom_log, log_as_onnx)
+
+if __name__ == "__main__":
+    main()
diff --git a/python/keras_room_recogntion/utils.py b/python/keras_room_recogntion/utils.py
new file mode 100644
index 0000000..4adbcf6
--- /dev/null
+++ b/python/keras_room_recogntion/utils.py
@@ -0,0 +1,41 @@
+import random
+import os
+import numpy as np
+import skimage as sk
+from skimage import transform
+from skimage import util
+import cv2
+
+def random_rotation(image_array: np.ndarray):
+    # pick a random rotation between 25 degrees left and 25 degrees right
+    random_degree = random.uniform(-25, 25)
+    return sk.transform.rotate(image_array, random_degree)
+
+def random_noise(image_array: np.ndarray):
+    # add random noise to the image
+    return sk.util.random_noise(image_array)
+
+def horizontal_flip(image_array: np.ndarray):
+    # horizontal flip doesn't need skimage: just reverse the pixel columns
+    return image_array[:, ::-1]
+
+def create_training_data(categories, datadir, img_size, training_data):
+    for category in categories:
+        path = os.path.join(datadir, category)  # path to this category's images
+        class_num = categories.index(category)
+        for img in os.listdir(path):
+            try:
+                #img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_GRAYSCALE)  # read image as gray scale
+                img_array = cv2.imread(os.path.join(path, img), cv2.IMREAD_COLOR)  # read image as color
+                new_array = cv2.resize(img_array, (img_size, img_size))
+                # Keep the resized image plus augmented copies. The augmentation
+                # helpers return new arrays, so their results must be captured.
+                training_data.append([new_array, class_num])
+                training_data.append([random_rotation(new_array), class_num])
+                training_data.append([random_noise(new_array), class_num])
+                # training_data.append([horizontal_flip(new_array), class_num])
+            except Exception as e:
+                print("Error creating training data")
+                print(e)

From e06edf6fe99b9cef357cde82a8da2d425d3b4311 Mon Sep 17 00:00:00 2001
From: Your Name
Date: Mon, 20 Nov 2023 12:28:54 -0600
Subject: [PATCH 3/3] update train

---
 python/keras_room_recogntion/note.txt |  3 +++
 python/keras_room_recogntion/train.py | 31 ++++++++++++++++++++++-----
 2 files changed, 29 insertions(+), 5 deletions(-)
 create mode 100644 python/keras_room_recogntion/note.txt

diff --git a/python/keras_room_recogntion/note.txt b/python/keras_room_recogntion/note.txt
new file mode 100644
index 0000000..18d80db
--- /dev/null
+++ b/python/keras_room_recogntion/note.txt
@@ -0,0 +1,3 @@
+mlflow run . --experiment-name=keras_ascc --run-name runname_keras_ascc_room_recognition
+
+mlflow run . --experiment-name=keras_ascc --run-name runname_keras_ascc_room_recognition_test -P epochs=1 -P keras_autolog=True -P mlflow_autolog=True -P tensorflow_autolog=True
diff --git a/python/keras_room_recogntion/train.py b/python/keras_room_recogntion/train.py
index 147cb12..01974db 100644
--- a/python/keras_room_recogntion/train.py
+++ b/python/keras_room_recogntion/train.py
@@ -17,6 +17,8 @@
 datadir = "/home/ascc/LF_Workspace/Bayes_model/IROS23/ADL_HMM_BAYES/room_classifier/early_exit_model/watch_data/labelled/"
 categories = ['bathroom', 'bedroom', 'kitchen', 'livingroom', 'hallway', 'door']
 
+test_datadir = "/home/ascc/LF_Workspace/Bayes_model/IROS23/ADL_HMM_BAYES/room_classifier/early_exit_model/watch_data/test"
+
 img_size = 299
 input_shape = (299, 299, 3)
 
@@ -78,17 +80,36 @@
     model.summary()
     model.fit(X, y_class_num, epochs=epochs, batch_size=batch_size, validation_split=0.2)
     print("model.type:", type(model))
+
+    # Build the held-out test set the same way as the training set.
+    test_data = []
+    create_training_data(categories, test_datadir, img_size, test_data)
+    random.shuffle(test_data)
+    x_test = []
+    y_test = []
+    for features, label in test_data:
+        x_test.append(features)
+        y_test.append(label)
+
+    x_test = np.array(x_test).reshape(-1, img_size, img_size, channel)
+    y_test = keras.utils.to_categorical(y_test, num_classes=len(categories))  # one-hot encode the labels
+
+    print('reshape:')
+    print(len(x_test))
+    print(y_test.ndim)
 
-    # test_loss, test_acc = model.evaluate(x_test, y_test)
-    # print("test_acc:", test_acc)
-    # print("test_loss:", test_loss)
+    test_loss, test_acc = model.evaluate(x_test, y_test)
+    print("test_acc:", test_acc)
+    print("test_loss:", test_loss)
 
     if mlflow_custom_log:
         mlflow.log_param("epochs", epochs)
         mlflow.log_param("batch_size", batch_size)
 
-#        mlflow.log_metric("test_acc", test_acc)
-#        mlflow.log_metric("test_loss", test_loss)
+        mlflow.log_metric("test_acc", test_acc)
+        mlflow.log_metric("test_loss", test_loss)
 
         # Save as TensorFlow SavedModel format (MLflow Keras default)
         mlflow.keras.log_model(model, "keras-model", registered_model_name=model_name)
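
Smoke test for the MLflow 2.x scoring payload (a sketch, assuming a finished run from the keras_tf_mnist project; <run-id> is a placeholder, and "keras-model" is the artifact path the training script logs to). The 0.json added in PATCH 1/3 is exactly the "dataframe_split" body that the MLflow 2.x REST scoring server expects on /invocations:

mlflow models serve -m runs:/<run-id>/keras-model -p 5001 --env-manager local

curl -X POST http://127.0.0.1:5001/invocations -H "Content-Type: application/json" -d @python/keras_tf_mnist/0.json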