diff --git a/parthpathak1998@gmail.com/Project/MM.csv b/parthpathak1998@gmail.com/Project/MM.csv
new file mode 100644
index 000000000..33218ef31
--- /dev/null
+++ b/parthpathak1998@gmail.com/Project/MM.csv
@@ -0,0 +1,831 @@
+BI-RADS,Age,Shape,Margin,Density,Severity
+5,67,3,5,3,1
+5,58,4,5,3,1
+4,28,1,1,3,0
+5,57,1,5,3,1
+5,76,1,4,3,1
+3,42,2,1,3,1
+4,36,3,1,2,0
+4,60,2,1,2,0
+4,54,1,1,3,0
+3,52,3,4,3,0
+4,59,2,1,3,1
+4,54,1,1,3,1
+5,56,4,3,1,1
+5,42,4,4,3,1
+4,59,2,4,3,1
+5,75,4,5,3,1
+5,45,4,5,3,1
+5,55,4,4,3,0
+4,46,1,5,2,0
+5,54,4,4,3,1
+5,57,4,4,3,1
+4,39,1,1,2,0
+4,81,1,1,3,0
+4,60,2,1,3,0
+5,67,3,4,2,1
+4,55,3,4,2,0
+4,78,1,1,1,0
+4,50,1,1,3,0
+5,62,3,5,2,1
+5,64,4,5,3,1
+5,67,4,5,3,1
+4,74,2,1,2,0
+5,80,3,5,3,1
+4,49,2,1,1,0
+5,52,4,3,3,1
+5,60,4,3,3,1
+4,57,2,5,3,0
+5,74,4,4,3,1
+4,49,1,1,3,0
+4,45,2,1,3,0
+4,64,2,1,3,0
+4,73,2,1,2,0
+5,68,4,3,3,1
+5,52,4,5,3,0
+5,66,4,4,3,1
+4,25,1,1,3,0
+5,74,1,1,2,1
+4,64,1,1,3,0
+5,60,4,3,2,1
+5,67,2,4,1,0
+4,67,4,5,3,0
+5,44,4,4,2,1
+3,68,1,1,3,1
+5,58,4,4,3,1
+5,62,1,5,3,1
+4,73,3,4,3,1
+4,80,4,4,3,1
+5,59,2,1,3,1
+5,54,4,4,3,1
+5,62,4,4,3,0
+4,33,2,1,3,0
+4,57,1,1,3,0
+4,45,4,4,3,0
+5,71,4,4,3,1
+5,59,4,4,2,0
+4,56,1,1,3,0
+4,57,2,1,2,0
+5,55,3,4,3,1
+5,84,4,5,3,0
+5,51,4,4,3,1
+4,24,2,1,2,0
+4,66,1,1,3,0
+5,33,4,4,3,0
+4,59,4,3,2,0
+5,40,4,5,3,1
+5,67,4,4,3,1
+5,75,4,3,3,1
+5,86,4,4,3,0
+5,66,4,4,3,1
+5,46,4,5,3,1
+4,59,4,4,3,1
+5,65,4,4,3,1
+4,53,1,1,3,0
+5,67,3,5,3,1
+5,80,4,5,3,1
+4,55,2,1,3,0
+4,47,1,1,2,0
+5,62,4,5,3,1
+5,63,4,4,3,1
+4,71,4,4,3,1
+4,41,1,1,3,0
+5,57,4,4,4,1
+5,71,4,4,4,1
+4,66,1,1,3,0
+4,47,2,4,2,0
+3,34,4,4,3,0
+4,59,3,4,3,0
+5,67,4,4,3,1
+4,41,2,1,3,0
+4,23,3,1,3,0
+4,42,2,1,3,0
+5,87,4,5,3,1
+4,68,1,1,3,1
+4,64,1,1,3,0
+5,54,3,5,3,1
+5,86,4,5,3,1
+4,21,2,1,3,0
+4,53,4,4,3,0
+4,44,4,4,3,0
+4,54,1,1,3,0
+5,63,4,5,3,1
+4,45,2,1,2,0
+5,71,4,5,3,0
+5,49,4,4,3,1
+4,49,4,4,3,0
+5,66,4,4,4,0
+4,19,1,1,3,0
+4,35,1,1,2,0
+5,74,4,5,3,1
+5,37,4,4,3,1
+5,81,3,4,3,1
+5,59,4,4,3,1
+4,34,1,1,3,0
+5,79,4,3,3,1
+5,60,3,1,3,0
+4,41,1,1,3,1
+4,50,1,1,3,0
+5,85,4,4,3,1
+4,46,1,1,3,0
+5,66,4,4,3,1
+4,73,3,1,2,0
+4,55,1,1,3,0
+4,49,2,1,3,0
+3,49,4,4,3,0
+4,51,4,5,3,1
+2,48,4,4,3,0
+4,58,4,5,3,0
+5,72,4,5,3,1
+4,46,2,3,3,0
+4,43,4,3,3,1
+4,46,1,1,1,0
+4,69,3,1,3,0
+5,43,2,1,3,1
+5,76,4,5,3,1
+4,46,1,1,3,0
+4,59,2,4,3,0
+4,57,1,1,3,0
+3,45,2,1,3,0
+3,43,2,1,3,0
+4,45,2,1,3,0
+5,57,4,5,3,1
+5,79,4,4,3,1
+5,54,2,1,3,1
+4,40,3,4,3,0
+5,63,4,4,3,1
+4,52,2,1,3,0
+4,38,1,1,3,0
+3,72,4,3,3,0
+5,80,4,3,3,1
+5,76,4,3,3,1
+4,62,3,1,3,0
+5,64,4,5,3,1
+5,42,4,5,3,0
+4,64,4,5,3,0
+4,63,4,4,3,1
+4,24,2,1,2,0
+5,72,4,4,3,1
+4,63,2,1,3,0
+4,46,1,1,3,0
+3,33,1,1,3,0
+5,76,4,4,3,1
+4,36,2,3,3,0
+4,40,2,1,3,0
+5,58,1,5,3,1
+4,43,2,1,3,0
+3,42,1,1,3,0
+4,32,1,1,3,0
+5,57,4,4,2,1
+4,37,1,1,3,0
+4,70,4,4,3,1
+5,56,4,2,3,1
+5,73,4,4,3,1
+5,77,4,5,3,1
+5,67,4,4,1,1
+5,71,4,3,3,1
+5,65,4,4,3,1
+4,43,1,1,3,0
+4,49,2,1,3,0
+5,76,4,2,3,1
+4,55,4,4,3,0
+5,72,4,5,3,1
+3,53,4,3,3,0
+5,75,4,4,3,1
+5,61,4,5,3,1
+5,67,4,4,3,1
+5,55,4,2,3,1
+5,66,4,4,3,1
+2,76,1,1,2,0
+4,57,4,4,3,1
+5,71,3,1,3,0
+5,70,4,5,3,1
+4,63,2,1,3,0
+5,40,1,4,3,1
+4,41,1,1,3,0
+4,47,2,1,2,0
+4,68,1,1,3,1
+4,64,4,3,3,1
+4,73,4,3,3,0
+4,39,4,3,3,0
+5,55,4,5,4,1
+5,53,3,4,4,0
+5,66,4,4,3,1
+4,43,3,1,2,0
+5,44,4,5,3,1
+4,77,4,4,3,1
+4,62,2,4,3,0
+5,80,4,4,3,1
+4,33,4,4,3,0
+4,50,4,5,3,1
+5,46,4,4,3,1
+5,49,4,5,3,1
+4,53,1,1,3,0
+3,46,2,1,2,0
+4,57,1,1,3,0
+4,54,3,1,3,0
+2,49,2,1,2,0
+4,47,3,1,3,0
+4,40,1,1,3,0
+4,45,1,1,3,0
+4,50,4,5,3,1
+5,54,4,4,3,1
+4,67,4,1,3,1
+4,77,4,4,3,1
+4,66,4,3,3,0
+4,36,2,3,3,0
+4,69,4,4,3,0
+4,48,1,1,3,0
+4,64,4,4,3,1
+4,71,4,2,3,1
+5,60,4,3,3,1
+4,24,1,1,3,0
+5,34,4,5,2,1
+4,79,1,1,2,0
+4,45,1,1,3,0
+4,37,2,1,2,0
+4,42,1,1,2,0
+4,72,4,4,3,1
+5,60,4,5,3,1
+5,85,3,5,3,1
+4,51,1,1,3,0
+5,54,4,5,3,1
+5,55,4,3,3,1
+4,64,4,4,3,0
+5,67,4,5,3,1
+5,75,4,3,3,1
+5,87,4,4,3,1
+4,46,4,4,3,1
+55,46,4,3,3,1
+5,61,1,1,3,1
+4,44,1,4,3,0
+4,32,1,1,3,0
+4,62,1,1,3,0
+5,59,4,5,3,1
+4,61,4,1,3,0
+5,78,4,4,3,1
+5,42,4,5,3,0
+4,45,1,2,3,0
+5,34,2,1,3,1
+4,27,3,1,3,0
+4,43,1,1,3,0
+5,83,4,4,3,1
+4,36,2,1,3,0
+4,37,2,1,3,0
+4,56,3,1,3,1
+5,55,4,4,3,1
+4,88,4,4,3,1
+5,71,4,4,3,1
+4,41,2,1,3,0
+5,49,4,4,3,1
+3,51,1,1,4,0
+4,39,1,3,3,0
+4,46,2,1,3,0
+5,52,4,4,3,1
+5,58,4,4,3,1
+4,67,4,5,3,1
+5,80,4,4,3,1
+4,45,1,1,3,0
+5,68,4,4,3,1
+4,44,2,3,3,0
+5,74,4,3,3,1
+5,55,4,5,3,0
+4,49,4,4,3,1
+4,49,1,1,3,0
+5,50,4,3,3,1
+5,52,3,5,3,1
+4,45,1,1,3,0
+4,66,1,1,3,0
+4,68,4,4,3,1
+4,72,2,1,3,0
+5,74,4,4,3,1
+5,58,4,4,3,1
+4,77,2,3,3,0
+4,49,3,1,3,0
+5,60,4,3,3,1
+5,69,4,3,3,1
+4,53,2,1,3,0
+3,46,3,4,3,0
+5,74,4,4,3,1
+4,58,1,1,3,0
+5,68,4,4,3,1
+5,46,4,3,3,0
+5,61,2,4,3,1
+5,70,4,3,3,1
+5,37,4,4,3,1
+3,65,4,5,3,1
+4,67,4,4,3,0
+5,69,3,4,3,0
+5,76,4,4,3,1
+4,65,4,3,3,0
+5,72,4,2,3,1
+4,62,4,2,3,0
+5,42,4,4,3,1
+5,66,4,3,3,1
+5,48,4,4,3,1
+4,35,1,1,3,0
+5,60,4,4,3,1
+5,67,4,2,3,1
+5,78,4,4,3,1
+4,66,1,1,3,1
+4,48,1,1,3,0
+4,31,1,1,3,0
+5,43,4,3,3,1
+5,72,2,4,3,0
+5,66,1,1,3,1
+4,56,4,4,3,0
+5,58,4,5,3,1
+5,33,2,4,3,1
+4,37,1,1,3,0
+5,36,4,3,3,1
+4,39,2,3,3,0
+4,39,4,4,3,1
+5,83,4,4,3,1
+4,68,4,5,3,1
+5,63,3,4,3,1
+5,78,4,4,3,1
+4,38,2,3,3,0
+5,46,4,3,3,1
+5,60,4,4,3,1
+5,56,2,3,3,1
+4,33,1,1,3,0
+4,69,1,5,3,1
+5,66,1,4,3,1
+4,72,1,3,3,0
+4,29,1,1,3,0
+5,54,4,5,3,1
+5,80,4,4,3,1
+5,68,4,3,3,1
+4,35,2,1,3,0
+4,50,1,1,3,0
+4,32,4,3,3,0
+0,69,4,5,3,1
+4,71,4,5,3,1
+5,87,4,5,3,1
+4,64,1,1,3,0
+5,55,4,5,3,1
+4,18,1,1,3,0
+4,53,1,1,3,0
+5,84,4,5,3,1
+5,80,4,3,3,1
+4,32,1,1,3,0
+5,77,3,4,3,1
+4,38,1,1,3,0
+5,54,4,5,3,1
+4,63,1,1,3,0
+4,61,1,1,3,0
+4,52,1,1,3,0
+4,36,1,1,3,0
+4,59,1,1,3,0
+5,51,4,4,2,1
+4,36,1,1,3,0
+5,40,4,3,3,1
+4,49,1,1,3,0
+4,37,2,3,3,0
+4,46,1,1,3,0
+4,63,1,1,3,0
+4,28,2,1,3,0
+4,47,2,1,3,0
+4,42,2,1,3,1
+5,44,4,5,3,1
+4,49,4,4,3,0
+5,47,4,5,3,1
+5,52,4,5,3,1
+4,53,1,1,3,1
+5,83,3,3,3,1
+5,63,4,4,3,1
+4,54,1,1,3,0
+4,50,4,4,3,0
+5,80,4,5,3,1
+5,45,2,4,3,0
+4,28,2,1,3,0
+4,31,1,1,3,0
+4,41,2,1,3,0
+4,21,3,1,3,0
+5,44,3,4,3,1
+5,49,4,4,3,1
+5,71,4,5,3,1
+5,75,4,5,3,1
+4,38,2,1,3,0
+4,60,1,3,3,0
+5,87,4,5,3,1
+4,70,4,4,3,1
+5,55,4,5,3,1
+3,21,1,1,3,0
+4,50,1,1,3,0
+5,76,4,5,3,1
+4,23,1,1,3,0
+5,73,4,5,3,1
+4,38,2,3,3,0
+2,57,1,1,3,0
+5,65,4,5,3,1
+5,67,2,4,3,1
+5,61,2,4,3,1
+5,56,4,4,3,0
+5,71,2,4,3,1
+4,49,2,2,3,0
+4,44,2,1,3,0
+0,58,4,4,3,0
+4,27,2,1,3,0
+5,73,4,5,3,1
+4,34,2,1,3,0
+4,50,2,1,3,1
+4,62,2,1,3,0
+3,21,3,1,3,0
+4,36,3,1,3,0
+4,45,2,1,3,1
+5,67,4,5,3,1
+4,21,1,1,3,0
+4,57,2,1,3,0
+5,66,4,5,3,1
+4,71,4,4,3,1
+5,69,3,4,3,1
+6,80,4,5,3,1
+3,27,2,1,3,0
+4,38,2,1,3,0
+4,23,2,1,3,0
+4,46,4,3,3,0
+4,61,2,3,3,0
+5,65,4,5,3,1
+4,60,4,3,3,0
+5,83,4,5,3,1
+5,40,4,4,3,1
+4,53,3,4,3,0
+4,76,4,4,3,0
+5,79,1,4,3,1
+5,38,2,4,3,1
+4,61,3,4,3,0
+4,56,2,1,3,0
+4,44,2,1,3,0
+4,66,3,3,3,0
+4,50,3,3,3,0
+4,46,1,1,3,0
+4,39,1,1,3,0
+5,55,4,5,3,1
+4,40,2,1,3,0
+4,26,1,1,3,0
+5,84,3,2,3,1
+4,41,2,2,3,0
+4,63,1,1,3,0
+4,49,1,1,3,0
+4,56,2,2,3,1
+5,65,4,4,3,0
+4,54,1,1,3,0
+4,36,1,1,3,0
+5,49,4,4,3,0
+4,59,4,4,3,1
+5,75,4,4,3,1
+5,59,4,2,3,0
+5,59,4,4,3,1
+4,28,4,4,3,1
+5,53,4,5,3,0
+5,57,4,4,3,0
+5,77,4,3,4,0
+5,85,4,3,3,1
+4,59,4,4,3,0
+5,59,1,5,3,1
+4,65,3,3,3,1
+4,54,2,1,3,0
+5,46,4,5,3,1
+4,63,4,4,3,1
+4,53,1,1,3,1
+4,56,1,1,3,0
+5,66,4,4,3,1
+5,66,4,5,3,1
+4,55,1,1,3,0
+4,44,1,1,3,0
+5,86,3,4,3,1
+5,47,4,5,3,1
+5,59,4,5,3,1
+5,66,4,5,3,0
+5,61,4,3,3,1
+4,69,1,1,3,0
+5,93,1,5,3,1
+4,39,1,3,3,0
+5,44,4,5,3,1
+4,45,2,2,3,0
+4,51,3,4,3,0
+4,56,2,4,3,0
+4,66,4,4,3,0
+5,61,4,5,3,1
+4,64,3,3,3,1
+5,57,2,4,3,0
+5,79,4,4,3,1
+4,44,4,1,1,0
+4,31,2,1,3,0
+4,63,4,4,3,0
+4,64,1,1,3,0
+5,47,4,5,3,0
+5,68,4,5,3,1
+4,30,1,1,3,0
+5,43,4,5,3,1
+4,56,1,1,3,0
+4,46,2,1,3,0
+4,67,2,1,3,0
+5,52,4,5,3,1
+4,67,4,4,3,1
+4,47,2,1,3,0
+5,58,4,5,3,1
+4,28,2,1,3,0
+4,43,1,1,3,0
+4,57,2,4,3,0
+5,68,4,5,3,1
+4,64,2,4,3,0
+4,64,2,4,3,0
+5,62,4,4,3,1
+4,38,4,1,3,0
+5,68,4,4,3,1
+4,41,2,1,3,0
+4,35,2,1,3,1
+4,68,2,1,3,0
+5,55,4,4,3,1
+5,67,4,4,3,1
+4,51,4,3,3,0
+2,40,1,1,3,0
+5,73,4,4,3,1
+5,59,4,3,3,1
+6,60,3,5,3,1
+5,54,4,3,3,0
+4,56,1,1,3,0
+5,53,4,5,3,1
+4,54,2,4,3,0
+5,79,1,4,3,1
+5,67,4,3,3,1
+5,64,3,3,3,1
+4,70,1,2,3,1
+5,55,4,3,3,1
+5,65,3,3,3,1
+5,45,4,2,3,1
+5,49,1,1,3,1
+4,24,2,1,3,0
+4,52,1,1,3,0
+4,50,2,1,3,0
+4,35,1,1,3,0
+5,64,4,3,3,1
+5,40,4,1,1,1
+5,66,4,4,3,1
+4,64,4,4,3,1
+5,52,4,3,3,1
+5,43,1,4,3,1
+4,56,4,4,3,0
+6,51,4,4,3,1
+4,79,4,4,3,1
+4,22,2,1,3,0
+4,73,2,1,3,0
+4,53,3,4,3,0
+4,59,2,1,3,1
+4,46,4,4,2,0
+5,66,4,4,3,1
+4,50,4,3,3,1
+4,58,1,1,3,1
+4,55,1,1,3,0
+4,62,2,4,3,1
+4,60,1,1,3,0
+5,57,4,3,3,1
+4,57,1,1,3,0
+6,41,2,1,3,0
+4,71,2,1,3,1
+4,32,2,1,3,0
+4,57,2,1,3,0
+4,19,1,1,3,0
+4,62,2,4,3,1
+5,67,4,5,3,1
+4,50,4,5,3,0
+4,65,2,3,2,0
+4,40,2,4,2,0
+6,71,4,4,3,1
+6,68,4,3,3,1
+4,68,1,1,3,0
+4,29,1,1,3,0
+4,53,2,1,3,0
+5,66,4,4,3,1
+5,76,4,4,3,1
+4,58,2,1,2,0
+5,96,3,4,3,1
+5,70,4,4,3,1
+4,34,2,1,3,0
+4,59,2,1,3,0
+4,45,3,1,3,1
+5,65,4,4,3,1
+4,59,1,1,3,0
+4,21,2,1,3,0
+3,43,2,1,3,0
+4,53,1,1,3,0
+4,65,2,1,3,0
+4,64,2,4,3,1
+4,53,4,4,3,0
+4,51,1,1,3,0
+4,59,2,4,3,0
+4,56,2,1,3,0
+4,60,2,1,3,0
+4,22,1,1,3,0
+4,25,2,1,3,0
+5,69,4,4,3,1
+4,58,2,1,3,0
+5,62,4,3,3,1
+4,56,4,4,3,0
+4,64,1,1,3,0
+4,32,2,1,3,0
+5,59,4,4,2,1
+4,52,1,1,3,0
+4,63,4,4,3,0
+5,67,4,4,3,1
+5,61,4,4,3,1
+5,59,4,5,3,1
+5,52,4,3,3,1
+4,35,4,4,3,0
+5,77,3,3,3,1
+5,71,4,3,3,1
+5,63,4,3,3,1
+4,38,2,1,2,0
+5,72,4,3,3,1
+4,76,4,3,3,1
+4,53,3,3,3,0
+4,67,4,5,3,0
+5,69,2,4,3,1
+4,54,1,1,3,0
+2,35,2,1,2,0
+5,68,4,3,3,1
+4,68,4,4,3,0
+4,67,2,4,3,1
+3,39,1,1,3,0
+4,44,2,1,3,0
+4,33,1,1,3,0
+4,58,1,1,3,0
+4,31,1,1,3,0
+3,23,1,1,3,0
+5,56,4,5,3,1
+4,69,2,1,3,1
+6,63,1,1,3,0
+4,65,1,1,3,1
+4,44,2,1,2,0
+4,62,3,3,3,1
+4,67,4,4,3,1
+4,56,2,1,3,0
+4,52,3,4,3,0
+4,43,1,1,3,1
+4,41,4,3,2,1
+4,42,3,4,2,0
+3,46,1,1,3,0
+5,55,4,4,3,1
+5,58,4,4,2,1
+5,87,4,4,3,1
+4,66,2,1,3,0
+0,72,4,3,3,1
+5,60,4,3,3,1
+5,83,4,4,2,1
+4,31,2,1,3,0
+4,53,2,1,3,0
+4,64,2,3,3,0
+5,31,4,4,2,1
+5,62,4,4,2,1
+4,56,2,1,3,0
+5,58,4,4,3,1
+4,67,1,4,3,0
+5,75,4,5,3,1
+5,65,3,4,3,1
+5,74,3,2,3,1
+4,59,2,1,3,0
+4,57,4,4,4,1
+4,76,3,2,3,0
+4,63,1,4,3,0
+4,44,1,1,3,0
+4,42,3,1,2,0
+5,65,4,3,3,1
+4,70,2,1,3,0
+4,48,1,1,3,0
+4,74,1,1,1,1
+4,63,1,1,3,0
+5,60,4,4,3,1
+5,86,4,3,3,1
+4,27,1,1,3,0
+4,71,4,5,2,1
+5,85,4,4,3,1
+4,51,3,3,3,0
+6,72,4,3,3,1
+5,52,4,4,3,1
+4,66,2,1,3,0
+5,71,4,5,3,1
+4,42,2,1,3,0
+4,64,4,4,2,1
+4,41,2,2,3,0
+4,50,2,1,3,0
+4,30,1,1,3,0
+4,67,1,1,3,0
+5,62,4,4,3,1
+4,46,2,1,2,0
+4,35,1,1,3,0
+4,53,1,1,2,0
+4,59,2,1,3,0
+4,19,3,1,3,0
+5,86,2,1,3,1
+4,72,2,1,3,0
+4,37,2,1,2,0
+4,46,3,1,3,1
+4,45,1,1,3,0
+4,48,4,5,3,0
+4,58,4,4,3,1
+4,42,1,1,3,0
+4,56,2,4,3,1
+4,47,2,1,3,0
+4,49,4,4,3,1
+5,76,2,5,3,1
+5,62,4,5,3,1
+5,64,4,4,3,1
+5,53,4,3,3,1
+4,70,4,2,2,1
+5,55,4,4,3,1
+4,34,4,4,3,0
+5,76,4,4,3,1
+4,39,1,1,3,0
+2,23,1,1,3,0
+4,19,1,1,3,0
+5,65,4,5,3,1
+4,57,2,1,3,0
+5,41,4,4,3,1
+4,36,4,5,3,1
+4,62,3,3,3,0
+4,69,2,1,3,0
+4,41,3,1,3,0
+3,51,2,4,3,0
+5,50,3,2,3,1
+4,47,4,4,3,0
+4,54,4,5,3,1
+5,52,4,4,3,1
+4,30,1,1,3,0
+3,48,4,4,3,1
+4,65,2,4,3,1
+4,50,1,1,3,0
+5,65,4,5,3,1
+5,66,4,3,3,1
+6,41,3,3,2,1
+5,72,3,2,3,1
+4,42,1,1,1,1
+4,80,4,4,3,1
+0,45,2,4,3,0
+4,41,1,1,3,0
+4,72,3,3,3,1
+4,60,4,5,3,0
+5,67,4,3,3,1
+4,55,2,1,3,0
+4,61,3,4,3,1
+4,55,3,4,3,1
+4,52,4,4,3,1
+4,42,1,1,3,0
+5,63,4,4,3,1
+4,62,4,5,3,1
+4,46,1,1,3,0
+4,65,2,1,3,0
+4,57,3,3,3,1
+4,66,4,5,3,1
+4,45,1,1,3,0
+4,77,4,5,3,1
+4,35,1,1,3,0
+4,50,4,5,3,1
+4,57,4,4,3,0
+4,74,3,1,3,1
+4,59,4,5,3,0
+4,51,1,1,3,0
+4,42,3,4,3,1
+4,35,2,4,3,0
+4,42,1,1,3,0
+4,43,2,1,3,0
+4,62,4,4,3,1
+4,27,2,1,3,0
+4,57,4,4,3,1
+4,59,2,1,3,0
+5,40,3,2,3,1
+4,20,1,1,3,0
+5,74,4,3,3,1
+4,22,1,1,3,0
+4,57,4,3,3,0
+4,57,4,3,3,1
+4,55,2,1,2,0
+4,62,2,1,3,0
+4,54,1,1,3,0
+4,71,1,1,3,1
+4,65,3,3,3,0
+4,68,4,4,3,0
+4,64,1,1,3,0
+4,54,2,4,3,0
+4,48,4,4,3,1
+4,58,4,3,3,0
+5,58,3,4,3,1
+4,70,1,1,1,0
+5,70,1,4,3,1
+4,59,2,1,3,0
+4,57,2,4,3,0
+4,53,4,5,3,0
+4,54,4,4,3,1
+4,53,2,1,3,0
+0,71,4,4,3,1
+5,67,4,5,3,1
+4,68,4,4,3,1
+4,56,2,4,3,0
+4,35,2,1,3,0
+4,52,4,4,3,1
+4,47,2,1,3,0
+4,56,4,5,3,1
+4,64,4,5,3,0
+5,66,4,5,3,1
+4,62,3,3,3,0
\ No newline at end of file
diff --git a/parthpathak1998@gmail.com/Project/Parth Pathak Assignment - AITS.ipynb b/parthpathak1998@gmail.com/Project/Parth Pathak Assignment - AITS.ipynb
new file mode 100644
index 000000000..732ddfa87
--- /dev/null
+++ b/parthpathak1998@gmail.com/Project/Parth Pathak Assignment - AITS.ipynb
@@ -0,0 +1,1472 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
 + "## Internship Assignment - Write a report comparing 5 classification algorithms."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## AI.Tech Systems https://ai-techsystems.com/"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import numpy as np \n",
+ "import pandas as pd "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "data = pd.read_csv(\"MM.csv\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(830, 6)"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "data.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "
\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " BI-RADS | \n",
+ " Age | \n",
+ " Shape | \n",
+ " Margin | \n",
+ " Density | \n",
+ " Severity | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " 0 | \n",
+ " 5 | \n",
+ " 67 | \n",
+ " 3 | \n",
+ " 5 | \n",
+ " 3 | \n",
+ " 1 | \n",
+ "
\n",
+ " \n",
+ " 1 | \n",
+ " 5 | \n",
+ " 58 | \n",
+ " 4 | \n",
+ " 5 | \n",
+ " 3 | \n",
+ " 1 | \n",
+ "
\n",
+ " \n",
+ " 2 | \n",
+ " 4 | \n",
+ " 28 | \n",
+ " 1 | \n",
+ " 1 | \n",
+ " 3 | \n",
+ " 0 | \n",
+ "
\n",
+ " \n",
+ " 3 | \n",
+ " 5 | \n",
+ " 57 | \n",
+ " 1 | \n",
+ " 5 | \n",
+ " 3 | \n",
+ " 1 | \n",
+ "
\n",
+ " \n",
+ " 4 | \n",
+ " 5 | \n",
+ " 76 | \n",
+ " 1 | \n",
+ " 4 | \n",
+ " 3 | \n",
+ " 1 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " BI-RADS Age Shape Margin Density Severity\n",
+ "0 5 67 3 5 3 1\n",
+ "1 5 58 4 5 3 1\n",
+ "2 4 28 1 1 3 0\n",
+ "3 5 57 1 5 3 1\n",
+ "4 5 76 1 4 3 1"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "data.head()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "BI-RADS int64\n",
+ "Age int64\n",
+ "Shape int64\n",
+ "Margin int64\n",
+ "Density int64\n",
+ "Severity int64\n",
+ "dtype: object"
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "data.dtypes"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import matplotlib.pyplot as plt "
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "427 403\n"
+ ]
+ }
+ ],
+ "source": [
+ "x0 =data.Severity.value_counts()[0]\n",
+ "x1 = data.Severity.value_counts()[1]\n",
+ "print(x0 , x1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWQAAAD7CAYAAABdXO4CAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzt3Xl8VPW9//HXZ7Ykk5ABwi4iChGpqChKtG5oba0bVWvr2qv1drW9v19rey2/W6ujtmqXa7XXatXqte5aW20UF9xRwQCyihqHHWTPnsxMMsv398c5QIBAEsicM8vn+XjEDDNnznwmJu988z3fRYwxKKWUcp/H7QKUUkpZNJCVUipLaCArpVSW0EBWSqksoYGslFJZQgNZKaWyhAayyjoicqKIRESkVUTO38dz/EVEftXXtfUVEVkqIlPcrkNlF9FxyIVLRFYBQ4FUp7sPNcasd6cii4i8AVQbY+7aw+OrsOpOYtX+MfAIcL8xJu1UnUr1NW0hq/OMMWWdPnYLYxHxOVzTQcDSbo45zxjTzz72duAXwIOZLkypTNJAVrsRkdEiYkTk30VkDfCmff/xIjJLRBpFZFHnP7lF5GAReUdEWkTkNRG5W0Qe28trfFdElolIvYhUi8gI+/7lwCHAC3aXRdHeajXGNBljqoGLgStFZIJ9nodF5Nf27QEi8qKIbBGRBvv2yF1qn2nX/rqI/Hlb7Z2+FleKyBoR2Soiv+z03CIRuVNE1tsfd26rWUQG2a/VaL/Pd0XEYz+2SkTOsG9PFpF5ItIsIptE5I6e/99S+UQDWe3NqcB44EwROQCYDvwaGAj8HPiHiAy2j30C+BAYBNwCXLmnk4rI6cBtwDeB4cBq4CkAY8wYYA07Wu7tPSnUGDMHWAec3MXDHuB/sVrTo4AYcHenx58A5gAVQBj4VhfnOAkYB3wJuEFExtv3/xI4HpgIHAVMBq63H/uZXdNgrC6W/wK66iO8C7jLGFMOjAGe6e79qvykgayet1twjSLy/C6PhY0xbcaYGHAF8JIx5iVjTNoY8xowDzhbREYBxwG/Msa0G2NmAi/s5TUvBx4yxsy3A/f/ASeIyOj9fC/rsX5Z7MQYU2eM+YcxJmqMaQF+g/XLhk6132CM6TDGvAdUd3Hum4wxMWPMImARVvhuey83G2M2G2O2ADexI9ATWL9wDjLGJIwx75quL9okgLEiMsgY02qM+WBfvwAqt2kgq/ONMf3tj11HNKztdPsg4BudwrsRq9U4HBgBNBhj2jodv3ovrzmi8+PGmFagDjhgf96I/fz6Xe8UkaCI3Cciq0WkGZgJ9BcRr11LvTEm2ukpa3c9B7Cx0+0oUGbf3um92LdH2Ld/DywDZojIChGZtoe6/x04FPhUROaKyLl7fZcqb2kgq73p3JpbCzzaKbz7G2NKjTG3AxuAASJS2un4UXs573qsgAfAfl4F8Pm+Fioix2EF8ntdPPwzrO6GKrtb4JRtT7NrHygiwU7HH9iLl97pvWC97/UAxpgWY8zPjDGHAOcB14rIl3Y9gTEmYoy5FBgC/BZ4dpevpSoQGsiqpx4DzhORM0XEKyLFIjJFREYaY1ZjdV/cJCIBETkJK4D25Ang2yIy0b4AditQY4xZ1duiRKTcblE+BTxmjFnSxWH9sPqNG0VkIHDjtgc61R62az+hm9p39SRwvYgMFpFBwA1YXytE5FwRGSsiAjRjDdFL7XoCEblCRAbbQ/Ya7bt3O07lPw1k1SPGmLXA17AuTG3BajH/Jzu+hy4DqrC6DG7EGhe8p3O9AfwK+AdWC3UMcEkvS3pBRFrsOn4J3AF8ew/H3gmUAFuBD4BXdnn8cuAErG6TXwNPAz26mGgfPw9YDCwB5tv3AVQCrwOtwGzgHmPM212c46vAUhFpxbrAd4kxJt7D11d5RCeGqIwQkTAw1hhzhdu19JaIPA18aoy5sduDlepD2kJWBU9E
jhORMSLiEZGvYv0lsOuIE6UyzukZWEplo2HAP7EuLK4DfmiMWeBuSaoQaZeFUkplCe2yUEqpLKGBrJRSWUIDWSmlsoQGslJKZQkNZKWUyhIayEoplSU0kJVSKktoICulVJbQQFZKqSyhgayUUllCA1kppbKELi6kCls4dDDWDiFJ+yOBtX7xJsJNzW6WpgqPBrIqdN/BWnR/d+FQFGsB/Y3258631wGLCDdtdqZMVQh0tTeVEaOnTQ9hbfa5p4+BQACrUeC3P09Zdfs5HztR39Rx/quAA64/JfClyQf4TtuPU60HFmDtFDIfWEC4aW8bvCq1R9pCVvtl9LTp/YBJwLHAccBRwEhgXzbpdPL78SSgpT25T3V2tu0XzDnb7wmH6rFCeh4wA3iXcFNiP19HFQANZNVjo6dNLwGOxgreY4FjjTHj7E08c1Gzz9PjvfN6YyDwJfvjF0Az4dAMYDrwknZzqD3RQFZ7NXra9MOB84GpxphJIuLt/HjuZrGjyoGL7I804dA84EVgOuGm+a5WprKKBrLayehp0z1YOzCfb4w5X0TGbntMw7dPeIDJ9sfNhENrgf8F/kq4aa2rlSnXaSArRk+bXgScgRXCU0VkCGgAO+RA4Abgl4RDLwP3AS8Tbkq5W5ZygwZyARs9bfoRWBt6XiEi/UBD2EVe4Fz7Yy3h0INYrebP3S1LOUkDucDYreFvmnTyx+LxTQYN4Sx0IBAGfkU49BJwB+Gmt12tSDlCA7lAjJ42fbAx6R9hzI/F460Qj/6vzwFe4DzgPMKhN4HrCTfNdrkmlUH6U5nnRk+bfphJJX6Bx3uZiCeANoZz1enALLvFfD3hpgVuF6T6ngZynho9bfqB6UT7f4svcJF4/RrD+eNs4CzCoeeAGwg3LXW7INV3NJDzzOhp08tT8dZbPYHg9zz+Ir/b9aiMEOBC4HzCoaewgnm5yzWpPqCBnCdGT5vuS8dbrxV/0fXe4rJ+btejHOEBLgMuJBy6CfgD4aakyzWp/aDrIeeBUT/9+zfTifZVnuKy34rXr2FceIqB24C5hEOT3C5G7TttIeewUdf+4zgwD3iKgke5XYvKChOBGsKhP2J1Y8TcLkj1jraQc9AB3/+rb+SPHrlX/IEPPIESDWPVmRf4ObCEcOh0t4tRvaOBnGOGf+u/j/cUla7w9av4gYhH//+pPRkDvEE49BDhUH+3i1E9oz/QOWLw1OtkxHfuvSMwvPJdb7D8QLfrUTnj28CHhEP6l1QO0EDOAcMu/+24olFHLA0MGvVT8Xi131/11iHAbMKhy90uRO2dBnIWC1ZWyYir776uaPihi3xlA8e7XY/KaSXAY4RDfyIc0vHpWUoDOUtVnPV/QgPP+P67gSEH/1Z8gSK361F54z+AtwiHhrtdiNqdBnIWGvjl708uGXPcx77Q0BPdrkXlpROx+pX1+yvLaCBnkWBllVR89T++VXb46a/7ygaOcLseldeGY7WUf+h2IWoHDeQsEays8pZ+YUq4bMLpf/Xo1GflDD9wD+HQLW4Xoix6xT4LBCurSvodffYDxaOPuVQ8OrZYOe56wqGBwI8JNxm3iylk+sPvstLxpwwInXDxjJJDjr1cw1i56BrgcR2B4S4NABeVV104tv9Jl9cUjRh3ktu1KAVcCjyjoeweDWSX9Jv41WP6TTz7LX/FyEq3a1Gqk/OBZwmHAm4XUog0kF1QNuH008onX/gv/4DhI92uRakuTAX+STik498dpoHssNLDp0wJnXDxI/6BB2gYq2x2DvAk4ZBmhIP0i+2g0vEnnxw64eK/+StGahirXHAB8Hu3iygkGsgOCVZWnVBeddFDgUGjRrldi1K9cC3h0DVuF1EoNJAdEKysOqZ88oUPFA0bO9btWpTaB38iHDrH7SIKgQZyhgUrqw4rm3jW/cUHTjjc7VqU2kde4CnCoaPdLiTfaSBnULCy6uCSMZPvCo45TjeeVLmuDHiRcEivf2SQBnKGBCurhvkGjryl
7IgvTXG7FqX6yAisUNa1VjJEAzkDgpVVJRIo+Wmo6qKzxOvXAfYqnxwFPOJ2EflKA7mPBSurBPhWqOobF3mD5QPdrkepDDifcOi7bheRjzSQ+95ppRO+9O+BIaMPcbsQpTLoj4RDh7pdRL7RQO5DwcqqsYER464NVp5wrNu1KJVhpejqcH1OA7mPBCur+ntLB1xXPmnqqbqMpioQxwI3uV1EPtHg6APByiof4vl+6ISLz/EESsrcrkcpB/2CcOgUt4vIFxrIfeOC0sNPu8AXGqL74KlC4wEeJRwKuV1IPtBA3k/ByqojvWUV3wiOrZrodi1KuWQUcK/bReQDDeT9EKysCgJXlx93/kTx+nTtWFXILiUcOsPtInKdBvL+mVpyyLGH+wceoLt+KGUNhfO6XUQu00DeR8HKqoMlUHJu6eGnTXa7FqWyxATg+24Xkcs0kPdBsLLKB3y7fNJ5h3sCJeVu16NUFrmZcGiA20XkKg3kfTMlMHTM0YHh445yuxClskwFcKPbReQqDeReClZWDUY8l/Q75txjRUTcrkepLPQjwqHD3C4iF2kg94K9cNDlpeNPGesNhoa5XY9SWcoH3OF2EblIA7l3jsbjPabkkGOPdLsQpbLcWYRDZ7ldRK7RQO4h+0LeZaXjTxniKQrqRQuluhd2u4Bco4Hcc5MQz+CSgycd53YhSuWIyYRDJ7tdRC7RQO4Bu3V8kd061kXnleq5/3S7gFyigdwzRyOeISWHaOtYqV46l3BonNtF5AoN5G5sax0HDztpsKeotMLtepTKMQL8zO0icoUGcvcmIjI0eMhx2jpWat/8G+HQULeLyAUayHsRrKzyAhcFx5002FNcOsjtepTKUUXAj90uIhdoIO/dUcCwkoOP1inSSu2fawiHgm4Xke00kPcgWFnlAS4KDB3j9Qb7H+B2PUrluIHA5W4Xke00kPesEhgerDxetzpXqm9c5nYB2U4Dec9OFV9Ryj9olE6TVqpvnEI4NNztIrKZBnIXgpVV5UBVcNwXK8TrL3a7HqXyhAf4pttFZDMN5K4dA3iKRhx2hNuFKJVnLnG7gGymgbwLe4nNr3jLKuLefoMOcbsepfLM8YRDo90uIltpIO9uJDAseOgJo3UBeqUyQlvJe6CBvLtjgXRg6Bi9mKdUZmgg74EGcif2zLwpvv7D2r3B0Ai361EqTx2lWzx1TQN5Z2OAfsUHHqETQZTKrPPcLiAbaSDv7Egg5a84UC/mKZVZp7hdQDbSQN7ZJKDRWz74YLcLUSrPnUQ4pPmzC/2C2IKVVQOAoYGhY4Mef1GZ2/Uolef6AzrOfxcayDscDFA0Ypx2VyjlDO222IUG8g5HAh3+ipEayEo5QwN5FxrIbJ+dNxHxNHrLKg5yux6lCoTuSL0LDWTLEKC8+MDDB4nXV+R2MUoViKG6AerONJAthwAEho7V0RVKOUtbyZ1oIFuOBmLefhXD3C5EqQJzrNsFZJOCD2R7q6YjgEZPST/dyFQpZ1W6XUA2KfhAxhoPGUA8SU+gtMLtYpQqMGPdLiCbaCDDIMD4Kw4cIB6P1+1ilCowBxIO6a48Ng1kK5A9/oEHaHeFUs4T7IvqSgMZYBSQ9JYP1kBWyh3aj2zTQIaDgKi3dMBgtwtRqkBpP7KtoAPZnqE3Emjz6ggLpdyiLWRbQQcyEARKgYSnqEwDWSl3aAvZVuiBPBhIe0sHlIjPr1d6lXJHr2bIiogRkUc7/dsnIltE5MVunjdl2zEiMlVEpu1bub0nIhNF5Ozujiv0QB4EiKekvMTtQpQqYAN6eXwbMEFEtv3cfhn4vDcnMMZUG2Nu7+Xr7o+JgAZyN6xALgrqgkIqp6TShqPva+XcJ6IAvLEiyTH3tTLxL62c9FAby+rTXT5v8aYUJzzYxuH3tHLEva3Ek4b2pOGrj7Ux4Z5W7pnbsf3Y770QY8GGlBNvp5xwSHr5nJeBc+zblwJPbntARCaLyCwRWWB/3m0BIxG5SkTutm8/
LCJ/so9dISIX2fd7ROQeEVkqIi+KyEudHrtBROaKyEcicr+IiH3/2yLyWxGZIyKficjJIhIAbgYuFpGFInLxnt5UoQdyOZDwFAW1u0LllLtqOhg/aMeP7w+nx3n8whIW/qCMy47w8+uZ7bs9J5k2XPHPGH85p5il15Tx9pVB/B54dXmSScO9LP5hKfd/aAXyoo0p0gaOHu7IXCkv0K+Xz3kKuEREirHWMq/p9NinwCnGmKOBG4Bbe3C+4cBJwLnAtpbzhcBorKUVvgOc0On4u40xxxljJgAl9vO28RljJgM/AW40xnTYdTxtjJlojHl6T0UUeiCXASnxl2gLWeWMdc1ppkeSfOeYwPb7RKC53QDQFDeM6Ld7g3PG8iRHDvVy1DArZCuCHrwewe+BWBKSnRrVv3qrnZtPc/THItSbg40xi7HC8lLgpS7O9XcR+Qj4I3B4D075vDEmbYz5GBhq33cS8Hf7/o3AW52OP01EakRkCXD6Lq/xT/vzh3aNPebrzcF5qBRIir9IA1nljJ+8Eud3ZxTT0mG23/fX84o5+4kYJT4oLxI++E7pbs/7rC6NCJz5WBtb2gyXTPBz3YlFfHmMj0cXJ6j6axvXnVhEdW2CScO9jOjnaHttX/axrAb+AEwBOq9DcwvwljHmAhEZDbzdg3N1/pNCdvm8E7tVfg9wrDFmrYiEgc5/ZW87V4peZmyht5CDQMqjgaxyxIufJRhSKkwasXNXwh8/6OCly0pYd20/vj3Rz7Wvxnd7bjIN761J8viFJbx3dSnPfZrkjRVJfB7hia8HWfD9Mr7xBR93ftDBz74Y4NpX41z0TJTq2oQTb21fug0fAm42xizZ5f4QOy7yXbUfNb0HfN3uSx6KFfywo9atIlIGXNSDc7XQg26ZQg9kq4Xs00BWueH9NSmqa5OMvrOFS56N8ebKJOc8EWXRphRVI63G2MUT/Mxau/vFuJHlHk49yMegoIegXzh7rI/5u1y0u2duB1ce5Wf22hQBLzx9UUmX/dEZ0OufQWPMOmPMXV089DvgNhF5H6t/el/9A1gHfATch9VP3WSMaQQeAJYAzwNze3Cut4AvdHdRr9C7LIJAUnwBDWSVE247o5jbzrAaaG+vSvKHWR08f0kJw/7Qymd1KQ6t8PLa8iTjB+/e1jpzjI/fvd9ONGEIeOGd1Ul+evyOfuiGmOHFSJIZVwSprk3iEatvOp505K31uIVsjNmte8MY8zZ214QxZjZwaKeHf9XFMQ8DD9u3r+rq/MaYtIj83BjTKiIVwBysEMYYcz1wfRd1TOl0eyt2H7Ixph44rrv3VuiBXAI0OBXI6+69Gk+gBDwexONl+JV30vbpezS99wSJurUM+7c7KBq+51mkJp1iw99+iq9fBUMuuhGALS/8nsSW1ZSMOY4Bp14JQOP7TxIYcjDByuOdeFs5LW0w3R+V3Xwe4YHzivn6MzE8AgOKhYe+Zg3Rra5NMG99iptPK2ZAiXDtCQGOe6ANAc6u9HHOof7t57n5nXauP7kIEeHMsT7+PLeDI+5t4weTAnt45T6VrY2iF0XEWjMdbrEv7mVMwQayvY5FCbAVr8/f3fF9Zeilt+IN7rigHBh0EIMv+C/qXr272+e2zKvGX3EgpsMae9qxeSUAI66+m42PX0e6vY10op2ODZ/R/8RLM/MG8kxLB1G3a9hXU0b7mDLa+hG+YLyfC8bv/m08dZyfqeN23H/FkQGuOLLrgP3jV3c0Uot9woxv7X5hMIMc6Rfprc4tXidkrA9ZRFJ2f8kiEZkvIl/cj3PdLCJn9GV9WL/xAAzptCOj37viH3Qg/oqR3R6XbN5KbMVcyo76yvb7xOPDJDswJo1JJUE8NL37GP1PviKTJWdSuYOvlQakMW5aHXxNtWctbheQDTLZQo4ZYyYCiMiZwG3AqftyImPMDX1ZmM0H9p+r6aQzvWQibH7GeitlE8+i38Sv9vipDW/cT/8pV29vHYMV5r5+g9nw8P+l7PDT
SDZsACAwdEzf1u0cJzeZbQH8dVHT5uBrqj3TQMa5LotyoGHbP0TkP4FvYvUbPWeMudEeL/gy1lCTL2INW/maMSYmIg8DLxpjnrUX6LgD2ArMBw4xxpxrjwUchbX7wCjgTmPMn/ZSUwp7nKFxqIU87PLf4etXQaqtkU1PX4+/YiTFB07o9nnRZXPwlPanaNhY4msW7/TYwDO+t/325mdvYuCZP6Zp1tN0bF5J8eiJvQr9LDC0+0P6TAMwYFNbWlvI2UEDmcwOeyuxuyw+Bf6KNVgbEfkK1vqnk7EW3JgkIqfYz6kE/myMORxoBL7e+YT2gOz7gLOMMSdhrdbW2WHAmfa5bxSRvfUNJ9keyM60kH39rLHr3tL+BA89gfb1n/Xoee2ff0wsUsO6e69mS/XviK9ezNYX/rDTMdHIBwSGVWIScTq2rmbw+dNoW/oW6cTu41GzmJOBXA8E1rdol0WW0EDGuS6LE4BHRGQC8BX7Y4F9XBlWEK8BVhpjFtr3dzXt8DBghTFmpf3vJ4HvdXp8ujGmHWgXkc1YP+Dr9lDf9hYyqWTGR76nO+Jg0niKgqQ74sRXLiDUwwtvA069igGnXgVAfM1imuc8x6Dzfr79cZNK0jyvmiEX3UCyYT3bJxgZA6kkOHbJcr852WVRDwQ2t5m6VNqkvB7RDW7dY7BWcCt4jnRZGGNmi8ggrBatALcZY+7rfIzdZdH5SmsKaxTETod181K7Pn+P7y8aqTHByqok4Ekn2jPejExFG9nyz19b/0inKf3CqZQcMonoZ7Oof+0+UrEmNj97E4EhBzP04ltIttRR98qfGPqNm7o9d8v86ZRN+BIefzH+wQcDhvUP/oiSMcfiKd6XGamucbKFXId9YTeWpK0s4OgFRbWzVsJNOT/8sC84EsgichjWjJk64FXgFhF53B5wfQDQ0xbqp8AhIjLaGLMK2OOMlx5qBzwmEc94IPv7D2PE1bsPbQse+kWCh+4+AMXXr6LLMC4edSTFo47c6b7y4762/baIMHjqdX1QsSucDORm7Iu6sYRpLQuIBrJ7tLvClslALhGRbd0PAlxpjEkBM0RkPDDbXkK0FbgCq0W7V/YFvmuAV0RkK9bMmf0RBXzpjlhOdbTmMSe7LNqwhr7RlqBNd7h1lQayLWOBbIzZY5+cPf+8qznoEzod84dOt6/qdMxbxpjD7AWh/wzMs48J7/Ia3Q9fsH4Z9DMdUQ3k7OBkC3n7xbzWDr2w57LNbheQLXJxcaHv2i3vpVirOt3XzfF70wb4Um2NekEhOxSPnjbdqa6DVuxrEi3tGsgui7hdQLbIuanTxpg/Yi063RdaAF+ifl29Mem0iCcXf0Hlm6FY/buZ1oYdyE3teoXfZT0b/1kACj2A6oEA6VTadMQa3S5GAc71I8ex+pA9DTFtIbtMA9lW6IG8HvuvhHS8rc7lWpTFkX7k6tqEwZp8FKiL6Ww9l2mXha3QA7kO+0p7KtakgZwdnLyw1wj4N7fpehYuSgPL3C4iW2gg2/2IqdYGDeTs4PhsPZ0+7aq1hJt0lJOt0AN524JHkmrZooGcHRxfz2Jzm4mn0sa1JVgLnHZXdFLQgRyN1CSxVo0rTtSv10DODk5Pn/aDNX3awddVO+gFvU4KOpBt64BgsnFDs0mnHNleV+2Vk10W24fXxRLabeGSpW4XkE00kGE19iJG6fa2epdrUc62kNuw17NoS6CB7I6ZbheQTTSQYSP21yHV1rjB5VqU89OnDUBrh460cJoxpg5tIe9EA7nT0LdE3dqV3RyrMq949LTpoe4P6xM6fdpFIjJTl93cmQbyjhaytK9busrlWpTFqVZyp+nTGsgueMftArJNwQdyNFLTBqwFypKNG5vT7dGG7p6jMs6pQI5jLfvqaYjpKAsXaCDvouAD2TYfa+U4ks1btNvCfY6MtLCnTzeh06cdZ4xpABZ3e2CB0UC2fIb9p2uibs0qd0tROD99OrCpVbssnCQi7xJuSrtdR7bR
QLasxgpkaV/38SqXa1HOz9bzf96ioywcpt0VXdBABqKRmihWKPdLNm1qSbfrym8uc3yz061RnT7tsNfdLiAbaSDvMB+snYeTTZtXuVtKwXN6gaFt06e128IBxphPCDdp/3EXNJB3iGD3I3dsXb3C5VoKnVu7T2u3hQNE5Em3a8hWGsg7rLY/S2zZnM9MKtnhajWFzZXNTnX6tGOecLuAbKWBbItGamLACiBkEvFkomH9x27XVMBcWc9Cp09nXtqYDwk3LXe7jmylgbyzd7DHI7evXaJ9XO5xevo0oNOnneARedztGrKZBvLOFmNvfBlbOX9VOhF3Yvdj1TWnWsmt2D8HOn06s4wxaeApt+vIZhrInUQjNc3AIqACY0xiy+olbtdUwJwaadEOJNHp0xlnYCbhJl1RcS80kHc3E3t95NiKDxe5XEshc3L36WYgsDWq06czySOiF/O6oYG8u0+ADsDfsWnZllSseaPbBRUoJy/sNWDtraeBnCFpY1qBp92uI9tpIO8iGqlpB94HBgN0bFyurWR3ODk5pAGdPp1RxvAXwk16TaYbGshdm409eysamb3EmLQuguI8nT6dJ9LGJL0eudPtOnKBBnLXVmC1moKplq1tyYYNOibZeU4G8lZ0+nTGJFI8Q7jpc7fryAUayF2IRmrSwJtABUC09v333a2oIDnZZbF9bz2dPt23jDGmyCe3ul1HrtBA3rPZ9mdv+/pPNyabt+r6Fs7S6dN5oD3FG4SbdCPTHtJA3oNopKYO6+LeEIDY8jnaSnaWW7tPayD3oWKf/NrtGnKJBvLezQACgMRWzFuRijatd7ugAlI0etr0/g691vZuipZ27bLoK+1Js5Bwky5E3wsayHsRjdSsw5pOPRggumzO264WVHgcnz7dGNcWcl/xe7nJ7RpyjQZy914EggCxyOxIKtqoV4ud41Qg75g+rYHcJ9o6zFzPTc3Pu11HrtFA7t4y4GN2tJL1TzDnODl92tp9OqpdFvsrbYwR4Xtu15GLNJC7EY3UGOA5oBQgFvkgkmprXOduVQXDyaFvjej06T5RHzN/D/6meaHbdeQiDeSeWQYsxR5x0frRGy8bY4y7JRUEx9ezWNesgbw/OlImWuqXa9yuI1dpIPdY4OjhAAAOKElEQVRAp1ZyEJD2dUvXJ7as+tDlsgqB09On/XUx067Tp/ddU9zcWvKbZt21fR9pIPfccuADYARA87znX08n2rW/MbOc7LLQ6dP7qbXDrB1c6vmt23XkMg3kHrJbyc9g7ShSnI61tMeWz33V5bLynSuz9aIJ7bbYF+1Jcw3hpqTbdeQyDeReiEZq6rHWdB0G0Lb0zSXJlrqV7laV15wO5DRANKE7h/RWfcy8U/G7lhfdriPXaSD33kxgDfbCQy0LXppu0mntc8wMV1rIOn26d6IJ05pMm2+6XUc+0EDupWikJgk8DPQDvIktK+vaN9TOcreqvOX09GkBaNbNTnvMGMMnW9L/MeT3LZvdriUfaCDvg2ikZgXwOjAcoOXD6pnp9miDu1XlLSenTwtAU1wnh/TUZ3XpFyfd3/qw23XkCw3kffc8EAdKTaI92frRG//SsckZ4fzu0zp9ukfqoumNCzamL3G7jnyigbyPopGaVuBR7Mki8VULVrevXfKmu1XlJcenT2+NaiB3J5EyycWb0hdf8mxU/5roQxrI+2cusBA4AKB57vPvJZs2R9wtKe+4sfu0hkw3lm5J33Xa39pmul1HvtFA3g/2Vk8PYvU/9gdonPXUc+mOWJOrheUXp3efDnyu06f3ak1TevENb7Vf53Yd+UgDeT9FIzXNwN1ACAiko42xlgUvPaND4fqMky3keiBQFzPtybTRCQ5daIybpiWbUlOraxO6E3sGaCD3gWikZjnwODASe62L+Kr5M1wuK184vfu0DyCe1Mkhu4onTcfrK5KXnvNEdLXbteQrDeS+8ybWWhcjAVoWvDQn0bBeN3fcf04Gcsu2Gzp9emeptDEv
1CZvveiZ6Mtu15LPNJD7iN2f/DesVcMqAJpmPVWdjrfpylf7x8k+5Dbs6dNtHbrAUGdvrkw9+ejihG5YmmEayH0oGqlpw+pPDgLF6XhrR1PNs0+kE+36w73vhjj4Wp2nT2uXhW3W2uTM/5nT8Z3q2oReF8kwDeQ+Fo3UrAH+F2uZTk9i6+r65jn/fNQkE3GXS8tVTk6f3j5br0XXswBg4cbU0tvf67igujYRc7uWQqCBnBnvA68ABwHSsTGyuXn+i4+bdDLhcl25yqluizbsnwmdPg2RutTqO2a3n1tdm6h3u5ZCoYGcAfbayU8D7wKjAWlfu2Rd66IZT+lwuH3i5O7TCcBTHyvsFvKKhvT6P8/tmPrIosQqt2spJBrIGRKN1KSwLvLNAw4EiK2Yt6Lt47ef1TUves353acLOJA/3Zpa+5uZ7Rfd+UHHYrdrKTQayBkUjdQkgPuBT7GHw0Vr3/s0+tnsf2km94rjs/UKdfr0oo2pleG327/94IKO2W7XUog0kDMsGqlpxxp5sRp7P762j15fFF85/xVXC8stjs/WK8Tdp+d8nvzs5nfar3rqo8QbbtdSqDSQHRCN1ESBu4At2K29lgXTa2KrFszQlnKPOB7I9QU2fXrm6uRHv57Zcfk/PknogkEu0kB2iL3mxX9jzQYbDNDy4Quzo7XvP2dMWtcF2DsnuyzqsHefLpTp0zOWJ+f/YVbHZdW1iXlu11LoNJAdZG+S+nsgxo6NUhe3Lp7xhEklO1wtLrs5PX3a3uw0v7st0saYf32amH33nI7LqmsTS9yuR2kgOy4aqdkM3IrVfTESILZszvLmef/6WzrRXhAtsn3g9GanBvJ7+nQsYeJ/ntMx/cEFiSuqaxO1btejLBrILrBbyr8FItiTR9rXLV3f+O6jD6RiullkF5wM5O2/FPN1+vSGlvTmaa/Hn3xtReqa6trECrfrUTtoILvE3gLqTmAO1uQRb7JhfVPDmw88qLuO7CYwetr0AQ691vZWcT7uPj3381TtT16J37ey0fyiujax1u161M40kF1kD4m7H6jGaikXpeOtHfVv3Pdk+4aIjgPdmZO7T3sBmvIokJNpk3xsccesW2a23xZL8pvq2sQWt2tSu9NAdpk9o++fwH1YF/rKMcY0zXpyRsvi155IJzvy8s/mfeBUIHdgTZ/2NsTyo8uiKW6ab3mn/cVnliavAx6prk20u12T6poGchaIRmpMNFLzPnA7UIwdPrHI7EjDmw/cq10YgEND3+zp042APx92n/54S2rlT1+NP7JgY/on1bWJ9+33p7KUBnIWiUZqaoEbgTVY/cr+VEtdW/3rf3kiunzuSyadKpiJCl1wfPfpTW25G8itHab5nrkdb057vf1PW6Pmv6prE7rtUg7QQM4y0UjNFqyxys9gTbUeCNC68OW5Te8/eV8q1rzRzfpc5Hggr2/JvS6LtDFmzufJJT94MfbCK8uSdwL/U12baOn2iSor+NwuQO0uGqlJAi8FK6s+AX4AjALWdWxesbX+1T//tbzq66cHhlWeICLibqWOcnq23vbp0z6P5MTPSX0sveXeuYm5NZ+nPgQeqq7VpTNzjbaQs1g0UrMSCAPvYHVhlJlUItU066nXWhZMfyQVby2kK+VOr2fhB4glsn9ySCptUjOWJ+d8tzpeXfN56i7gZg3j3JQTv/kLWTRSEwtWVj0CLAK+C5QD6+Mr56+Kr1p4b9lRZ04qOeio08QXCLpbaca5Mn06ljRt/YrEqS2kem1ZfXrZffM6PqqtS78HPFZdm9jkdk1q32kg5wB7B5KFwcqq64GrgIlAPSbd1Lrw5XnR2veW9Dv6nFMCQ8dWicfjdbXYzHGyy2Ln6dOlDr5yD61uTEceXtix9MMN6Q3AI8Cc6tqELlKV4zSQc0g0UtMQrKy6EzgCuBw4GNiQjrXEm2Y99Zq/YtS8sqPO/LJ/wPDx7laaEU7uPt15+nRWdVmsa06veHRRYuHsdakW4D3g79W1
iSa361J9Q3Q93twUrKzyAycD3wCKgM+BFEDx6KMPKh1/6pneYPlwF0vMhIGrbj+nIdMvMnWcvwJrrZF1vzgxcNqJo3ynZPo1u7O+Jb36iSWJhTNXpxqB+cDz1bWJNW7XpfqWtpBzlL091JvByqq5wDnAV7BmmW2Kr1qwOr564QOl46ccXnzQkV/0BkP5EszDsIakZVor23afdnH6tDGG9S1m5bMfJ5a8sTLVACwGnquuTax0qyaVWRrIOS4aqWkBngpWVr0DfBM4BmjCmPq2j9/6qO3jtz4qPuioUSVjJh/v6z/ssBwfKjcU+MSB1+kAkljTpx0P5FjCtC3ZnFr496XJlbV16TSwFPgHsEJn2uU3DeQ8EY3UbAhWVv0JOAy4GGuxonZgc3z1ojXx1YvW+AeO7B8cf3JVYPDBR4vXV+RqwfvGsd2np47zb5s+7cjkEGMM65rN8ndWJxc+90myIZHGB3yGFcQRDeLCoIGcR+zRGJ8EK6tuAsYAZwKTsIZwbUrUr2tsev/JVz3FZW+VfmHK0UUjDqvyFAWdWtayLzi9+/TgTE+fjiZM66KNqQXPfpyMROrTgtUyfw94F1ilQVxYNJDzkB3My4BlwcqqIcApwJeBALA1HW9ta5n/Yk3LgulzSsYeP7Z45PgjfKFh48TrC7hZdw84PX36gM+b0819feL6mNkcqUtFPliXWvH2qlR7yuDDuij7CrCwujYR7evXVLlBAznP2VtGPRusrHoJmAych9Wd0Yox9bHI7EgsMjsi/mJfydjJhxYNHzfBFxpSKR5vNn5vOBnIdUCgIU7H/k6fTqRMYl2zWbF0SyryxorkiuUNxguUYvVVv4PVIl6rrWGVjT90KgOikZoo8Hawsupd4HCs7oxt45WbTSLeFP1k5sfRT2Z+LP5iX8nBx4wJDKsc5+s/7FCPvyhbpkY4Hcjbp0/3K6LHs/WSaZOoi5pN65rNug83pJa9viK5OZ6kHGvkRhD4CJgNLKmuTcQyUbzKTRrIBcZeEH8xsDhYWdUfmACcCoy1D2kxiXhj9LNZtdHPZtUCFB044YDA0DEH+cqHjPSWDRjp8Rf3c6d6d2brRROmdU/Tp+NJE93SZjZsaE1vXNlgNi7dktq4eFO6Pm3oD5TZ5zDAq1hBvKK6NqE7jKsuaSAXsGikphHrz+X3gpVVFVgt5i8C4wDBGqXR2L72o8/b1370+bbn+ULD+gWGV470DxhxgLd88EhvsHyEeHx+B0p2evfpNMCmNrMxbdLplg7T3BSnuSFuWtY1p+sWbkxtXNVo2rC6H8qwWtTFWMumfgrUYI2U2KzdEaondKae2k2wsqoUOBQ4FmuadueWXivW4jup7U8QjwSGjR0SGHTQCE8wFPIUl5Z7AqUhT6C4XPzF5X14sbBj1e3nODJcb+o4/yjgBmCdfZdgXRQtwfp6eLAD2z4mAqwANgAbq2sTcSfqVPlFA1ntVbCySoABwAFYa2d8AWtI3bZFjDqwAjpO55DuxFNSXuwLDSn3llWUe4P9yz1FwVLE40FEEOs/iFj/tm+LiJh0OmUS8Vgq2ljsKx88vWjEYbOBt1fdfk7Gv2mnjvMPxNoowGCFsQdra6cNWK3eNfbtLdW1iULeyUX1IQ1k1WvByiofVvfBSKzujUOBwVh/sm9rNW5ba7sDq+uj3b697RvO7HJ7Vz77fH6sLoCHopGaGX36RroxdZz/IKz30wy0VtcmuvyFo1Rf0UBWfcJuSRcDIaw1m8uB/ljBPQQrsENYQe1hR6tz28e2lui2b8gYVsu7BWgCXo5GapY59HaUcoUGsnKdHeadP4w9GkSpgqKBrJRSWUL31FNKqSyhgayUUllCA1kppbKEBrJSSmUJDWSllMoSGshKKZUlNJCVUipLaCArpVSW0EBWSqksoYGslFJZQgNZKaWyhAayUkplCQ1kpZTKEhrISimVJTSQlVIqS2ggK6VUltBAVkqpLPH/AS9LGSbubnUeAAAAAElFTkSu
QmCC\n",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "labels = \"Benign\" , \"Malingant\"\n",
+ "sizes = [x0 , x1]\n",
+ "explode = [0,0.3]\n",
+ "plt.pie(sizes , explode=explode , labels=labels , shadow=True , startangle=90 , autopct='%1.1f%%')\n",
+ "plt.axis(\"equal\")\n",
+ "plt.title(\"Freq of Diagnosis\")\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "data1 = data.loc[data[\"Severity\"]== 0]\n",
+ "y_Val1 = data1.Age.values.reshape(-1,1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "data2 = data.loc[data[\"Severity\"]== 1]\n",
+ "y_Val2 = data1.Age.values.reshape(-1,1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x = data.iloc[:,0:5]\n",
+ "y = data.iloc[:,5]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.model_selection import train_test_split\n",
+ "X_TRAIN , X_TEST , Y_TRAIN , Y_TEST = train_test_split(x, y, test_size=0.2)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 15,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(664, 5)"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "X_TRAIN.shape"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 16,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "(166, 5)"
+ ]
+ },
+ "execution_count": 16,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "X_TEST.shape"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Preprocessing of Data"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn import preprocessing\n",
+ "scaler = preprocessing.StandardScaler()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 18,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "F:\\Anaconda\\lib\\site-packages\\sklearn\\preprocessing\\data.py:645: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n",
+ " return self.partial_fit(X, y)\n",
+ "F:\\Anaconda\\lib\\site-packages\\sklearn\\base.py:464: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n",
+ " return self.fit(X, **fit_params).transform(X)\n",
+ "F:\\Anaconda\\lib\\site-packages\\ipykernel_launcher.py:2: DataConversionWarning: Data with input dtype int64 were all converted to float64 by StandardScaler.\n",
+ " \n"
+ ]
+ }
+ ],
+ "source": [
+ "x_train = scaler.fit_transform(X_TRAIN)\n",
+ "x_test = scaler.transform(X_TEST)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 19,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.decomposition import PCA"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 20,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pca = PCA()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 22,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train = pca.fit_transform(x_train)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 23,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_test = pca.transform(x_test)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "5"
+ ]
+ },
+ "execution_count": 24,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "total = sum(pca.explained_variance_)\n",
+ "k = 0\n",
+ "cur_var = 0 \n",
+ "while cur_var / total < 0.999:\n",
+ " cur_var += pca.explained_variance_[k]\n",
+ " k = k+1\n",
+ "k"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 25,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pca = PCA(n_components=k)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 26,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "x_train_pca = pca.fit_transform(x_train)\n",
+ "x_test_pca = pca.transform(x_test)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## SVM"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 33,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.metrics import confusion_matrix"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 27,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn import svm"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 28,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "clf1 = svm.SVC(kernel='rbf')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 29,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "SVC(C=1.0, cache_size=200, class_weight=None, coef0=0.0,\n",
+ " decision_function_shape='ovr', degree=3, gamma='auto_deprecated',\n",
+ " kernel='rbf', max_iter=-1, probability=False, random_state=None,\n",
+ " shrinking=True, tol=0.001, verbose=False)"
+ ]
+ },
+ "execution_count": 29,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "clf1.fit(x_train_pca,Y_TRAIN)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 30,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pred2 = clf1.predict(x_test_pca)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 31,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.7951807228915663\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(clf1.score(x_test_pca,Y_TEST))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 34,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[65, 14],\n",
+ " [20, 67]], dtype=int64)"
+ ]
+ },
+ "execution_count": 34,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "confusion_matrix(Y_TEST, pred2)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Decision Tree"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 35,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.tree import DecisionTreeClassifier"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 36,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "clf = DecisionTreeClassifier()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 37,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "DecisionTreeClassifier(class_weight=None, criterion='gini', max_depth=None,\n",
+ " max_features=None, max_leaf_nodes=None,\n",
+ " min_impurity_decrease=0.0, min_impurity_split=None,\n",
+ " min_samples_leaf=1, min_samples_split=2,\n",
+ " min_weight_fraction_leaf=0.0, presort=False, random_state=None,\n",
+ " splitter='best')"
+ ]
+ },
+ "execution_count": 37,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "clf.fit(x_train_pca,Y_TRAIN)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 38,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pred3 = clf.predict(x_test_pca)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 39,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.9518072289156626\n",
+ "0.7469879518072289\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(clf.score(x_train_pca,Y_TRAIN))\n",
+ "print(clf.score(x_test_pca,Y_TEST))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 40,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[63, 16],\n",
+ " [26, 61]], dtype=int64)"
+ ]
+ },
+ "execution_count": 40,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "confusion_matrix(Y_TEST, pred3)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Random Forest"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 41,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "F:\\Anaconda\\lib\\site-packages\\sklearn\\ensemble\\forest.py:246: FutureWarning: The default value of n_estimators will change from 10 in version 0.20 to 100 in 0.22.\n",
+ " \"10 in version 0.20 to 100 in 0.22.\", FutureWarning)\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ "(0.9412650602409639, 0.7951807228915663)"
+ ]
+ },
+ "execution_count": 41,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from sklearn.ensemble import RandomForestClassifier\n",
+ "clf = RandomForestClassifier()\n",
+ "clf.fit(x_train_pca, Y_TRAIN)\n",
+ "clf.score(x_train_pca,Y_TRAIN),clf.score(x_test_pca,Y_TEST)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 42,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pred4 = clf.predict(x_test_pca)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 44,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[66, 13],\n",
+ " [21, 66]], dtype=int64)"
+ ]
+ },
+ "execution_count": 44,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "confusion_matrix(Y_TEST , pred4)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "## Boosted Trees"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 76,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.ensemble import GradientBoostingClassifier"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 80,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "0.7831325301204819\n",
+ "0.911144578313253\n"
+ ]
+ }
+ ],
+ "source": [
+ "clf = GradientBoostingClassifier(n_estimators=100, learning_rate=1.0,max_depth=1, random_state=0).fit(x_train_pca, Y_TRAIN)\n",
+ "print(clf.score(x_test_pca, Y_TEST)) \n",
+ "print(clf.score(x_train_pca , Y_TRAIN))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 81,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "pred_6 = clf.predict(x_test_pca)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 82,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[65, 14],\n",
+ " [22, 65]], dtype=int64)"
+ ]
+ },
+ "execution_count": 82,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "confusion_matrix(Y_TEST , pred_6)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Artificial Neural Network"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 47,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X = data.iloc[:, 0:5].values"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 48,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "y = data.iloc[:, 5].values"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 49,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[ 5, 67, 3, 5, 3],\n",
+ " [ 5, 58, 4, 5, 3],\n",
+ " [ 4, 28, 1, 1, 3],\n",
+ " ...,\n",
+ " [ 4, 64, 4, 5, 3],\n",
+ " [ 5, 66, 4, 5, 3],\n",
+ " [ 4, 62, 3, 3, 3]], dtype=int64)"
+ ]
+ },
+ "execution_count": 49,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "X"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 50,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0,\n",
+ " 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0,\n",
+ " 1, 0, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 0,\n",
+ " 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 1,\n",
+ " 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0,\n",
+ " 1, 0, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0,\n",
+ " 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0,\n",
+ " 1, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0,\n",
+ " 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0,\n",
+ " 1, 0, 1, 0, 1, 0, 0, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1,\n",
+ " 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 0, 1,\n",
+ " 0, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 0,\n",
+ " 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1,\n",
+ " 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1,\n",
+ " 0, 1, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0,\n",
+ " 0, 1, 0, 1, 0, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1,\n",
+ " 0, 0, 1, 1, 1, 0, 0, 0, 1, 1, 1, 0, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0,\n",
+ " 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 1, 0,\n",
+ " 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0,\n",
+ " 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 0, 1, 1,\n",
+ " 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0,\n",
+ " 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0,\n",
+ " 1, 1, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0,\n",
+ " 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 0, 1, 0,\n",
+ " 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0,\n",
+ " 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0,\n",
+ " 0, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0,\n",
+ " 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 0,\n",
+ " 0, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1,\n",
+ " 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0,\n",
+ " 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1,\n",
+ " 0, 1, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 1,\n",
+ " 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1,\n",
+ " 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0,\n",
+ " 0, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1,\n",
+ " 1, 1, 0, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1,\n",
+ " 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1,\n",
+ " 0, 0, 0, 1, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 0], dtype=int64)"
+ ]
+ },
+ "execution_count": 50,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "y\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 51,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.model_selection import train_test_split"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 52,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 53,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.preprocessing import StandardScaler"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 54,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "sc = StandardScaler()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 55,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "F:\\Anaconda\\lib\\site-packages\\sklearn\\utils\\validation.py:595: DataConversionWarning: Data with input dtype int64 was converted to float64 by StandardScaler.\n",
+ " warnings.warn(msg, DataConversionWarning)\n",
+ "F:\\Anaconda\\lib\\site-packages\\sklearn\\utils\\validation.py:595: DataConversionWarning: Data with input dtype int64 was converted to float64 by StandardScaler.\n",
+ " warnings.warn(msg, DataConversionWarning)\n"
+ ]
+ }
+ ],
+ "source": [
+ "X_train = sc.fit_transform(X_train)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 56,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "F:\\Anaconda\\lib\\site-packages\\sklearn\\utils\\validation.py:595: DataConversionWarning: Data with input dtype int64 was converted to float64 by StandardScaler.\n",
+ " warnings.warn(msg, DataConversionWarning)\n"
+ ]
+ }
+ ],
+ "source": [
+ "X_test = sc.transform(X_test)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 57,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "F:\\Anaconda\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
+ " from ._conv import register_converters as _register_converters\n",
+ "Using TensorFlow backend.\n"
+ ]
+ }
+ ],
+ "source": [
+ "import keras"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 58,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from keras.models import Sequential"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 59,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from keras.layers import Dense"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 60,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier = Sequential()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 61,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier.add?"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 71,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "Dense?"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 62,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier.add(Dense(units = 3, kernel_initializer='glorot_uniform', activation='relu', input_dim=5))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 63,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier.add(Dense(units = 3, kernel_initializer='glorot_uniform', activation='relu'))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 64,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier.add(Dense(units = 1, kernel_initializer='glorot_uniform', activation='sigmoid'))"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 65,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier.compile?"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 66,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "classifier.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 67,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "F:\\Anaconda\\lib\\site-packages\\ipykernel_launcher.py:1: UserWarning: The `nb_epoch` argument in `fit` has been renamed `epochs`.\n",
+ " \"\"\"Entry point for launching an IPython kernel.\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 1/100\n",
+ "664/664 [==============================] - 1s 2ms/step - loss: 0.6831 - acc: 0.5873\n",
+ "Epoch 2/100\n",
+ "664/664 [==============================] - 0s 209us/step - loss: 0.6425 - acc: 0.6883\n",
+ "Epoch 3/100\n",
+ "664/664 [==============================] - 0s 207us/step - loss: 0.6059 - acc: 0.7410\n",
+ "Epoch 4/100\n",
+ "664/664 [==============================] - 0s 206us/step - loss: 0.5807 - acc: 0.7651\n",
+ "Epoch 5/100\n",
+ "664/664 [==============================] - 0s 207us/step - loss: 0.5608 - acc: 0.7816\n",
+ "Epoch 6/100\n",
+ "664/664 [==============================] - 0s 209us/step - loss: 0.5455 - acc: 0.7816\n",
+ "Epoch 7/100\n",
+ "664/664 [==============================] - 0s 212us/step - loss: 0.5325 - acc: 0.7877\n",
+ "Epoch 8/100\n",
+ "664/664 [==============================] - 0s 225us/step - loss: 0.5186 - acc: 0.7937\n",
+ "Epoch 9/100\n",
+ "664/664 [==============================] - 0s 207us/step - loss: 0.5045 - acc: 0.7967\n",
+ "Epoch 10/100\n",
+ "664/664 [==============================] - 0s 216us/step - loss: 0.4912 - acc: 0.8117\n",
+ "Epoch 11/100\n",
+ "664/664 [==============================] - 0s 227us/step - loss: 0.4789 - acc: 0.8163\n",
+ "Epoch 12/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.4680 - acc: 0.8133\n",
+ "Epoch 13/100\n",
+ "664/664 [==============================] - 0s 219us/step - loss: 0.4572 - acc: 0.8148\n",
+ "Epoch 14/100\n",
+ "664/664 [==============================] - 0s 228us/step - loss: 0.4489 - acc: 0.8178\n",
+ "Epoch 15/100\n",
+ "664/664 [==============================] - 0s 231us/step - loss: 0.4409 - acc: 0.8193\n",
+ "Epoch 16/100\n",
+ "664/664 [==============================] - 0s 224us/step - loss: 0.4333 - acc: 0.8193\n",
+ "Epoch 17/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.4273 - acc: 0.8208\n",
+ "Epoch 18/100\n",
+ "664/664 [==============================] - 0s 216us/step - loss: 0.4230 - acc: 0.8163\n",
+ "Epoch 19/100\n",
+ "664/664 [==============================] - 0s 233us/step - loss: 0.4191 - acc: 0.8223\n",
+ "Epoch 20/100\n",
+ "664/664 [==============================] - 0s 279us/step - loss: 0.4157 - acc: 0.8193\n",
+ "Epoch 21/100\n",
+ "664/664 [==============================] - 0s 270us/step - loss: 0.4127 - acc: 0.8238\n",
+ "Epoch 22/100\n",
+ "664/664 [==============================] - 0s 284us/step - loss: 0.4100 - acc: 0.8223\n",
+ "Epoch 23/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.4077 - acc: 0.8268\n",
+ "Epoch 24/100\n",
+ "664/664 [==============================] - 0s 224us/step - loss: 0.4054 - acc: 0.8283\n",
+ "Epoch 25/100\n",
+ "664/664 [==============================] - 0s 225us/step - loss: 0.4031 - acc: 0.8298\n",
+ "Epoch 26/100\n",
+ "664/664 [==============================] - 0s 225us/step - loss: 0.4010 - acc: 0.8298\n",
+ "Epoch 27/100\n",
+ "664/664 [==============================] - 0s 231us/step - loss: 0.3991 - acc: 0.8313\n",
+ "Epoch 28/100\n",
+ "664/664 [==============================] - 0s 216us/step - loss: 0.3971 - acc: 0.8313\n",
+ "Epoch 29/100\n",
+ "664/664 [==============================] - 0s 233us/step - loss: 0.3951 - acc: 0.8313\n",
+ "Epoch 30/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3942 - acc: 0.8328\n",
+ "Epoch 31/100\n",
+ "664/664 [==============================] - 0s 287us/step - loss: 0.3920 - acc: 0.8343\n",
+ "Epoch 32/100\n",
+ "664/664 [==============================] - 0s 270us/step - loss: 0.3904 - acc: 0.8389\n",
+ "Epoch 33/100\n",
+ "664/664 [==============================] - 0s 257us/step - loss: 0.3896 - acc: 0.8389\n",
+ "Epoch 34/100\n",
+ "664/664 [==============================] - 0s 261us/step - loss: 0.3877 - acc: 0.8404\n",
+ "Epoch 35/100\n",
+ "664/664 [==============================] - 0s 264us/step - loss: 0.3860 - acc: 0.8358\n",
+ "Epoch 36/100\n",
+ "664/664 [==============================] - 0s 251us/step - loss: 0.3835 - acc: 0.8328\n",
+ "Epoch 37/100\n",
+ "664/664 [==============================] - 0s 242us/step - loss: 0.3821 - acc: 0.8343\n",
+ "Epoch 38/100\n",
+ "664/664 [==============================] - 0s 216us/step - loss: 0.3801 - acc: 0.8404\n",
+ "Epoch 39/100\n",
+ "664/664 [==============================] - 0s 225us/step - loss: 0.3796 - acc: 0.8389\n",
+ "Epoch 40/100\n",
+ "664/664 [==============================] - 0s 215us/step - loss: 0.3776 - acc: 0.8389\n",
+ "Epoch 41/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.3768 - acc: 0.8373\n",
+ "Epoch 42/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.3756 - acc: 0.8328\n",
+ "Epoch 43/100\n",
+ "664/664 [==============================] - 0s 215us/step - loss: 0.3744 - acc: 0.8389\n",
+ "Epoch 44/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3740 - acc: 0.8373\n",
+ "Epoch 45/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.3730 - acc: 0.8373\n",
+ "Epoch 46/100\n",
+ "664/664 [==============================] - 0s 224us/step - loss: 0.3726 - acc: 0.8373\n",
+ "Epoch 47/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.3724 - acc: 0.8373\n",
+ "Epoch 48/100\n",
+ "664/664 [==============================] - 0s 215us/step - loss: 0.3714 - acc: 0.8373\n",
+ "Epoch 49/100\n",
+ "664/664 [==============================] - 0s 222us/step - loss: 0.3712 - acc: 0.8343\n",
+ "Epoch 50/100\n",
+ "664/664 [==============================] - 0s 224us/step - loss: 0.3710 - acc: 0.8373\n",
+ "Epoch 51/100\n",
+ "664/664 [==============================] - 0s 228us/step - loss: 0.3705 - acc: 0.8373\n",
+ "Epoch 52/100\n",
+ "664/664 [==============================] - 0s 213us/step - loss: 0.3705 - acc: 0.8404\n",
+ "Epoch 53/100\n",
+ "664/664 [==============================] - 0s 212us/step - loss: 0.3699 - acc: 0.8358\n",
+ "Epoch 54/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3697 - acc: 0.8373\n",
+ "Epoch 55/100\n",
+ "664/664 [==============================] - 0s 216us/step - loss: 0.3690 - acc: 0.8373\n",
+ "Epoch 56/100\n",
+ "664/664 [==============================] - 0s 204us/step - loss: 0.3689 - acc: 0.8389\n",
+ "Epoch 57/100\n",
+ "664/664 [==============================] - 0s 216us/step - loss: 0.3683 - acc: 0.8389\n",
+ "Epoch 58/100\n",
+ "664/664 [==============================] - 0s 213us/step - loss: 0.3686 - acc: 0.8434\n",
+ "Epoch 59/100\n",
+ "664/664 [==============================] - 0s 206us/step - loss: 0.3680 - acc: 0.8419\n",
+ "Epoch 60/100\n",
+ "664/664 [==============================] - 0s 227us/step - loss: 0.3688 - acc: 0.8389\n",
+ "Epoch 61/100\n",
+ "664/664 [==============================] - 0s 219us/step - loss: 0.3675 - acc: 0.8419\n",
+ "Epoch 62/100\n",
+ "664/664 [==============================] - 0s 228us/step - loss: 0.3678 - acc: 0.8449\n",
+ "Epoch 63/100\n",
+ "664/664 [==============================] - 0s 239us/step - loss: 0.3672 - acc: 0.8419\n",
+ "Epoch 64/100\n",
+ "664/664 [==============================] - 0s 219us/step - loss: 0.3678 - acc: 0.8404\n",
+ "Epoch 65/100\n",
+ "664/664 [==============================] - 0s 278us/step - loss: 0.3672 - acc: 0.8509\n",
+ "Epoch 66/100\n",
+ "664/664 [==============================] - 0s 278us/step - loss: 0.3673 - acc: 0.8464\n",
+ "Epoch 67/100\n",
+ "664/664 [==============================] - 0s 249us/step - loss: 0.3669 - acc: 0.8464\n",
+ "Epoch 68/100\n",
+ "664/664 [==============================] - 0s 249us/step - loss: 0.3670 - acc: 0.8419\n",
+ "Epoch 69/100\n",
+ "664/664 [==============================] - 0s 249us/step - loss: 0.3664 - acc: 0.8479\n",
+ "Epoch 70/100\n",
+ "664/664 [==============================] - 0s 269us/step - loss: 0.3669 - acc: 0.8464\n",
+ "Epoch 71/100\n",
+ "664/664 [==============================] - 0s 243us/step - loss: 0.3661 - acc: 0.8464\n",
+ "Epoch 72/100\n",
+ "664/664 [==============================] - 0s 231us/step - loss: 0.3662 - acc: 0.8449\n",
+ "Epoch 73/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3663 - acc: 0.8434\n",
+ "Epoch 74/100\n",
+ "664/664 [==============================] - 0s 227us/step - loss: 0.3666 - acc: 0.8464\n",
+ "Epoch 75/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.3659 - acc: 0.8464 0s - loss: 0.3574 - acc: 0.846\n",
+ "Epoch 76/100\n",
+ "664/664 [==============================] - 0s 242us/step - loss: 0.3655 - acc: 0.8464\n",
+ "Epoch 77/100\n",
+ "664/664 [==============================] - 0s 228us/step - loss: 0.3658 - acc: 0.8479\n",
+ "Epoch 78/100\n",
+ "664/664 [==============================] - 0s 224us/step - loss: 0.3652 - acc: 0.8464\n",
+ "Epoch 79/100\n",
+ "664/664 [==============================] - 0s 221us/step - loss: 0.3651 - acc: 0.8509\n",
+ "Epoch 80/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3652 - acc: 0.8524\n",
+ "Epoch 81/100\n",
+ "664/664 [==============================] - 0s 245us/step - loss: 0.3648 - acc: 0.8464\n",
+ "Epoch 82/100\n",
+ "664/664 [==============================] - 0s 243us/step - loss: 0.3648 - acc: 0.8539\n",
+ "Epoch 83/100\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3646 - acc: 0.8494\n",
+ "Epoch 84/100\n",
+ "664/664 [==============================] - 0s 228us/step - loss: 0.3644 - acc: 0.8494\n",
+ "Epoch 85/100\n",
+ "664/664 [==============================] - 0s 207us/step - loss: 0.3641 - acc: 0.8464\n",
+ "Epoch 86/100\n",
+ "664/664 [==============================] - 0s 206us/step - loss: 0.3642 - acc: 0.8524\n",
+ "Epoch 87/100\n",
+ "664/664 [==============================] - 0s 201us/step - loss: 0.3637 - acc: 0.8509\n",
+ "Epoch 88/100\n",
+ "664/664 [==============================] - 0s 228us/step - loss: 0.3637 - acc: 0.8524\n",
+ "Epoch 89/100\n",
+ "664/664 [==============================] - 0s 218us/step - loss: 0.3639 - acc: 0.8554\n",
+ "Epoch 90/100\n",
+ "664/664 [==============================] - 0s 227us/step - loss: 0.3637 - acc: 0.8539\n",
+ "Epoch 91/100\n",
+ "664/664 [==============================] - 0s 212us/step - loss: 0.3636 - acc: 0.8449\n",
+ "Epoch 92/100\n",
+ "664/664 [==============================] - 0s 201us/step - loss: 0.3639 - acc: 0.8524\n",
+ "Epoch 93/100\n",
+ "664/664 [==============================] - 0s 204us/step - loss: 0.3636 - acc: 0.8464\n",
+ "Epoch 94/100\n",
+ "664/664 [==============================] - 0s 206us/step - loss: 0.3637 - acc: 0.8479\n",
+ "Epoch 95/100\n",
+ "664/664 [==============================] - 0s 201us/step - loss: 0.3629 - acc: 0.8494\n",
+ "Epoch 96/100\n",
+ "664/664 [==============================] - 0s 200us/step - loss: 0.3631 - acc: 0.8479\n",
+ "Epoch 97/100\n",
+ "664/664 [==============================] - 0s 204us/step - loss: 0.3634 - acc: 0.8524\n",
+ "Epoch 98/100\n",
+ "664/664 [==============================] - 0s 203us/step - loss: 0.3633 - acc: 0.8524\n",
+ "Epoch 99/100\n",
+ "664/664 [==============================] - 0s 204us/step - loss: 0.3629 - acc: 0.8509\n",
+ "Epoch 100/100\n",
+ "664/664 [==============================] - 0s 204us/step - loss: 0.3631 - acc: 0.8494\n"
+ ]
+ },
+ {
+ "data": {
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 67,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "classifier.fit(X_train, y_train, batch_size=5, nb_epoch=100)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 68,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "y_pred = classifier.predict(X_test)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 69,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "y_pred = (y_pred > 0.5)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 70,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from sklearn.metrics import confusion_matrix"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 71,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "cm = confusion_matrix(y_test, y_pred)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 72,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "array([[75, 15],\n",
+ " [13, 63]], dtype=int64)"
+ ]
+ },
+ "execution_count": 72,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "cm"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 73,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "0.8132530120481928"
+ ]
+ },
+ "execution_count": 73,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "(75+63) / (75+15+13+63)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 85,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ " precision recall f1-score support\n",
+ "\n",
+ " 0 0.76 0.82 0.79 79\n",
+ " 1 0.83 0.77 0.80 87\n",
+ "\n",
+ " micro avg 0.80 0.80 0.80 166\n",
+ " macro avg 0.80 0.80 0.80 166\n",
+ "weighted avg 0.80 0.80 0.80 166\n",
+ "\n",
+ " precision recall f1-score support\n",
+ "\n",
+ " 0 0.71 0.80 0.75 79\n",
+ " 1 0.79 0.70 0.74 87\n",
+ "\n",
+ " micro avg 0.75 0.75 0.75 166\n",
+ " macro avg 0.75 0.75 0.75 166\n",
+ "weighted avg 0.75 0.75 0.75 166\n",
+ "\n",
+ " precision recall f1-score support\n",
+ "\n",
+ " 0 0.76 0.84 0.80 79\n",
+ " 1 0.84 0.76 0.80 87\n",
+ "\n",
+ " micro avg 0.80 0.80 0.80 166\n",
+ " macro avg 0.80 0.80 0.80 166\n",
+ "weighted avg 0.80 0.80 0.80 166\n",
+ "\n",
+ " precision recall f1-score support\n",
+ "\n",
+ " 0 0.75 0.82 0.78 79\n",
+ " 1 0.82 0.75 0.78 87\n",
+ "\n",
+ " micro avg 0.78 0.78 0.78 166\n",
+ " macro avg 0.78 0.78 0.78 166\n",
+ "weighted avg 0.79 0.78 0.78 166\n",
+ "\n",
+ " precision recall f1-score support\n",
+ "\n",
+ " 0 0.52 0.58 0.55 79\n",
+ " 1 0.58 0.52 0.55 87\n",
+ "\n",
+ " micro avg 0.55 0.55 0.55 166\n",
+ " macro avg 0.55 0.55 0.55 166\n",
+ "weighted avg 0.55 0.55 0.55 166\n",
+ "\n"
+ ]
+ }
+ ],
+ "source": [
+ "from sklearn.metrics import classification_report\n",
+ "print(classification_report(Y_TEST,pred2))\n",
+ "print(classification_report(Y_TEST,pred3))\n",
+ "print(classification_report(Y_TEST,pred4))\n",
+ "print(classification_report(Y_TEST,pred_6))\n",
+ "print(classification_report(Y_TEST,y_pred))"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.4"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/parthpathak1998@gmail.com/Project/Report AITS.docx b/parthpathak1998@gmail.com/Project/Report AITS.docx
new file mode 100644
index 000000000..fed19803e
Binary files /dev/null and b/parthpathak1998@gmail.com/Project/Report AITS.docx differ