فهرست منبع

Added Cagri's cleanup and a fix in the LR-tester function.

Kristian Schultz 4 سال پیش
والد
کامیت
2099e9b768
3 فایل‌های تغییر یافته به همراه 7 افزوده شده و 29 حذف شده
  1. 2 2
      library/SimpleGan.py
  2. 1 2
      library/exercise.py
  3. 4 25
      library/testers.py

+ 2 - 2
library/SimpleGan.py

@@ -15,7 +15,7 @@ from library.interfaces import GanBaseClass
 from keras.layers import Dense, Dropout, Input
 from keras.models import Model, Sequential
 from keras.layers.advanced_activations import LeakyReLU
-from keras.optimizers import Adam
+from tensorflow.keras.optimizers import Adam
 
 
 class SimpleGan(GanBaseClass):
@@ -122,7 +122,7 @@ class SimpleGan(GanBaseClass):
 
                 # During the training of gan,
                 # the weights of discriminator should be fixed.
-                #We can enforce that by setting the trainable flag
+                #We can enforce that by setting the trainable flag.
                 self.discriminator.trainable=False
 
                 #training  the GAN by alternating the training of the Discriminator

+ 1 - 2
library/exercise.py

@@ -13,7 +13,7 @@ from sklearn.preprocessing import StandardScaler
 import matplotlib.pyplot as plt
 
 from library.dataset import DataSet, TrainTestData
-from library.testers import lr, svm, knn, gb, TestResult
+from library.testers import lr,knn, gb, TestResult
 
 
 class Exercise:
@@ -47,7 +47,6 @@ class Exercise:
         if self.testFunctions is None:
             self.testFunctions = {
                 "LR": lr,
-                "SVM": svm,
                 "GB": gb,
                 "KNN": knn
                 }

+ 4 - 25
library/testers.py

@@ -12,12 +12,10 @@ from sklearn.linear_model import LogisticRegression
 from sklearn.metrics import confusion_matrix
 from sklearn.metrics import average_precision_score
 from sklearn.metrics import f1_score
-from sklearn.metrics import balanced_accuracy_score
 from sklearn.metrics import cohen_kappa_score
 from sklearn.ensemble import GradientBoostingClassifier
 
 _tF1 = "f1 score"
-_tBalAcc = "balanced accuracy"
 _tTN = "TN"
 _tTP = "TP"
 _tFN = "FN"
@@ -45,13 +43,12 @@ class TestResult:
         *aps* is a real number representing the average precision score.
         """
         self.title = title
-        self.heading = [_tTN, _tTP, _tFN, _tFP, _tF1, _tBalAcc, _tCks]
+        self.heading = [_tTN, _tTP, _tFN, _tFP, _tF1, _tCks]
         if aps is not None:
             self.heading.append(_tAps)
         self.data = { n: 0.0 for n in self.heading }
 
         if labels is not None and prediction is not None:
-            self.data[_tBalAcc] = balanced_accuracy_score(labels, prediction)
             self.data[_tF1]     = f1_score(labels, prediction)
             self.data[_tCks]    = cohen_kappa_score(labels, prediction)
             conMat = self._enshureConfusionMatrix(confusion_matrix(labels, prediction))
@@ -74,8 +71,8 @@ class TestResult:
         tp = self.data[_tTP]
         fn = self.data[_tFN]
         fp = self.data[_tFP]
-        text += f"{self.title} tn, fp: {tn}, {tp}\n"
-        text += f"{self.title} fn, tp: {fn}, {tp}"
+        text += f"{self.title} tn, fp: {tn}, {fp}\n"
+        text += f"{self.title} fn, tp: {fn}, {tp}\n"
 
         for k in self.heading:
             if k not in [_tTP, _tTN, _tFP, _tFN]:
@@ -161,6 +158,7 @@ def lr(ttd):
     logreg = LogisticRegression(
         C=1e5,
         solver='lbfgs',
+        max_iter=1000,
         multi_class='multinomial',
         class_weight={0: 1, 1: 1.3}
         )
@@ -173,25 +171,6 @@ def lr(ttd):
     return TestResult("LR", ttd.test.labels, prediction, aps_lr)
 
 
-def svm(ttd):
-    """
-    Runs a test for a dataset with the support vector machine algorithm.
-    It returns a /TestResult./
-
-    *ttd* is a /library.dataset.TrainTestData/ instance containing data to test.
-    """
-    checkType(ttd)
-    svmTester = sklearn.svm.SVC(
-        kernel='linear',
-        decision_function_shape='ovo',
-        class_weight={0: 1., 1: 1.},
-        probability=True
-        )
-    svmTester.fit(ttd.train.data, ttd.train.labels)
-
-    prediction = svmTester.predict(ttd.test.data)
-    return TestResult("SVM", ttd.test.labels, prediction)
-
 
 def knn(ttd):
     """