
Commit

Add adam and ridge classifier
vishishtpriyadarshi authored Nov 17, 2021
1 parent 8201e16 commit b290306
Showing 4 changed files with 39 additions and 8 deletions.
17 changes: 16 additions & 1 deletion tests/models/logistic_regression.py
@@ -1,10 +1,25 @@
 from sklearn.linear_model import LogisticRegression
+from sklearn.neural_network import MLPClassifier
 
 
 def logistic_regression(X_train, y_train, X_test):
     print("[Executing]: Running Logistic Regression model ...\n")
 
     # Model Fitting
-    model = LogisticRegression(random_state = 0, solver='lbfgs', multi_class='multinomial')
+    model = LogisticRegression(random_state = 21)
     model.fit(X_train, y_train)
 
     # Predictions on test dataset
     y_pred = model.predict(X_test)
 
     return y_pred
+
+
+def logistic_regression_adam(X_train, y_train, X_test):
+    print("[Executing]: Running Logistic Regression model with Adam ...\n")
+
+    # Model Fitting
+    model = MLPClassifier(hidden_layer_sizes=(1), solver='adam', random_state=48)
+    model.fit(X_train, y_train)
+
+    # Predictions on test dataset
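
Note on the new helper: scikit-learn's LogisticRegression exposes no Adam solver, so logistic_regression_adam stands in with an MLPClassifier that has a single hidden unit trained by Adam; this approximates Adam-trained logistic regression rather than reproducing the same model. Also, hidden_layer_sizes=(1) in the diff is the plain integer 1, so the sketch below writes the one-element tuple (1,). A minimal standalone sketch of the same idea (the synthetic data and max_iter value are illustrative, not from this commit):

    # Sketch only: a tiny MLP as a stand-in for Adam-trained logistic regression,
    # mirroring the new logistic_regression_adam helper (data here is synthetic).
    from sklearn.datasets import make_classification
    from sklearn.model_selection import train_test_split
    from sklearn.neural_network import MLPClassifier

    X, y = make_classification(n_samples=200, n_features=4, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    model = MLPClassifier(hidden_layer_sizes=(1,), solver='adam', max_iter=500, random_state=48)
    model.fit(X_train, y_train)
    print(model.predict(X_test)[:10])
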
10 changes: 5 additions & 5 deletions tests/models/parent_model.py
@@ -32,12 +32,12 @@ def execute_model(X, y, num_splits, seed, model, with_undersampling = False, maj
print("[Testing]: Count of test data before Undersampling = ", X_train.shape[0])
verdict = undersampling_method.undersample(X_train, y_train, majority_class)

# X_train = X_train[verdict, :]
# y_train = y_train[verdict]
X_train = X_train[verdict, :]
y_train = y_train[verdict]

# In-buit near miss algorithm
nr = NearMiss()
X_train, y_train = nr.fit_resample(X_train, y_train)
# nr = NearMiss()
# X_train, y_train = nr.fit_resample(X_train, y_train)

# Note: Be careful while plotting, make sure same features are being compared
# plt.scatter(X_train[:, 0], X_train[:, 1], marker = '.', c = y_train)
@@ -66,7 +66,7 @@ def execute_model(X, y, num_splits, seed, model, with_undersampling = False, maj
     class1_metrics_list = np.mean(class1_metrics_list, axis = 0)
 
 
-    print("Majority Class = Class ", majority_class, "\n")
+    print("\nMajority Class = Class ", majority_class)
     print("\n--------------- Cross-validated Evaluation Metrics ---------------\n")
     print("Accuracy \t= \t", metrics_list[0])
     print("Precision \t= \t", metrics_list[1])
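
For context on the parent_model.py change: the commit switches from imbalanced-learn's NearMiss back to the repository's own undersample(), whose return value verdict is used to index the training rows. A small sketch of that indexing pattern, with a hypothetical keep array standing in for verdict:

    # Sketch of the verdict-based filtering re-enabled above; `keep` is a
    # hypothetical stand-in for the row indices returned by undersample().
    import numpy as np

    X_train = np.arange(20, dtype=float).reshape(10, 2)
    y_train = np.array([0, 0, 0, 0, 0, 0, 0, 1, 1, 1])

    keep = np.array([0, 2, 4, 7, 8, 9])              # rows kept after undersampling
    X_train = X_train[keep, :]
    y_train = y_train[keep]
    print(X_train.shape, np.bincount(y_train))       # (6, 2) [3 3]
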
13 changes: 13 additions & 0 deletions tests/models/ridge_classification.py
@@ -0,0 +1,13 @@
+from sklearn.linear_model import RidgeClassifier
+
+def ridge_classification(X_train, y_train, X_test):
+    print("[Executing]: Running Ridge Regression model ...\n")
+
+    # Model Fitting
+    model = RidgeClassifier(random_state=88)
+    model.fit(X_train, y_train)
+
+    # Predictions on test dataset
+    y_pred = model.predict(X_test)
+
+    return y_pred
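
The new ridge_classification mirrors the logistic regression helper but fits scikit-learn's RidgeClassifier, which treats classification as ridge regression on {-1, 1} targets. A standalone usage sketch on synthetic data (names and sizes are illustrative); note that random_state only affects the 'sag'/'saga' solvers, so the default fit is deterministic anyway:

    # Sketch only: RidgeClassifier on synthetic data, mirroring ridge_classification.
    from sklearn.datasets import make_classification
    from sklearn.linear_model import RidgeClassifier
    from sklearn.model_selection import train_test_split

    X, y = make_classification(n_samples=200, n_features=4, random_state=0)
    X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

    model = RidgeClassifier(random_state=88)
    model.fit(X_train, y_train)
    print(model.predict(X_test)[:10])
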
7 changes: 5 additions & 2 deletions tests/sample_test_1.py
@@ -7,7 +7,8 @@
 # from cobraclassifier import classifier_cobra
 # from cobraclassifier import near_miss_v1, near_miss_v2, near_miss_v3, knn_und, edited_knn, condensed_knn, tomek_link
 
-from models.logistic_regression import logistic_regression
+from models.logistic_regression import logistic_regression, logistic_regression_adam
+from models.ridge_classification import ridge_classification
 from models.adaboost import adaboost_classifier
 from models.parent_model import execute_model
 from undersampling_algorithms import near_miss_v1, near_miss_v2, near_miss_v3, knn_und, edited_knn, condensed_knn, tomek_link
@@ -257,9 +258,11 @@ def main():
     X, y, majority_class_label = prepare_data(seed, choice=ch)
 
     # models = [logistic_regression, adaboost_classifier, classifier_cobra.execute_cobra]
-    models = [classifier_cobra.execute_cobra]
+    # models = [classifier_cobra.execute_cobra]
     # models = [logistic_regression, adaboost_classifier]
     # models = [logistic_regression]
+    # models = [logistic_regression_adam]
+    models = [ridge_classification]
 
     for m in models:
         print("\n\n############################# MODEL -", m.__name__, " #############################")
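
The tail of main() is truncated here, but the visible for m in models loop presumably hands each selected model to execute_model. A hedged sketch of that wiring (the synthetic data, num_splits, and seed are assumptions, and it only runs from the repository's tests directory with its dependencies installed):

    # Hypothetical driver sketch; argument values are assumptions, not from the diff.
    from sklearn.datasets import make_classification

    from models.parent_model import execute_model
    from models.ridge_classification import ridge_classification

    X, y = make_classification(n_samples=300, weights=[0.8, 0.2], random_state=0)
    num_splits, seed = 5, 42

    for m in [ridge_classification]:
        print("\n\n############ MODEL -", m.__name__, " ############")
        execute_model(X, y, num_splits, seed, m)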
