Commit 0.81.1

FBurkhardt committed Mar 21, 2024
1 parent 24b21f9 commit 58da2eb
Showing 10 changed files with 38 additions and 12 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -1,6 +1,11 @@
Changelog
=========

+Version 0.81.1
+--------------
+* fixed bugs in demo module
+* made kernel for SVM/SVR configurable
+
Version 0.81.0
--------------
* added test selection to test module
3 changes: 3 additions & 0 deletions ini_file.md
@@ -252,8 +252,11 @@
* **tree_reg**: Classification tree regressor
* **svm**: Support Vector Machine
    * C_val = 0.001
    * kernel = rbf # ‘linear’, ‘poly’, ‘rbf’, ‘sigmoid’, ‘precomputed’
* **xgb**: XG-Boost
* **svr**: Support Vector Regression
    * C_val = 0.001
    * kernel = rbf # ‘linear’, ‘poly’, ‘rbf’, ‘sigmoid’, ‘precomputed’
* **xgr**: XG-Boost Regression
* **mlp**: Multi-Layer-Perceptron for classification
* **mlp_reg**: Multi-Layer-Perceptron for regression
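As a usage sketch (illustrative only; the INI snippet, option values, and variable names below are assumptions, not taken from a real experiment file), the C_val and kernel options documented above map onto scikit-learn's SVC/SVR roughly like this:

```python
# Minimal sketch of how the [MODEL] options documented above translate into
# scikit-learn estimators. Not nkululeko's actual code path; see
# model_svm.py / model_svr.py further down for the real construction.
import configparser

from sklearn import svm

ini_text = """
[MODEL]
type = svm
C_val = 0.001
kernel = rbf
"""  # hypothetical experiment snippet

config = configparser.ConfigParser()
config.read_string(ini_text)

c = config.getfloat("MODEL", "C_val", fallback=0.001)
kernel = config.get("MODEL", "kernel", fallback="rbf")

# svm (classification) and svr (regression) share the C and kernel settings.
clf = svm.SVC(kernel=kernel, C=c, gamma="scale", probability=True)
reg = svm.SVR(kernel=kernel, C=c)
print(clf, reg)
```

Valid kernel values are the scikit-learn ones listed above (linear, poly, rbf, sigmoid, precomputed).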
2 changes: 1 addition & 1 deletion nkululeko/constants.py
@@ -1,2 +1,2 @@
-VERSION="0.81.0"
+VERSION="0.81.1"
SAMPLING_RATE = 16000
2 changes: 1 addition & 1 deletion nkululeko/demo_predictor.py
@@ -94,7 +94,7 @@ def predict_signal(self, signal, sr):
            return result_dict
        else:
            # experiment is regression and returns one estimation
-            dict_2["predicted"] = result_dict[0]
+            dict_2["predicted"] = result_dict
            print(dict_2)
            return dict_2

9 changes: 7 additions & 2 deletions nkululeko/models/model.py
@@ -269,19 +269,24 @@ def predict(self):
        )
        return report

+    def get_type(self):
+        return "generic"
+
    def predict_sample(self, features):
        """Predict one sample"""
        prediction = {}
        if self.util.exp_is_classification():
            # get the class probabilities
-            predictions = self.clf.predict_proba([features])
+            if not self.get_type() == "xgb":
+                features = [features]
+            predictions = self.clf.predict_proba(features)
            # pred = self.clf.predict(features)
            for i in range(len(self.clf.classes_)):
                cat = self.clf.classes_[i]
                prediction[cat] = predictions[0][i]
        else:
            predictions = self.clf.predict(features)
-            prediction["result"] = predictions[0]
+            prediction = predictions[0]
        return prediction

    def store(self):
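To make the new predict_sample contract concrete, here is a small self-contained toy (assumed from the diff above, not nkululeko code; data and labels are made up): classification yields a dict of class probabilities, while regression now yields the bare estimate, which is why demo_predictor.py above uses result_dict directly instead of result_dict[0].

```python
# Toy reproduction of predict_sample's return shapes after this change
# (assumption based on the diff above; data and labels are made up).
import numpy as np
from sklearn import svm

X = np.linspace(0.0, 1.0, 20).reshape(-1, 1)
y_cls = np.array(["neg"] * 10 + ["pos"] * 10)
y_reg = X.ravel() * 2.0
features = [0.6]

# Classification: a dict mapping each class label to its probability.
clf = svm.SVC(probability=True).fit(X, y_cls)
probs = clf.predict_proba([features])
prediction = {cat: probs[0][i] for i, cat in enumerate(clf.classes_)}
print(prediction)  # e.g. {'neg': 0.03, 'pos': 0.97} (illustrative values)

# Regression: the bare estimate, no longer wrapped in {"result": ...}.
reg = svm.SVR().fit(X, y_reg)
prediction = reg.predict([features])[0]
print(prediction)
```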
2 changes: 1 addition & 1 deletion nkululeko/models/model_mlp_regression.py
@@ -247,4 +247,4 @@ def predict_sample(self, features):
        features = np.reshape(features, (-1, 1)).T
        logits = self.model(features.to(self.device)).reshape(-1)
        a = logits.numpy()
-        return a
+        return a[0]
11 changes: 10 additions & 1 deletion nkululeko/models/model_svm.py
@@ -12,8 +12,17 @@ class SVM_model(Model):
    def __init__(self, df_train, df_test, feats_train, feats_test):
        super().__init__(df_train, df_test, feats_train, feats_test)
        c = float(self.util.config_val("MODEL", "C_val", "0.001"))
+        if eval(self.util.config_val("MODEL", "class_weight", "False")):
+            class_weight = "balanced"
+        else:
+            class_weight = None
+        kernel = self.util.config_val("MODEL", "kernel", "rbf")
        self.clf = svm.SVC(
-            kernel="linear", C=c, gamma="scale", probability=True
+            kernel=kernel,
+            C=c,
+            gamma="scale",
+            probability=True,
+            class_weight=class_weight,
        ) # set up the classifier

    def set_C(self, c):
4 changes: 3 additions & 1 deletion nkululeko/models/model_svr.py
@@ -12,7 +12,9 @@ class SVR_model(Model):
    def __init__(self, df_train, df_test, feats_train, feats_test):
        super().__init__(df_train, df_test, feats_train, feats_test)
        c = float(self.util.config_val("MODEL", "C_val", "0.001"))
-        self.clf = svm.SVR(kernel="rbf", C=c, probability=True) # set up the classifier
+        # kernel{‘linear’, ‘poly’, ‘rbf’, ‘sigmoid’, ‘precomputed’} or callable, default=’rbf’
+        kernel = self.util.config_val("MODEL", "kernel", "rbf")
+        self.clf = svm.SVR(kernel=kernel, C=c) # set up the classifier

    def set_C(self, c):
        """Set the C parameter"""
3 changes: 3 additions & 0 deletions nkululeko/models/model_xgb.py
@@ -10,3 +10,6 @@ class XGB_model(Model):
    is_classifier = True

    clf = XGBClassifier() # set up the classifier
+
+    def get_type(self):
+        return "xgb"
9 changes: 4 additions & 5 deletions nkululeko/utils/util.py
@@ -175,27 +175,26 @@ def get_model_description(self):
        mt = f'{self.config["MODEL"]["type"]}'
        ft = "_".join(ast.literal_eval(self.config["FEATS"]["type"]))
        ft += "_"
-        set = self.config_val("FEATS", "set", False)
-        set_string = ""
-        if set:
-            set_string += set
        layer_string = ""
        layer_s = self.config_val("MODEL", "layers", False)
        if layer_s:
            layers = ast.literal_eval(layer_s)
            sorted_layers = sorted(layers.items(), key=lambda x: x[1])
            for l in sorted_layers:
                layer_string += f"{str(l[1])}-"
-        return_string = f"{mt}_{ft}{set_string}{layer_string[:-1]}"
+        return_string = f"{mt}_{ft}{layer_string[:-1]}"
        options = [
            ["MODEL", "C_val"],
+            ["MODEL", "kernel"],
            ["MODEL", "drop"],
+            ["MODEL", "class_weight"],
            ["MODEL", "loss"],
            ["MODEL", "logo"],
            ["MODEL", "learning_rate"],
            ["MODEL", "k_fold_cross"],
            ["FEATS", "balancing"],
            ["FEATS", "scale"],
+            ["FEATS", "set"],
            ["FEATS", "wav2vec2.layer"],
        ]
        for option in options:
