chaitanya9 committed
Commit c731520 · 1 Parent(s): 7b727e4

Upload grid_search.py

Files changed (1)
  1. grid_search.py +60 -0
grid_search.py ADDED
@@ -0,0 +1,60 @@
+ """
+ A script to grid search all parameters provided in parameters.py,
+ covering both classifiers and regressors.
+ Note that running this script may take hours to find the best
+ model parameters for the various algorithms; feel free to edit
+ parameters.py as needed (e.g. remove some parameters for a
+ faster search).
+ """
+
+ import pickle
+
+ from emotion_recognition import EmotionRecognizer
+ from parameters import classification_grid_parameters, regression_grid_parameters
+
+ # emotion classes you want to perform grid search on
+ emotions = ['sad', 'neutral', 'happy']
+ # number of parallel jobs during the grid search
+ n_jobs = 4
+
+ best_estimators = []
+
+ for model, params in classification_grid_parameters.items():
+     if model.__class__.__name__ == "KNeighborsClassifier":
+         # in case of a K-Nearest Neighbors algorithm,
+         # set the number of neighbors to the number of emotion classes
+         params['n_neighbors'] = [len(emotions)]
+     d = EmotionRecognizer(model, emotions=emotions)
+     d.load_data()
+     best_estimator, best_params, cv_best_score = d.grid_search(params=params, n_jobs=n_jobs)
+     best_estimators.append((best_estimator, best_params, cv_best_score))
+     print(f"{emotions} {best_estimator.__class__.__name__} achieved {cv_best_score:.3f} cross validation accuracy score!")
+
+ print(f"[+] Pickling best classifiers for {emotions}...")
+ pickle.dump(best_estimators, open("grid/best_classifiers.pickle", "wb"))
+
+ best_estimators = []
+
+ for model, params in regression_grid_parameters.items():
+     if model.__class__.__name__ == "KNeighborsRegressor":
+         # in case of a K-Nearest Neighbors algorithm,
+         # set the number of neighbors to the number of emotion classes
+         params['n_neighbors'] = [len(emotions)]
+     d = EmotionRecognizer(model, emotions=emotions, classification=False)
+     d.load_data()
+     best_estimator, best_params, cv_best_score = d.grid_search(params=params, n_jobs=n_jobs)
+     best_estimators.append((best_estimator, best_params, cv_best_score))
+     print(f"{emotions} {best_estimator.__class__.__name__} achieved {cv_best_score:.3f} cross validation MAE score!")
+
+ print(f"[+] Pickling best regressors for {emotions}...")
+ pickle.dump(best_estimators, open("grid/best_regressors.pickle", "wb"))
+
+
+
+ # Best for SVC: C=0.001, gamma=0.001, kernel='poly'
+ # Best for AdaBoostClassifier: {'algorithm': 'SAMME', 'learning_rate': 0.8, 'n_estimators': 60}
+ # Best for RandomForestClassifier: {'max_depth': 7, 'max_features': 0.5, 'min_samples_leaf': 1, 'min_samples_split': 2, 'n_estimators': 40}
+ # Best for GradientBoostingClassifier: {'learning_rate': 0.3, 'max_depth': 7, 'max_features': None, 'min_samples_leaf': 1, 'min_samples_split': 2, 'n_estimators': 70, 'subsample': 0.7}
+ # Best for DecisionTreeClassifier: {'criterion': 'entropy', 'max_depth': 7, 'max_features': None, 'min_samples_leaf': 1, 'min_samples_split': 2}
+ # Best for KNeighborsClassifier: {'n_neighbors': 5, 'p': 1, 'weights': 'distance'}
+ # Best for MLPClassifier: {'alpha': 0.005, 'batch_size': 256, 'hidden_layer_sizes': (300,), 'learning_rate': 'adaptive', 'max_iter': 500}
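
Note on using the output: the script above pickles a list of (best_estimator, best_params, cv_best_score) tuples to grid/best_classifiers.pickle and grid/best_regressors.pickle (the loop over classification_grid_parameters.items() also implies that parameters.py maps estimator instances to grids of candidate values). The sketch below is not part of this commit; it only assumes what the script itself writes, and the helper name summarize_grid_results is illustrative.

import pickle

# illustrative helper (not part of this commit): read the tuples pickled
# by grid_search.py and print a short summary of each best estimator
def summarize_grid_results(path):
    with open(path, "rb") as f:
        best_estimators = pickle.load(f)
    for estimator, params, score in best_estimators:
        print(f"{estimator.__class__.__name__}: score={score:.3f}, params={params}")

if __name__ == "__main__":
    summarize_grid_results("grid/best_classifiers.pickle")
    summarize_grid_results("grid/best_regressors.pickle")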