# Model-training utilities for a Streamlit classification app.
import streamlit as st
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier, AdaBoostClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from xgboost import XGBClassifier
@st.cache_data
def train_selected_model(X_train, Y_train, model_type, model_params=None):
    """
    Train a classification model selected by a numeric code.

    Parameters:
    - X_train (array-like): The training input samples.
    - Y_train (array-like): The target labels for classification.
    - model_type (int): Specifies the type of classification model to be trained.
        1 for Logistic Regression, 2 for Support Vector Machine (SVM), 3 for Naive Bayes,
        4 for Random Forest, 5 for AdaBoost, 6 for XGBoost, and 7 for Gradient Boosting.
    - model_params (dict, optional): A dictionary of parameters for the model. Defaults to None.

    Returns:
    - model: The trained model object based on the specified type.

    Raises:
    - ValueError: If model_type is not one of the integers 1 through 7.
      (Previously an unknown model_type silently returned None.)
    """
    # Dispatch table: numeric code -> trainer function.
    trainers = {
        1: LogisticRegression_train,
        2: SVM_train,
        3: NaiveBayes_train,
        4: RandomForest_train,
        5: AdaBoost_train,
        6: XGBoost_train,
        7: GradientBoosting_train,
    }
    try:
        trainer = trainers[model_type]
    except KeyError:
        raise ValueError(
            f"Unknown model_type {model_type!r}; expected an integer from 1 to 7."
        ) from None
    # Pass model_params by keyword: RandomForest_train has extra positional
    # parameters (n_estimators, random_state) before model_params.
    return trainer(X_train, Y_train, model_params=model_params)
def LogisticRegression_train(X_train, Y_train, model_params=None):
    """Fit a logistic-regression classifier on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - model_params (dict, optional): Keyword arguments forwarded to
      ``LogisticRegression``; ``None`` means library defaults.

    Returns:
    - The fitted ``LogisticRegression`` instance.
    """
    params = {} if model_params is None else model_params
    model = LogisticRegression(**params)
    # sklearn estimators return self from fit(), so this is the fitted model.
    return model.fit(X_train, Y_train)
def SVM_train(X_train, Y_train, model_params=None):
    """Fit a support-vector classifier (SVC) on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - model_params (dict, optional): Keyword arguments forwarded to ``SVC``;
      ``None`` means library defaults.

    Returns:
    - The fitted ``SVC`` instance.
    """
    kwargs = {} if model_params is None else model_params
    classifier = SVC(**kwargs)
    classifier.fit(X_train, Y_train)
    return classifier
def NaiveBayes_train(X_train, Y_train, model_params=None):
    """Fit a Gaussian naive-Bayes classifier on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - model_params (dict, optional): Keyword arguments forwarded to
      ``GaussianNB``; ``None`` means library defaults.

    Returns:
    - The fitted ``GaussianNB`` instance.
    """
    estimator = GaussianNB(**(model_params if model_params is not None else {}))
    estimator.fit(X_train, Y_train)
    return estimator
def RandomForest_train(X_train, Y_train, n_estimators=100, random_state=None, model_params=None):
    """Fit a random-forest classifier on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - n_estimators (int): Number of trees; overridden if present in model_params.
    - random_state (int, optional): Seed; overridden if present in model_params.
    - model_params (dict, optional): Extra keyword arguments for
      ``RandomForestClassifier``; entries here take precedence over the
      explicit n_estimators/random_state arguments.

    Returns:
    - The fitted ``RandomForestClassifier`` instance.
    """
    overrides = model_params if model_params is not None else {}
    # Dict-literal merge: overrides win over the explicit defaults.
    forest = RandomForestClassifier(
        **{"n_estimators": n_estimators, "random_state": random_state, **overrides}
    )
    forest.fit(X_train, Y_train)
    return forest
def AdaBoost_train(X_train, Y_train, model_params=None):
    """Fit an AdaBoost classifier on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - model_params (dict, optional): Keyword arguments forwarded to
      ``AdaBoostClassifier``; ``None`` means library defaults.

    Returns:
    - The fitted ``AdaBoostClassifier`` instance.
    """
    if model_params is None:
        model_params = {}
    booster = AdaBoostClassifier(**model_params)
    # fit() returns self, so the fitted estimator is returned directly.
    return booster.fit(X_train, Y_train)
def XGBoost_train(X_train, Y_train, model_params=None):
    """Fit an XGBoost classifier on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - model_params (dict, optional): Keyword arguments forwarded to
      ``XGBClassifier``; ``None`` means library defaults.

    Returns:
    - The fitted ``XGBClassifier`` instance.
    """
    params = {} if model_params is None else model_params
    booster = XGBClassifier(**params)
    booster.fit(X_train, Y_train)
    return booster
def GradientBoosting_train(X_train, Y_train, model_params=None):
    """Fit a gradient-boosting classifier on the training data.

    Parameters:
    - X_train (array-like): Training feature matrix.
    - Y_train (array-like): Training labels.
    - model_params (dict, optional): Keyword arguments forwarded to
      ``GradientBoostingClassifier``; ``None`` means library defaults.

    Returns:
    - The fitted ``GradientBoostingClassifier`` instance.
    """
    kwargs = model_params if model_params is not None else {}
    estimator = GradientBoostingClassifier(**kwargs)
    estimator.fit(X_train, Y_train)
    return estimator