admin 管理员组

文章数量: 1184232


2024年2月19日发(作者:vb编程的代码有哪些)

# Recursive feature elimination with cross-validation (RFECV).
# NOTE(review): reconstructed from a garbled web scrape that stripped module
# paths and attribute names; this matches the classic scikit-learn RFECV
# example verbatim, which the surrounding text clearly quotes.
import matplotlib.pyplot as plt
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.feature_selection import RFECV
from sklearn.datasets import make_classification

# Build a classification task using 3 informative features
X, y = make_classification(n_samples=1000, n_features=25, n_informative=3,
                           n_redundant=2, n_repeated=0, n_classes=8,
                           n_clusters_per_class=1, random_state=0)

# Create the RFE object and compute a cross-validated score.
svc = SVC(kernel="linear")
# The "accuracy" scoring is proportional to the number of correct
# classifications
rfecv = RFECV(estimator=svc, step=1, cv=StratifiedKFold(2),
              scoring='accuracy')
rfecv.fit(X, y)

print("Optimal number of features : %d" % rfecv.n_features_)
print("Ranking of features : %s" % rfecv.ranking_)

# Plot number of features VS. cross-validation scores
plt.figure()
plt.xlabel("Number of features selected")
plt.ylabel("Cross validation score (nb of correct classifications)")
# grid_scores_ was the attribute name in older scikit-learn releases
# (renamed cv_results_ in 1.0) — kept as in the quoted example.
plt.plot(range(1, len(rfecv.grid_scores_) + 1), rfecv.grid_scores_)
plt.show()

# Univariate feature selection on the iris data with SelectFpr / SelectFdr /
# SelectFwe.
# NOTE(review): reconstructed from a garbled web scrape — module paths,
# the load_iris import, and attribute accesses (.fit, .scores_, .pvalues_,
# .transform) were stripped by the scraper. Chinese comments translated.
from sklearn.feature_selection import (SelectFdr, f_classif, SelectFpr,
                                       SelectFwe, chi2, mutual_info_classif)
from sklearn.datasets import load_iris

iris = load_iris()
X, y = iris.data, iris.target

# SelectFpr: alpha is the upper bound on the expected false positive rate
# (default 0.5); score_func defaults to f_classif.
# NOTE(review): mutual_info_classif returns scores only (no p-values) —
# pvalues_ may be None here; confirm against the scikit-learn version used.
selector1 = SelectFpr(score_func=mutual_info_classif, alpha=0.5)
selector1.fit(X, y)
print("\nScores of features %s" % selector1.scores_)
print("p-values of feature scores is %s" % selector1.pvalues_)
# print("Shape after transform is ", selector1.transform(X).shape)

# SelectFdr: alpha is the upper bound on the expected false discovery rate.
selector2 = SelectFdr(score_func=f_classif, alpha=4.37695696e-80)
selector2.fit(X, y)
print("\nScores of features %s" % selector2.scores_)
print("p-values of feature scores is %s" % selector2.pvalues_)
print("Shape after transform is ", selector2.transform(X).shape)

# SelectFwe: alpha is the upper bound on the family-wise error rate.
selector3 = SelectFwe(score_func=chi2, alpha=1)
selector3.fit(X, y)
print("\nScores of features %s" % selector3.scores_)
print("p-values of feature scores is %s" % selector3.pvalues_)
print("Shape after transform is ", selector3.transform(X).shape)

Sample output:
Scores of features [ 119.26450218 47.3644614 1179.0343277 959.32440573]
p-values of feature scores is [ 1.66966919e-31 1.32791652e-16 3.05197580e-91 4.37695696e-85]
Shape after transform is (150, 4)
Support is [ True True True True]
Params is {'mode': 'fpr', 'param': 0.5, 'score_func': <function f_classif>} (output truncated in the original source)


本文标签: 编程 错误 发现 上限 预期