What is the code to plot the ROC curves for the classifiers in the following script?
```
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.neighbors import KNeighborsClassifier

# df is the DataFrame loaded earlier (loading step not shown in the question)
corrmat = df.corr()
top_corr_features = corrmat.index
plt.figure(figsize=(16, 16))
# plot heat map
g = sns.heatmap(df[top_corr_features].corr(), annot=True, cmap="RdYlGn")
plt.show()

sns.set_style('whitegrid')
sns.countplot(x='target', data=df, palette='RdBu_r')
plt.show()

dataset = pd.get_dummies(df, columns=['sex', 'cp', 'fbs', 'restecg',
                                      'exang', 'slope', 'ca', 'thal'])

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

standardScaler = StandardScaler()
columns_to_scale = ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']
dataset[columns_to_scale] = standardScaler.fit_transform(dataset[columns_to_scale])
dataset.head()

y = dataset['target']
X = dataset.drop(['target'], axis=1)

from sklearn.model_selection import cross_val_score

knn_scores = []
for k in range(1, 21):
    knn_classifier = KNeighborsClassifier(n_neighbors=k)
    score = cross_val_score(knn_classifier, X, y, cv=10)
    knn_scores.append(score.mean())

plt.plot([k for k in range(1, 21)], knn_scores, color='red')
for i in range(1, 21):
    plt.text(i, knn_scores[i - 1], (i, knn_scores[i - 1]))
plt.xticks([i for i in range(1, 21)])
plt.xlabel('Number of Neighbors (K)')
plt.ylabel('Scores')
plt.title('K Neighbors Classifier scores for different K values')
plt.show()

knn_classifier = KNeighborsClassifier(n_neighbors=12)
score = cross_val_score(knn_classifier, X, y, cv=10)
score.mean()

from sklearn.ensemble import RandomForestClassifier
randomforest_classifier = RandomForestClassifier(n_estimators=10)
score = cross_val_score(randomforest_classifier, X, y, cv=10)
score.mean()
```
Here is the code to plot the ROC curves:
```
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.model_selection import StratifiedKFold
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier

# Stratified 10-fold CV keeps the class balance the same in every split
cv = StratifiedKFold(n_splits=10)
# KNN Classifier ROC Curve
knn_classifier = KNeighborsClassifier(n_neighbors=12)
knn_tprs = []
knn_aucs = []
mean_fpr = np.linspace(0, 1, 100)
fig, ax = plt.subplots()
for i, (train, test) in enumerate(cv.split(X, y)):
    knn_classifier.fit(X.iloc[train], y.iloc[train])
    # probability of the positive class for the held-out fold
    knn_proba = knn_classifier.predict_proba(X.iloc[test])[:, 1]
    knn_fpr, knn_tpr, knn_thresholds = roc_curve(y.iloc[test], knn_proba)
    # interpolate each fold's TPR onto a common FPR grid so folds can be averaged
    knn_tprs.append(np.interp(mean_fpr, knn_fpr, knn_tpr))
    knn_tprs[-1][0] = 0.0
    knn_roc_auc = auc(knn_fpr, knn_tpr)
    knn_aucs.append(knn_roc_auc)
    ax.plot(knn_fpr, knn_tpr, lw=1, alpha=0.3,
            label='KNN fold %d (AUC = %0.2f)' % (i + 1, knn_roc_auc))
# Random Forest Classifier ROC Curve
randomforest_classifier = RandomForestClassifier(n_estimators=10)
rf_tprs = []
rf_aucs = []
for i, (train, test) in enumerate(cv.split(X, y)):
    randomforest_classifier.fit(X.iloc[train], y.iloc[train])
    rf_proba = randomforest_classifier.predict_proba(X.iloc[test])[:, 1]
    rf_fpr, rf_tpr, rf_thresholds = roc_curve(y.iloc[test], rf_proba)
    rf_tprs.append(np.interp(mean_fpr, rf_fpr, rf_tpr))
    rf_tprs[-1][0] = 0.0
    rf_roc_auc = auc(rf_fpr, rf_tpr)
    rf_aucs.append(rf_roc_auc)
    ax.plot(rf_fpr, rf_tpr, lw=1, alpha=0.3,
            label='RF fold %d (AUC = %0.2f)' % (i + 1, rf_roc_auc))
# Plot the mean ROC curves
ax.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',
label='Chance', alpha=.8)
knn_mean_tpr = np.mean(knn_tprs, axis=0)
knn_mean_tpr[-1] = 1.0
knn_mean_auc = auc(mean_fpr, knn_mean_tpr)
std_auc = np.std(knn_aucs)
ax.plot(mean_fpr, knn_mean_tpr, color='b',
label=r'KNN Mean ROC (AUC = %0.2f $\pm$ %0.2f)' % (knn_mean_auc, std_auc),
lw=2, alpha=.8)
rf_mean_tpr = np.mean(rf_tprs, axis=0)
rf_mean_tpr[-1] = 1.0
rf_mean_auc = auc(mean_fpr, rf_mean_tpr)
std_auc = np.std(rf_aucs)
ax.plot(mean_fpr, rf_mean_tpr, color='g',
label=r'RF Mean ROC (AUC = %0.2f $\pm$ %0.2f)' % (rf_mean_auc, std_auc),
lw=2, alpha=.8)
# Set the plot parameters
ax.set(xlim=[-0.05, 1.05], ylim=[-0.05, 1.05],
       xlabel='False Positive Rate', ylabel='True Positive Rate',
       title='KNN vs. Random Forest ROC curves (10-fold CV)')
ax.legend(loc="lower right")
plt.show()
```
This code plots the per-fold ROC curves for the KNN and random forest classifiers, along with each model's mean ROC curve and AUC. Run it after the preprocessing and cross-validation code above, since it reuses X, y, and the same classifier parameters.
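If you only need one overall curve per model rather than one per fold, a shorter alternative is to pool the out-of-fold probabilities from cross_val_predict and compute a single ROC per classifier. This is a minimal sketch, assuming X and y are the feature matrix and 0/1 target built in the code above:
```
from sklearn.model_selection import cross_val_predict
from sklearn.metrics import roc_curve, auc
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
import matplotlib.pyplot as plt

# Out-of-fold probability of the positive class (column 1) for every sample
knn_proba = cross_val_predict(KNeighborsClassifier(n_neighbors=12), X, y,
                              cv=10, method='predict_proba')[:, 1]
rf_proba = cross_val_predict(RandomForestClassifier(n_estimators=10), X, y,
                             cv=10, method='predict_proba')[:, 1]

# One pooled ROC curve per model, built from all out-of-fold predictions
for name, proba in [('KNN (k=12)', knn_proba), ('Random Forest', rf_proba)]:
    fpr, tpr, _ = roc_curve(y, proba)
    plt.plot(fpr, tpr, lw=2, label='%s (AUC = %0.2f)' % (name, auc(fpr, tpr)))

plt.plot([0, 1], [0, 1], linestyle='--', color='grey', label='Chance')
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Pooled cross-validated ROC curves')
plt.legend(loc='lower right')
plt.show()
```
The pooled curve gives one overall estimate per model but hides the fold-to-fold variance that the per-fold plot above makes visible.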