```python
import seaborn as sns

sns.set()
plt.figure(figsize=(11, 8), dpi=100)
data_dummies.corr()['Churn'].sort_values(ascending=False).plot(kind='bar')
plt.show()
```
This code uses the Python visualization libraries seaborn and matplotlib to draw a bar chart of feature correlations. Line by line:
1. `import seaborn as sns` imports the seaborn library.
2. `sns.set()` applies seaborn's default plot style.
3. `plt.figure(figsize=(11,8), dpi=100)` creates a new figure with the given size and resolution; this assumes `matplotlib.pyplot` has already been imported as `plt`.
4. `data_dummies.corr()['Churn'].sort_values(ascending=False).plot(kind='bar')` computes the correlation coefficient between every column and the target variable Churn, sorts the values in descending order, and plots them as a bar chart.
5. `plt.show()` displays the figure.
The result is a bar chart whose x-axis lists the column names and whose y-axis shows each column's correlation with Churn; the height and sign of each bar indicate the strength and direction of the correlation.
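A minimal runnable sketch of the same plot, adding the `matplotlib.pyplot` import the snippet leaves implicit and a toy stand-in for `data_dummies` (the column names and values below are invented for illustration):
```python
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sns

sns.set()  # apply seaborn's default style

# Toy stand-in for the real one-hot-encoded churn DataFrame (values are made up)
data_dummies = pd.DataFrame({
    'Churn':             [1, 0, 1, 0, 1, 0, 1, 0],
    'MonthlyCharges':    [90, 30, 85, 25, 95, 40, 80, 35],
    'tenure':            [2, 40, 5, 55, 1, 60, 3, 48],
    'Contract_Two year': [0, 1, 0, 1, 0, 1, 0, 1],
})

plt.figure(figsize=(11, 8), dpi=100)
data_dummies.corr()['Churn'].sort_values(ascending=False).plot(kind='bar')
plt.ylabel('Correlation with Churn')
plt.show()
```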
Related questions
```python
import seaborn as sns
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression

df = pd.read_csv('heart.csv')
corrmat = df.corr()
top_corr_features = corrmat.index
plt.figure(figsize=(16,16))
sns.heatmap(df[top_corr_features].corr(), annot=True, cmap="RdYlGn")
plt.show()
sns.set_style('whitegrid')
sns.countplot(x='target', data=df, palette='RdBu_r')
plt.show()
dataset = pd.get_dummies(df, columns=['sex', 'cp', 'fbs', 'restecg', 'exang', 'slope', 'ca', 'thal'])
columns_to_scale = ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']
scaler = StandardScaler()
dataset[columns_to_scale] = scaler.fit_transform(dataset[columns_to_scale])
dataset.head()
y = dataset['target']
X = dataset.drop(['target'], axis=1)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
logreg = LogisticRegression()
logreg.fit(X_train, y_train)
print("Training accuracy: {:.3f}".format(logreg.score(X_train, y_train)))
print("Test accuracy: {:.3f}".format(logreg.score(X_test, y_test)))
```
Rewrite this so the model is evaluated with an ROC curve.
Below is the code with the Logistic Regression model's evaluation changed to an ROC curve:
```python
import seaborn as sns
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_curve, auc
df = pd.read_csv('heart.csv')
# Inspect feature correlations
corrmat = df.corr()
top_corr_features = corrmat.index
plt.figure(figsize=(16,16))
sns.heatmap(df[top_corr_features].corr(),annot=True,cmap="RdYlGn")
plt.show()
# Inspect the target distribution
sns.set_style('whitegrid')
sns.countplot(x='target',data=df,palette='RdBu_r')
plt.show()
# One-hot encode categorical features and standardize numeric ones
dataset = pd.get_dummies(df, columns=['sex', 'cp', 'fbs', 'restecg', 'exang', 'slope', 'ca', 'thal'])
columns_to_scale = ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']
scaler = StandardScaler()
dataset[columns_to_scale] = scaler.fit_transform(dataset[columns_to_scale])
dataset.head()
# Split the dataset
y = dataset['target']
X = dataset.drop(['target'], axis=1)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=0)
# Train the model
logreg = LogisticRegression()
logreg.fit(X_train, y_train)
# Evaluate the model
y_train_pred = logreg.predict_proba(X_train)[:, 1]
y_test_pred = logreg.predict_proba(X_test)[:, 1]
fpr_train, tpr_train, thresholds_train = roc_curve(y_train, y_train_pred)
fpr_test, tpr_test, thresholds_test = roc_curve(y_test, y_test_pred)
roc_auc_train = auc(fpr_train, tpr_train)
roc_auc_test = auc(fpr_test, tpr_test)
# Plot the ROC curves
plt.figure()
plt.plot(fpr_train, tpr_train, color='darkorange', lw=2, label='Train ROC curve (area = %0.2f)' % roc_auc_train)
plt.plot(fpr_test, tpr_test, color='navy', lw=2, label='Test ROC curve (area = %0.2f)' % roc_auc_test)
plt.plot([0, 1], [0, 1], color='black', lw=2, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic')
plt.legend(loc="lower right")
plt.show()
```
In this code, `roc_curve` computes the FPR and TPR for the training and test sets, `auc` computes the area under each ROC curve, and `matplotlib` draws the curves.
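If only the AUC numbers are needed rather than the full curves, scikit-learn's `roc_auc_score` returns the same area in a single call; a short sketch reusing `logreg`, `X_train`, `X_test`, `y_train`, and `y_test` from the block above:
```python
from sklearn.metrics import roc_auc_score

# Same areas as auc(fpr, tpr) above, computed directly from the positive-class probabilities
print("Train AUC: {:.3f}".format(roc_auc_score(y_train, logreg.predict_proba(X_train)[:, 1])))
print("Test AUC:  {:.3f}".format(roc_auc_score(y_test, logreg.predict_proba(X_test)[:, 1])))
```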
```python
import seaborn as sns

corrmat = df.corr()
top_corr_features = corrmat.index
plt.figure(figsize=(16,16))
# plot heat map
g = sns.heatmap(df[top_corr_features].corr(), annot=True, cmap="RdYlGn")
plt.show()
sns.set_style('whitegrid')
sns.countplot(x='target', data=df, palette='RdBu_r')
plt.show()
dataset = pd.get_dummies(df, columns=['sex', 'cp', 'fbs', 'restecg', 'exang', 'slope', 'ca', 'thal'])
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
standardScaler = StandardScaler()
columns_to_scale = ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']
dataset[columns_to_scale] = standardScaler.fit_transform(dataset[columns_to_scale])
dataset.head()
y = dataset['target']
X = dataset.drop(['target'], axis=1)
from sklearn.model_selection import cross_val_score
knn_scores = []
for k in range(1, 21):
    knn_classifier = KNeighborsClassifier(n_neighbors=k)
    score = cross_val_score(knn_classifier, X, y, cv=10)
    knn_scores.append(score.mean())
plt.plot([k for k in range(1, 21)], knn_scores, color='red')
for i in range(1, 21):
    plt.text(i, knn_scores[i - 1], (i, knn_scores[i - 1]))
plt.xticks([i for i in range(1, 21)])
plt.xlabel('Number of Neighbors (K)')
plt.ylabel('Scores')
plt.title('K Neighbors Classifier scores for different K values')
plt.show()
knn_classifier = KNeighborsClassifier(n_neighbors=12)
score = cross_val_score(knn_classifier, X, y, cv=10)
score.mean()
from sklearn.ensemble import RandomForestClassifier
randomforest_classifier = RandomForestClassifier(n_estimators=10)
score = cross_val_score(randomforest_classifier, X, y, cv=10)
score.mean()
```
Provide the ROC-curve code for the above.
Here is the code to plot the ROC curves:
```python
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.model_selection import StratifiedKFold
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier

cv = StratifiedKFold(n_splits=10)
# KNN Classifier ROC Curve
knn_classifier = KNeighborsClassifier(n_neighbors=12)
knn_tprs = []
knn_aucs = []
mean_fpr = np.linspace(0, 1, 100)
fig, ax = plt.subplots()
for i, (train, test) in enumerate(cv.split(X, y)):
    knn_classifier.fit(X.iloc[train], y.iloc[train])
    knn_proba = knn_classifier.predict_proba(X.iloc[test])[:, 1]
    knn_fpr, knn_tpr, knn_thresholds = roc_curve(y.iloc[test], knn_proba)
    knn_tprs.append(np.interp(mean_fpr, knn_fpr, knn_tpr))
    knn_tprs[-1][0] = 0.0
    knn_roc_auc = auc(knn_fpr, knn_tpr)
    knn_aucs.append(knn_roc_auc)
    ax.plot(knn_fpr, knn_tpr, lw=1, alpha=0.3,
            label='ROC fold %d (AUC = %0.2f)' % (i + 1, knn_roc_auc))
# Random Forest Classifier ROC Curve
randomforest_classifier = RandomForestClassifier(n_estimators=10)
rf_tprs = []
rf_aucs = []
for i, (train, test) in enumerate(cv.split(X, y)):
    randomforest_classifier.fit(X.iloc[train], y.iloc[train])
    rf_proba = randomforest_classifier.predict_proba(X.iloc[test])[:, 1]
    rf_fpr, rf_tpr, rf_thresholds = roc_curve(y.iloc[test], rf_proba)
    rf_tprs.append(np.interp(mean_fpr, rf_fpr, rf_tpr))
    rf_tprs[-1][0] = 0.0
    rf_roc_auc = auc(rf_fpr, rf_tpr)
    rf_aucs.append(rf_roc_auc)
    ax.plot(rf_fpr, rf_tpr, lw=1, alpha=0.3,
            label='ROC fold %d (AUC = %0.2f)' % (i + 1, rf_roc_auc))
# Plot the mean ROC curves
ax.plot([0, 1], [0, 1], linestyle='--', lw=2, color='r',
label='Chance', alpha=.8)
knn_mean_tpr = np.mean(knn_tprs, axis=0)
knn_mean_tpr[-1] = 1.0
knn_mean_auc = auc(mean_fpr, knn_mean_tpr)
std_auc = np.std(knn_aucs)
ax.plot(mean_fpr, knn_mean_tpr, color='b',
label=r'KNN Mean ROC (AUC = %0.2f $\pm$ %0.2f)' % (knn_mean_auc, std_auc),
lw=2, alpha=.8)
rf_mean_tpr = np.mean(rf_tprs, axis=0)
rf_mean_tpr[-1] = 1.0
rf_mean_auc = auc(mean_fpr, rf_mean_tpr)
std_auc = np.std(rf_aucs)
ax.plot(mean_fpr, rf_mean_tpr, color='g',
label=r'RF Mean ROC (AUC = %0.2f $\pm$ %0.2f)' % (rf_mean_auc, std_auc),
lw=2, alpha=.8)
# Set the plot parameters
ax.set(xlim=[-0.05, 1.05], ylim=[-0.05, 1.05],
title="Receiver operating characteristic example")
ax.legend(loc="lower right")
plt.show()
```
This code plots the per-fold ROC curves for the KNN and random-forest classifiers, together with their mean curves and AUC values. Use it with your own dataset and classifier parameters.
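As a quicker cross-check of the mean AUC values (without drawing the per-fold curves), `cross_val_score` also accepts `scoring='roc_auc'`; a small sketch assuming `X` and `y` are defined as in the question:
```python
from sklearn.model_selection import cross_val_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier

# Mean and spread of the 10-fold AUC for each model (assumes X and y exist)
knn_auc = cross_val_score(KNeighborsClassifier(n_neighbors=12), X, y, cv=10, scoring='roc_auc')
rf_auc = cross_val_score(RandomForestClassifier(n_estimators=10), X, y, cv=10, scoring='roc_auc')
print("KNN mean AUC: {:.3f} +/- {:.3f}".format(knn_auc.mean(), knn_auc.std()))
print("RF  mean AUC: {:.3f} +/- {:.3f}".format(rf_auc.mean(), rf_auc.std()))
```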