python语言实现 (1)实现一个简单的有监督学习中的判定树算法(2) 实现一个简单的有监督学习中的朴素贝叶斯算法(3)实现一个无监督学习中的K近邻算法
时间: 2024-03-06 11:50:16 浏览: 91
好的,我会分别回答你的三个问题:
1. 实现一个简单的有监督学习中的判定树算法:
```python
class DecisionTree:
    """Binary CART-style decision tree classifier using Gini impurity.

    The fitted tree is nested dicts of the form
    {'feature', 'threshold', 'left_tree', 'right_tree'}; leaves are bare
    class labels.
    """

    def __init__(self):
        self.tree = {}

    def fit(self, X, y):
        """Build the tree from feature matrix X (n_samples, n_features) and labels y."""
        X = np.asarray(X)
        y = np.asarray(y)
        self.tree = self.build_tree(X, y)

    def predict(self, X):
        """Return a list with one predicted label per row of X."""
        return [self.predict_instance(x, self.tree) for x in np.asarray(X)]

    def build_tree(self, X, y):
        """Recursively grow the tree; returns a node dict, a leaf label, or None."""
        n_samples, n_features = X.shape
        if n_samples == 0:
            return None
        if len(set(y)) == 1:  # pure node -> leaf
            return y[0]
        best_feature, best_threshold = self.get_best_split(X, y)
        # No valid split exists (e.g. all rows identical): fall back to the
        # majority class instead of crashing on X[:, None].
        if best_feature is None:
            values, counts = np.unique(y, return_counts=True)
            return values[np.argmax(counts)]
        left_indices = X[:, best_feature] < best_threshold
        right_indices = ~left_indices
        left_tree = self.build_tree(X[left_indices], y[left_indices])
        right_tree = self.build_tree(X[right_indices], y[right_indices])
        return {'feature': best_feature, 'threshold': best_threshold,
                'left_tree': left_tree, 'right_tree': right_tree}

    def get_best_split(self, X, y):
        """Return the (feature, threshold) pair minimizing weighted Gini impurity.

        Bug fixes vs. the original:
        * Gini impurity must be MINIMIZED; the original started from
          best_score = -1 and kept the split with the LARGEST impurity,
          i.e. always chose the worst split.
        * `len(mask)` on a boolean mask is the sample count, not the number
          of True entries; use mask.sum() to detect empty partitions.
        """
        best_feature = None
        best_threshold = None
        best_score = float('inf')
        for feature in range(X.shape[1]):
            feature_values = X[:, feature]
            for threshold in np.unique(feature_values):
                left_indices = feature_values < threshold
                right_indices = ~left_indices
                if left_indices.sum() == 0 or right_indices.sum() == 0:
                    continue  # degenerate split, skip
                score = self.gini_index(y, y[left_indices], y[right_indices])
                if score < best_score:
                    best_score = score
                    best_feature = feature
                    best_threshold = threshold
        return best_feature, best_threshold

    def gini_index(self, y, left_y, right_y):
        """Weighted Gini impurity of the two-way partition (left_y, right_y) of y."""
        p = len(left_y) / len(y)
        gini_left = 1 - sum((np.sum(left_y == c) / len(left_y)) ** 2
                            for c in set(left_y))
        gini_right = 1 - sum((np.sum(right_y == c) / len(right_y)) ** 2
                             for c in set(right_y))
        return p * gini_left + (1 - p) * gini_right

    def predict_instance(self, x, tree):
        """Walk the tree until a leaf (anything that is not a dict node) is hit."""
        if not isinstance(tree, dict):
            return tree
        if x[tree['feature']] < tree['threshold']:
            return self.predict_instance(x, tree['left_tree'])
        return self.predict_instance(x, tree['right_tree'])
```
2. 实现一个简单的有监督学习中的朴素贝叶斯算法:
```python
class NaiveBayes:
    """Gaussian naive Bayes classifier.

    Fits one Gaussian per (class, feature) pair and classifies by the
    maximum log-posterior: log P(c) + sum_j log N(x_j; mean_cj, std_cj).
    """

    # Variance smoothing: added to every std so a feature that is constant
    # within a class no longer yields division by zero / log(0) -> NaN
    # posteriors (bug in the original).
    _EPS = 1e-9

    def __init__(self):
        self.classes = None
        self.priors = None   # P(c), shape (n_classes,)
        self.means = None    # shape (n_classes, n_features)
        self.stds = None     # shape (n_classes, n_features)

    def fit(self, X, y):
        """Estimate priors and per-class Gaussian parameters from X, y."""
        X = np.asarray(X)
        y = np.asarray(y)
        self.classes = np.unique(y)
        n_classes = len(self.classes)
        n_features = X.shape[1]
        self.priors = np.zeros(n_classes)
        self.means = np.zeros((n_classes, n_features))
        self.stds = np.zeros((n_classes, n_features))
        for i, c in enumerate(self.classes):
            X_c = X[y == c]
            self.priors[i] = X_c.shape[0] / X.shape[0]
            self.means[i] = X_c.mean(axis=0)
            self.stds[i] = X_c.std(axis=0) + self._EPS
        return self

    def predict(self, X):
        """Return a list with one predicted class label per row of X."""
        return [self.predict_instance(x) for x in np.asarray(X)]

    def predict_instance(self, x):
        """Classify one sample by maximum log-posterior (log avoids underflow)."""
        posteriors = []
        for i, c in enumerate(self.classes):
            prior = np.log(self.priors[i])
            class_conditional = np.sum(
                np.log(self.gaussian_pdf(x, self.means[i], self.stds[i])))
            posteriors.append(prior + class_conditional)
        return self.classes[np.argmax(posteriors)]

    def gaussian_pdf(self, x, mean, std):
        """Element-wise Gaussian density N(x; mean, std)."""
        return (1 / (np.sqrt(2 * np.pi) * std)) * \
            np.exp(-(x - mean) ** 2 / (2 * std ** 2))
```
3. 实现一个K近邻算法(注:K近邻分类是有监督学习算法,训练时需要样本标签;原题中"无监督"的说法不准确):
```python
class KNN:
    """k-nearest-neighbours classifier (Euclidean distance, majority vote).

    NOTE: despite the surrounding article's framing, k-NN classification is
    a SUPERVISED algorithm — it needs training labels. The original `fit`
    never stored them, so `predict_instance` crashed on the nonexistent
    `self.y` (AttributeError).
    """

    def __init__(self, k):
        self.k = k       # number of neighbours to vote
        self.X = None    # training features
        self.y = None    # training labels (non-negative ints for bincount)

    def fit(self, X, y=None):
        """Memorize the training set.

        `y` is keyword-optional only for backward compatibility with the
        original fit(X) signature; predict() requires it to have been given.
        """
        self.X = np.asarray(X)
        self.y = None if y is None else np.asarray(y)
        return self

    def predict(self, X):
        """Return a list with one predicted label per row of X."""
        return [self.predict_instance(x) for x in np.asarray(X)]

    def predict_instance(self, x):
        """Majority label among the k training points closest to x."""
        if self.y is None:
            raise ValueError("fit(X, y) must be called with labels before predict()")
        distances = np.sqrt(np.sum((self.X - x) ** 2, axis=1))
        nearest_indices = np.argsort(distances)[:self.k]
        return np.bincount(self.y[nearest_indices]).argmax()
```
阅读全文