黑松山资源网 Design By www.paidiu.com
代码
import numpy as np

# --- Activation functions and their derivatives ---
# NOTE: the d* functions take the layer's *activated output* (not the
# pre-activation input); this is why dsigmoid/dtanh are written in terms of y.


def sigmoid(x):
    """Logistic sigmoid: 1 / (1 + e^-x)."""
    return 1 / (1 + np.exp(-x))


def dsigmoid(y):
    """Derivative of sigmoid expressed via its output y: y * (1 - y)."""
    return y * (1 - y)


def tanh(x):
    """Hyperbolic tangent."""
    return np.tanh(x)


def dtanh(y):
    """Derivative of tanh expressed via its output y: 1 - y^2."""
    return 1.0 - y ** 2


def relu(y):
    """Rectified linear unit: max(0, y), element-wise (input is copied)."""
    tmp = y.copy()
    tmp[tmp < 0] = 0
    return tmp


def drelu(x):
    """Derivative of ReLU: 1 where x > 0, else 0.

    Fixed: the original used ``tmp[tmp >= 0] = 1``, which on ReLU *outputs*
    (always >= 0) yields a derivative of 1 everywhere, including clipped
    units. Strict inequality gives 0 for clipped units.
    """
    tmp = x.copy()
    tmp[tmp > 0] = 1
    tmp[tmp <= 0] = 0
    return tmp


class MLPClassifier(object):
    """Multi-layer perceptron trained with mini-batch back-propagation (SGD)."""

    def __init__(self, layers, activation='tanh', epochs=20, batch_size=1,
                 learning_rate=0.01):
        """
        :param layers: layer sizes, e.g. (n_inputs, n_hidden, n_outputs)
        :param activation: 'tanh', 'sigmoid' or 'relu'
        :param epochs: number of passes over the training set
        :param batch_size: mini-batch size used by fit()
        :param learning_rate: SGD step size
        :raises ValueError: if ``activation`` is not a known name
        """
        self.epochs = epochs
        self.learning_rate = learning_rate
        self.layers = []
        self.weights = []
        self.batch_size = batch_size
        # One weight matrix per connection between consecutive layers,
        # plus a placeholder activation vector per layer.
        for i in range(0, len(layers) - 1):
            weight = np.random.random((layers[i], layers[i + 1]))
            layer = np.ones(layers[i])
            self.layers.append(layer)
            self.weights.append(weight)
        self.layers.append(np.ones(layers[-1]))
        # One threshold (negative bias) vector per non-input layer.
        self.thresholds = []
        for i in range(1, len(layers)):
            threshold = np.random.random(layers[i])
            self.thresholds.append(threshold)
        if activation == 'tanh':
            self.activation = tanh
            self.dactivation = dtanh
        # 'sigomid' accepted for backward compatibility (typo in the
        # original release meant only that spelling worked).
        elif activation in ('sigmoid', 'sigomid'):
            self.activation = sigmoid
            self.dactivation = dsigmoid
        elif activation == 'relu':
            self.activation = relu
            self.dactivation = drelu
        else:
            # Fail fast instead of leaving self.activation unset (the
            # original silently produced a broken object here).
            raise ValueError("unknown activation: %r" % (activation,))

    def fit(self, X, y):
        """Train with mini-batch SGD on randomly sampled batches.

        :param X: shape = [n_samples, n_features]
        :param y: shape = [n_samples, n_outputs]
        :return: self
        """
        for _ in range(self.epochs * (X.shape[0] // self.batch_size)):
            # Sample a random mini-batch (with replacement).
            i = np.random.choice(X.shape[0], self.batch_size)
            self.update(X[i])
            self.back_propagate(y[i])
        return self

    def predict(self, X):
        """Forward-propagate X and return the output layer activations.

        :param X: shape = [n_samples, n_features]
        :return: shape = [n_samples, n_outputs]
        """
        self.update(X)
        return self.layers[-1].copy()

    def update(self, inputs):
        """Forward pass: store each layer's activation in self.layers."""
        self.layers[0] = inputs
        for i in range(len(self.weights)):
            # net input = activations @ W - threshold
            next_layer_in = self.layers[i] @ self.weights[i] - self.thresholds[i]
            self.layers[i + 1] = self.activation(next_layer_in)

    def back_propagate(self, y):
        """Backward pass: update thresholds and weights from the last batch.

        :param y: targets for the batch, shape = [batch_size, n_outputs]
        """
        errors = y - self.layers[-1]
        # Output-layer gradient, summed over the batch.
        gradients = [(self.dactivation(self.layers[-1]) * errors).sum(axis=0)]
        # Fixed: normalize by batch_size like the hidden-layer updates below
        # (the original applied an effectively larger step to this layer).
        self.thresholds[-1] -= self.learning_rate * gradients[-1] / self.batch_size
        # Propagate gradients backwards through the hidden layers.
        for i in range(len(self.weights) - 1, 0, -1):
            tmp = np.sum(
                gradients[-1] @ self.weights[i].T * self.dactivation(self.layers[i]),
                axis=0)
            gradients.append(tmp)
            self.thresholds[i - 1] -= self.learning_rate * gradients[-1] / self.batch_size
        gradients.reverse()
        # Weight updates: outer product of mean layer activation and gradient.
        for i in range(len(self.weights)):
            tmp = np.mean(self.layers[i], axis=0)
            self.weights[i] += self.learning_rate * tmp.reshape((-1, 1)) * gradients[i]
测试代码
import sklearn.datasets
import numpy as np
# Fixed: plt was used below but matplotlib was never imported (NameError),
# and the two import statements above were fused onto one line.
import matplotlib.pyplot as plt


def plot_decision_boundary(pred_func, X, y, title=None):
    """Plot the training points and the classifier's decision boundary.

    :param pred_func: predict function mapping [n, 2] inputs to labels
    :param X: training inputs, shape = [n_samples, 2]
    :param y: training labels
    :param title: optional plot title
    :return: None
    """
    # Set min and max values and give the plot some padding.
    x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
    y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
    h = 0.01
    # Generate a grid of points with distance h between them.
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid.
    Z = pred_func(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples.
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.scatter(X[:, 0], X[:, 1], s=40, c=y, cmap=plt.cm.Spectral)
    if title:
        plt.title(title)
    plt.show()


def test_mlp():
    """Train the MLP on the two-moons dataset and plot its boundary."""
    X, y = sklearn.datasets.make_moons(200, noise=0.20)
    y = y.reshape((-1, 1))
    n = MLPClassifier((2, 3, 1), activation='tanh', epochs=300, learning_rate=0.01)
    n.fit(X, y)

    def tmp(X):
        # Threshold the network output at 0.5 to get hard 0/1 labels.
        sign = np.vectorize(lambda x: 1 if x >= 0.5 else 0)
        ans = sign(n.predict(X))
        return ans

    plot_decision_boundary(tmp, X, y, 'Neural Network')
效果
更多机器学习代码,请访问 https://github.com/WiseDoge/plume
以上就是如何用Python 实现全连接神经网络(Multi-layer Perceptron)的详细内容,更多关于Python 实现全连接神经网络的资料请关注其它相关文章!
黑松山资源网 Design By www.paidiu.com
广告合作:本站广告合作请联系QQ:858582 申请时备注:广告合作(否则不回)
免责声明:本站资源来自互联网收集,仅供用于学习和交流,请遵循相关法律法规,本站一切资源不代表本站立场,如有侵权、后门、不妥请联系本站删除!
免责声明:本站资源来自互联网收集,仅供用于学习和交流,请遵循相关法律法规,本站一切资源不代表本站立场,如有侵权、后门、不妥请联系本站删除!
黑松山资源网 Design By www.paidiu.com
暂无评论...
RTX 5090要首发 性能要翻倍!三星展示GDDR7显存
三星在GTC上展示了专为下一代游戏GPU设计的GDDR7内存。
首次推出的GDDR7内存模块密度为16GB,每个模块容量为2GB。其速度预设为32 Gbps(PAM3),但也可以降至28 Gbps,以提高产量和初始阶段的整体性能和成本效益。
据三星表示,GDDR7内存的能效将提高20%,同时工作电压仅为1.1V,低于标准的1.2V。通过采用更新的封装材料和优化的电路设计,使得在高速运行时的发热量降低,GDDR7的热阻比GDDR6降低了70%。
更新日志
2024年10月05日
2024年10月05日
- 群星《前途海量 电影原声专辑》[FLAC/分轨][227.78MB]
- 张信哲.1992-知道新曲与精选【巨石】【WAV+CUE】
- 王翠玲.1995-ANGEL【新艺宝】【WAV+CUE】
- 景冈山.1996-我的眼里只有你【大地唱片】【WAV+CUE】
- 群星《八戒 电影原声带》[320K/MP3][188.97MB]
- 群星《我的阿勒泰 影视原声带》[320K/MP3][139.47MB]
- 纪钧瀚《胎教古典音乐 钢琴与大提琴的沉浸时光》[320K/MP3][148.91MB]
- 刘雅丽.2001-丽花皇后·EMI精选王【EMI百代】【FLAC分轨】
- 齐秦.1994-黄金十年1981-1990CHINA.TOUR.LIVE精选【上华】【WAV+CUE】
- 群星.2008-本色·百代音乐人创作专辑【EMI百代】【WAV+CUE】
- 群星.2001-同步过冬AVCD【环球】【WAV+CUE】
- 群星.2020-同步过冬2020冀待晴空【环球】【WAV+CUE】
- 沈雁.1986-四季(2012梦田复刻版)【白云唱片】【WAV+CUE】
- 纪钧瀚《胎教古典音乐 钢琴与大提琴的沉浸时光》[FLAC/分轨][257.88MB]
- 《国语老歌 怀旧篇 3CD》[WAV/分轨][1.6GB]