Adaptive Linear Neuron (Adaline): Python Implementation

Python implementation (AdalineGD, batch gradient descent):

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd


class AdalineGD(object):
    """ADAptive LInear NEuron classifier.

    Parameters
    ----------
    eta : float
        Learning rate (between 0.0 and 1.0)
    n_iter : int
        Passes over the training dataset.

    Attributes
    ----------
    w_ :  1d-array
        Weights after fitting.
    cost_ : list
        Sum-of-squares cost value in every epoch.

    """
    def __init__(self, eta=0.01, n_iter=50):
        self.eta = eta
        self.n_iter = n_iter

    def fit(self, X, y):
        """Fit training data.

        Parameters
        ----------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors,
            where n_samples is the number of samples and
            n_features is the number of features.
        y : array-like, shape = [n_samples]
            Target values

        Returns
        -------
        self : object

        """
        self.w_ = np.zeros(1 + X.shape[1])
        self.cost_ = []
        for i in range(self.n_iter):
            output = self.net_input(X)
            errors = (y - output)
            self.w_[1:] += self.eta * X.T.dot(errors)
            self.w_[0] += self.eta * errors.sum()
            cost = (errors ** 2).sum() / 2.0
            self.cost_.append(cost)
        return self

    def net_input(self, X):
        """Calculate net input"""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute linear activation"""
        return self.net_input(X)

    def predict(self, X):
        """Return class label after unit step"""
        return np.where(self.activation(X) >= 0.0, 1, -1)
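The original post stops at the class definition, so the following usage sketch is an addition rather than part of the source: it assumes the UCI Iris dataset (loaded over HTTP from archive.ics.uci.edu), keeps only the first 100 samples (setosa vs. versicolor) and two features, and standardizes the features before fitting. The dataset, URL, and feature choice are illustrative assumptions.

# Usage sketch (illustrative, not from the original post)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Load the Iris data and build a binary problem: setosa (-1) vs. versicolor (+1)
df = pd.read_csv('https://archive.ics.uci.edu/ml/'
                 'machine-learning-databases/iris/iris.data', header=None)
y = np.where(df.iloc[0:100, 4].values == 'Iris-setosa', -1, 1)
X = df.iloc[0:100, [0, 2]].values          # sepal length and petal length

# Standardize each feature to zero mean and unit variance
X_std = np.copy(X)
X_std[:, 0] = (X[:, 0] - X[:, 0].mean()) / X[:, 0].std()
X_std[:, 1] = (X[:, 1] - X[:, 1].mean()) / X[:, 1].std()

# Fit with batch gradient descent and inspect the cost per epoch
ada = AdalineGD(eta=0.01, n_iter=15).fit(X_std, y)
plt.plot(range(1, len(ada.cost_) + 1), ada.cost_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Sum-squared-error')
plt.show()

With unstandardized inputs the same learning rate can make the cost diverge, which is why the features are scaled before fitting.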

Python implementation (AdalineSGD, stochastic gradient descent):

import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from numpy.random import seed


class AdalineSGD(object):
    """ADAptive LInear NEuron classifier.

    Parameters
    ----------
    eta : float
        Learning rate (between 0.0 and 1.0)
    n_iter : int
        Passes over the training dataset.

    Attributes
    ----------
    w_ :  1d-array
        Weights after fitting.
    cost_ : list
        Average cost over the training samples in every epoch.

    """
    def __init__(self, eta=0.01, n_iter=10, shuffle=True, random_state=None):
        self.eta = eta
        self.n_iter = n_iter
        self.w_initialized = False
        self.shuffle = shuffle
        if random_state:
            seed(random_state)

    def fit(self, X, y):
        """Fit training data.

        Parameters
        ----------
        X : {array-like}, shape = [n_samples, n_features]
            Training vectors,
            where n_samples is the number of samples and
            n_features is the number of features.
        y : array-like, shape = [n_samples]
            Target values

        Returns
        -------
        self : object

        """
        self._initialize_weights(X.shape[1])
        self.cost_ = []
        for i in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            cost = []
            for xi, target in zip(X, y):
                cost.append(self._update_weights(xi, target))
            avg_cost = sum(cost) / len(y)
            self.cost_.append(avg_cost)
        return self

    def partial_fit(self, X, y):
        """Fit training data without reinitializing the weights"""
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])
        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            self._update_weights(X, y)
        return self

    def _shuffle(self, X, y):
        """Shuffle training data"""
        r = np.random.permutation(len(y))
        return X[r], y[r]

    def _initialize_weights(self, m):
        """Initialize weights to zeros"""
        self.w_ = np.zeros(1 + m)
        self.w_initialized = True

    def _update_weights(self, xi, target):
        """Apply the Adaline learning rule to update the weights"""
        output = self.net_input(xi)
        error = (target - output)
        self.w_[1:] += self.eta * xi.dot(error)
        self.w_[0] += self.eta * error
        cost = 0.5 * error ** 2
        return cost

    def net_input(self, X):
        """Calculate net input"""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Compute linear activation"""
        return self.net_input(X)

    def predict(self, X):
        """Return class label after unit step"""
        return np.where(self.activation(X) >= 0.0, 1, -1)
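As above, a usage example is an addition rather than part of the original post. This sketch mirrors the AdalineGD example (same assumed Iris data and standardization), fits AdalineSGD with per-sample weight updates, and then shows an online update via partial_fit.

# Usage sketch (illustrative, not from the original post)
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv('https://archive.ics.uci.edu/ml/'
                 'machine-learning-databases/iris/iris.data', header=None)
y = np.where(df.iloc[0:100, 4].values == 'Iris-setosa', -1, 1)
X = df.iloc[0:100, [0, 2]].values
X_std = (X - X.mean(axis=0)) / X.std(axis=0)   # standardize both columns at once

# Stochastic gradient descent: one weight update per sample, shuffled each epoch
ada_sgd = AdalineSGD(eta=0.01, n_iter=15, random_state=1)
ada_sgd.fit(X_std, y)

plt.plot(range(1, len(ada_sgd.cost_) + 1), ada_sgd.cost_, marker='o')
plt.xlabel('Epochs')
plt.ylabel('Average cost')
plt.show()

# Online learning: refine the already-fitted weights with a single new sample
ada_sgd.partial_fit(X_std[0, :], y[0])

Because the weights change after every sample, cost_ stores the average per-sample cost of each epoch rather than a summed error, and shuffling the data each epoch helps avoid repeating the same update order.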
