
Perceptron

 

import numpy as np


class Perceptron:
    """Perceptron classifier.

    eta          -- learning rate (between 0.0 and 1.0)
    n_iter       -- number of passes over the training dataset
    random_state -- seed for reproducible weight initialization
    """
    def __init__(self, eta=0.01, n_iter=50, random_state=1):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state

    def fit(self, X, y):
        """Fit the weights to the training data X with labels y (+1 / -1)."""
        rgen = np.random.RandomState(self.random_state)
        # Small random initial weights; w_[0] is the bias unit.
        self.w_ = rgen.normal(loc=0.0, scale=0.01, size=1 + X.shape[1])
        self.errors_ = []

        for _ in range(self.n_iter):
            errors = 0
            for xi, target in zip(X, y):
                update = self.eta * (target - self.predict(xi))
                self.w_[1:] += update * xi
                self.w_[0] += update  # bias update goes to w_[0], not w_[1:]
                errors += int(update != 0.0)
            self.errors_.append(errors)
        return self

    def net_input(self, X):
        """Compute the net input (weighted sum plus bias)."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def predict(self, X):
        """Return the class label (+1 or -1) after the unit step function."""
        return np.where(self.net_input(X) >= 0.0, 1, -1)
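As a quick check of the class above, here is a minimal training sketch. The toy arrays, the `ppn` variable name, and the hyperparameter values are illustrative assumptions, not part of the original post; any small, linearly separable dataset would work the same way.

import numpy as np

# Illustrative, linearly separable toy data (two clusters), labeled -1 and +1.
X = np.array([[1.0, 1.0], [1.5, 2.0], [2.0, 1.5],
              [6.0, 6.5], [7.0, 6.0], [6.5, 7.0]])
y = np.array([-1, -1, -1, 1, 1, 1])

ppn = Perceptron(eta=0.1, n_iter=10, random_state=1)
ppn.fit(X, y)

print(ppn.errors_)                              # misclassifications per epoch
print(ppn.predict(np.array([[2.0, 2.0],
                            [6.0, 6.0]])))      # should print [-1  1]

On separable data like this, errors_ should drop to 0 within a few epochs, which is an easy way to confirm the update rule is wired correctly.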

 

Adaline

import numpy as np


class Adaline:
    """ADAptive LInear NEuron trained with stochastic gradient descent.

    eta          -- learning rate (between 0.0 and 1.0)
    n_iter       -- number of passes over the training dataset
    random_state -- seed for reproducible shuffling and weight initialization
    shuffle      -- if True, shuffle the training data every epoch
    """
    def __init__(self, eta=0.01, n_iter=50, random_state=None, shuffle=True):
        self.eta = eta
        self.n_iter = n_iter
        self.random_state = random_state
        self.shuffle = shuffle
        self.w_initialized = False

    def fit(self, X, y):
        """Fit the weights to the training data X with labels y."""
        self._initialize_weights(X.shape[1])
        self.cost_ = []
        for _ in range(self.n_iter):
            if self.shuffle:
                X, y = self._shuffle(X, y)
            cost = []
            for xi, target in zip(X, y):
                cost.append(self._update_weights(xi, target))
            avg_cost = sum(cost) / len(y)
            self.cost_.append(avg_cost)
        return self

    def partial_fit(self, X, y):
        """Train on new data without re-initializing the weights."""
        if not self.w_initialized:
            self._initialize_weights(X.shape[1])
        if y.ravel().shape[0] > 1:
            for xi, target in zip(X, y):
                self._update_weights(xi, target)
        else:
            self._update_weights(X, y)
        return self

    def _shuffle(self, X, y):
        """Shuffle the training data."""
        r = self.rgen.permutation(len(y))
        return X[r], y[r]

    def _initialize_weights(self, m):
        """Initialize the weights to small random numbers."""
        self.rgen = np.random.RandomState(self.random_state)
        self.w_ = self.rgen.normal(loc=0.0, scale=0.01, size=1 + m)
        self.w_initialized = True

    def _update_weights(self, xi, target):
        """Apply the Adaline learning rule to update the weights."""
        # eta is the learning rate, between 0.0 and 1.0.
        output = self.activation(self.net_input(xi))
        error = target - output
        self.w_[1:] += self.eta * xi.dot(error)
        self.w_[0] += self.eta * error
        cost = 0.5 * error ** 2
        return cost

    def net_input(self, X):
        """Compute the net input (weighted sum plus bias)."""
        return np.dot(X, self.w_[1:]) + self.w_[0]

    def activation(self, X):
        """Linear activation (identity function)."""
        return X

    def predict(self, X):
        """Return the class label (+1 or -1) after the unit step function."""
        return np.where(self.activation(self.net_input(X)) >= 0.0, 1, -1)
 
 
 
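The Adaline class can be exercised the same way. Because it is trained with gradient descent, standardizing the features usually helps it converge; the toy arrays, the `ada` variable, and the hyperparameters below are again illustrative assumptions rather than part of the original post. The final `partial_fit` call shows the online-learning path that updates the weights without re-initializing them.

import numpy as np

# Illustrative toy data; standardize the features so SGD converges smoothly.
X = np.array([[1.0, 1.0], [1.5, 2.0], [2.0, 1.5],
              [6.0, 6.5], [7.0, 6.0], [6.5, 7.0]])
y = np.array([-1, -1, -1, 1, 1, 1])
X_std = (X - X.mean(axis=0)) / X.std(axis=0)

ada = Adaline(eta=0.01, n_iter=20, random_state=1, shuffle=True)
ada.fit(X_std, y)

print(ada.cost_[-1])          # average cost of the last epoch (should be small)
print(ada.predict(X_std))     # should reproduce the training labels

# Online learning: feed in a small batch of new samples,
# reusing the already-initialized weights instead of starting over.
X_new = (np.array([[6.8, 6.2], [1.2, 1.4]]) - X.mean(axis=0)) / X.std(axis=0)
ada.partial_fit(X_new, np.array([1, -1]))

Watching cost_ shrink from epoch to epoch is the usual way to pick a reasonable eta: if the values oscillate or grow, the learning rate is too large.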