;; Simple feed-forward neural network in Scheme: constructors for neurons,
;; layers and networks, a forward pass, and backpropagation-style training.
;; (Reconstructed from a paste-diff-corrupted copy.)
;; --- Neuron / layer representation ---
;; A neuron is the tagged list ('n weights); a layer is ('l neurons).

(define (n-mk lst)                      ;make a new neuron from a weight list
  (list 'n lst))

(define (l-mk lst)                      ;make a new layer from a neuron list
  (list 'l lst))

(define (n-inp-lst x)                   ;return a neuron's input weights
  (cadr x))

(define (n? x)                          ;check whether x is a neuron
  (eq? (car x) 'n))                     ;eq?, not =: the tag is a symbol, not a number

(define (l-neuron-lst x)                ;return a layer's list of neurons
  (cadr x))

(define (l? x)                          ;check whether x is a layer
  (eq? (car x) 'l))                     ;eq?, not =: symbol comparison
18 | ||
;; --- Read-only views of the network's weights ---

(define (l-inp-wh l)                    ;input weights of every neuron in a layer
  (map (lambda (x) (n-inp-lst x)) (l-neuron-lst l)))

(define (l-out-wh l)                    ;output-side weights: the input weight matrix transposed
  (transpose (map (lambda (x) (n-inp-lst x)) (l-neuron-lst l)) '()))

(define (net-inp-wh net)                ;input weights of each layer of the network
  (map (lambda (x) (l-inp-wh x)) net))

(define (net-out-wh net)                ;output-side weights of each layer
  (map (lambda (x) (l-out-wh x)) net))
30 | ||
(define (net-mk-data lst)               ;build a network from a list of layer data (lists of weight lists)
  (map (lambda (x) (l-mk-data x)) lst))

(define (l-mk-data lst)                 ;build one layer from a list of weight lists
  (l-mk (map (lambda (x) (n-mk x)) lst)))
36 | ||
(define (lst-push x lst)                ;push x onto the front of lst
  (cons x lst))

(define (lst-push-b x lst)              ;push x onto the back of lst
  (append lst (list x)))
42 | ||
(define (randomize n lst)               ;prepend n random integers in 1..9 onto lst
  (if (= n 0)
      lst
      (randomize (- n 1) (append (list (+ (random 9) 1)) lst))))

(define (empty? lst)                    ;#t when lst is the empty list
  (null? lst))                          ;null? is the idiomatic O(1) test (was: (= (length lst) 0))
50 | ||
(define (net-make lay)                  ;new random network; lay = neuron count for each layer
  (net-mk-new (car lay) (cdr lay) '()))

(define (net-mk-new input n-lst lst)    ;input = fan-in of the next layer; n-lst = sizes of remaining layers
  (let ((lst1 (lst-push-b (l-mk-new input (car n-lst) '()) lst)))
    (if (= (length n-lst) 1)
        lst1
        (net-mk-new (car n-lst) (cdr n-lst) lst1))))

(define (l-mk-new input n lst)          ;new layer of n neurons, each with `input` random weights
  ;; NOTE(review): original comment here was "seit kluda" (Latvian: "bug here") — verify intent
  (let ((lst1 (append lst (list (n-mk (lst-rand input))))))
    (if (= n 1)
        (l-mk lst1)
        (l-mk-new input (- n 1) lst1))))

(define (lst-rand n)                    ;n random exact weights in 1/10 .. 9/10
  (map (lambda (x) (/ x 10)) (randomize n '())))
64 | ||
(define (n-akt x param)                 ;neuron activation: x / (|x| + param), a soft squashing function
  (/ x (+ (abs x) param)))

(define (lst-sum lst)                   ;sum of all list elements
  (apply + lst))

(define (n-proc neuron input)           ;one neuron's output for the given input vector
  (n-akt (lst-sum (lst-mul (n-inp-lst neuron) input)) 4)) ;4 = activation steepness constant

(define (lst-mul a b)                   ;element-wise product of two lists
  (map (lambda (x y) (* x y)) a b))

(define (lst-if a b)                    ;#t when either list has exactly one element
  (or (= (length a) 1) (= (length b) 1)))

(define (l-proc l input)                ;layer output: process every neuron of layer l
  (map (lambda (x) (n-proc x input)) (l-neuron-lst l))) ;use the layer accessor (was n-inp-lst; same cadr)
82 | ||
(define (net-proc net input)            ;forward pass: network output for one input vector
  (net-proc2 net (l-check input)))

(define (net-proc2 net input)           ;feed the input through each layer in turn
  (let ((l (l-proc (car net) input)))
    (if (= 1 (length net))
        l
        (net-proc2 (cdr net) l))))

(define (net-proc-res net input)        ;forward pass that keeps every layer's output
  (net-proc-res2 net (l-check input) '()))

(define (net-proc-res2 net input res)   ;accumulate per-layer outputs in res (input order)
  (let ((l (l-proc (car net) input)))
    (let ((res1 (lst-push-b l res)))
      (if (= 1 (length net))
          res1
          (net-proc-res2 (cdr net) l res1)))))
99 | ||
(define (l-err out err wh)              ;layer error: out .* (next layer's error x output weights)
  (lst-mul out (l-err*wh err wh)))

(define (l-err*wh err wh)               ;next layer's error times the output weights, summed per neuron
  (map (lambda (y) (lst-sum y)) (map (lambda (x) (lst-mul err x)) wh)))

(define (lst-2-mul a b)                 ;element-wise product of two lists (duplicate of lst-mul, kept for callers)
  (map (lambda (x y) (* x y)) a b))

(define (lst-lst-mul a b)               ;scale list a by each scalar of list b -> list of scaled copies
  (map (lambda (x) (lst-mul-x x a)) b))

(define (net-wh*err wh err)             ;multiply each layer's weights by that layer's error
  (map (lambda (x y) (lst-lst-mul x y)) wh err))

(define (net-err+wh err wh)             ;add the error deltas to the weights, layer by layer
  (map (lambda (x y) (l-err+wh x y)) err wh))

(define (lst-2-sum a b)                 ;element-wise sum of two lists
  (map (lambda (x y) (+ x y)) a b))

(define (l-err+wh err wh)               ;add error deltas to one layer's weight rows
  (map (lambda (x y) (lst-2-sum x y)) err wh))
(define (lst-put-num lst)               ;pair each element with its index: ((0 a) (1 b) ...)
  (lst-zip (sequence 0 (- (length lst) 1) '()) lst))

(define (sequence from to res)          ;integers from..to inclusive, appended onto res
  (let ((nr (append res (list from))))
    (if (= from to) nr (sequence (+ from 1) to nr))))

(define (lst-order-max lst)             ;(index value) pairs sorted by value, largest first
  (hsort (lst-put-num lst)))

(define (lst-zip a b)                   ;zip two lists into (a_i b_i) pairs
  (map (lambda (x y) (list x y)) a b))

(define (hsort lst)                     ;quicksort of (index value) pairs, descending by value
  (if (empty? lst)
      '()
      (append
       (hsort (filter (lambda (x) (> (cadr x) (cadr (car lst)))) (cdr lst)))
       (list (car lst))
       (hsort (filter (lambda (x) (<= (cadr x) (cadr (car lst)))) (cdr lst))))))

(define (ssort lst)                     ;quicksort of plain numbers, descending
  (if (empty? lst)
      '()
      (append
       (ssort (filter (lambda (x) (> x (car lst))) (cdr lst)))
       (list (car lst))
       (ssort (filter (lambda (x) (<= x (car lst))) (cdr lst))))))
150 | ||
(define (net-study net lst spd)         ;train until every sample in lst classifies correctly; sample = ((inp) (out))
  (if (net-check-lst net lst)
      net
      (net-study (net-study-lst net lst spd) lst spd)))

(define (net-proc-num net inp)          ;index of the strongest output neuron for this input
  (caar (lst-order-max (net-proc net inp))))

(define (net-check-lst net lst)         ;check the whole sample list (orig. Latvian comment: "parbauda sarakstu ar paraugiem")
  (lst-and (map (lambda (x) (net-check-smpl net x)) lst)))

(define (lst-and lst)                   ;fold a boolean list with `and`
  (lst-and2 (car lst) (cdr lst)))

(define (lst-and2 a lst)                ;accumulator helper for lst-and
  (if (empty? lst) a (lst-and2 (and a (car lst)) (cdr lst))))

(define (net-check-smpl net x)          ;one sample (input expected-output): classified correctly?
  (let ((inp (car x))
        (out (cadr x)))
    (= (caar (lst-order-max out)) (net-proc-num net inp))))
172 | ||
(define (net-study-lst net lst spd)     ;one training pass over the sample list; sample = (input output)
  (let ((x (net-study1 net (caar lst) (cadar lst) spd)))
    (if (= 1 (length lst))
        x
        (net-study-lst x (cdr lst) spd))))

(define (net-study1 net inp need spd)   ;train one sample (after input normalisation)
  (net-study2 net (l-check inp) need spd))

(define (net-study2 net inp need spd)   ;repeat weight updates until this sample classifies correctly
  (let ((x (net-study3 net inp need spd)))
    (if (= (caar (lst-order-max need)) (caar (lst-order-max (net-proc x inp))))
        x
        (net-study2 x inp need spd))))

(define (slice lst from to)             ;sublist starting at index `from`; negative `to` trims items off the end
  (let ((lng (length lst)))
    (take (drop lst from) (+ (- lng from) to))))

(define (net-study3 net inp need spd)   ;one backprop step: new network with spd-scaled error added to weights
  (let ((err (net-spd*err spd (slice (net-err net inp need) 1 0))) ;drop the input layer's error
        (wh (net-inp-wh net))
        (out (slice (net-proc-res-out net inp) 0 -1)))             ;drop the output layer's derivative
    (net-mk-data (net-err+wh wh (map (lambda (x y) (lst-lst-mul x y)) out err)))))
196 | ||
(define (net-err2 out-lst err-lst wh-lst) ;back-propagate: accumulate per-layer errors, output toward input
  (let ((err-lst1 (lst-push (l-err (car out-lst) (car err-lst) (car wh-lst)) err-lst)))
    (if (lst-if out-lst wh-lst)           ;stop when either list is down to its last layer
        err-lst1
        (net-err2 (cdr out-lst) err-lst1 (cdr wh-lst)))))

(define (net-err net inp need)          ;error list for every neuron of the network
  (let ((wh-lst (reverse (net-out-wh net)))
        (err-lst (list (net-out-err net inp need)))
        (out-lst (reverse (slice (net-proc-res-out net inp) 0 -1))))
    (net-err2 out-lst err-lst wh-lst)))

(define (net-spd*err spd err)           ;scale every per-layer error by the learning speed
  (map (lambda (x) (lst-mul-x spd x)) err))

(define (lst-mul-x s lst)               ;multiply each list element by scalar s
  (map (lambda (x) (* s x)) lst))
212 | ||
(define (net-proc-res-out net inp)      ;x*(1-x) for every neuron output, with the raw input layer prepended
  (lst-push inp (map (lambda (x) (l-proc-res-out x)) (net-proc-res net inp))))

(define (l-proc-res-out lst)            ;derivative-style factor x*(1-x) for each output of one layer
  (map (lambda (x) (* x (- 1 x))) lst))

(define (l-out-err need fact)           ;error of the output layer, element by element
  (map (lambda (x y) (n-out-err x y)) need fact))

(define (n-out-err need fact)           ;error of one output neuron: fact*(1-fact)*(need-fact)
  (* fact (- 1 fact) (- need fact)))

(define (net-out-err net inp need)      ;output-layer error for one sample
  (l-out-err need (net-proc net inp)))
227 | ||
(define (lst-print lst)                 ;print each element on its own line, then a final newline
  (for-each (lambda (x) (newline) (display x)) lst) ;for-each sequences cleanly (was: map + (and (newline) ...),
  (newline))                                        ;which relied on newline's unspecified value being truthy)

(define (lst-print2 lst)                ;print a list of lists: one lst-print block per sublist
  (for-each (lambda (x) (lst-print x)) lst)
  (newline))

(define (l-check lst) lst)              ;input normalisation hook — currently the identity
(define (transpose lst res)             ;transpose a list of equal-length rows; res accumulates columns
  (if (empty? (car lst))                ;assumes lst is non-empty — callers pass at least one row
      res
      (transpose (lst-cdr lst) (append res (lst-car lst)))))

(define (lst-cdr lst)                   ;cdr of every element
  (map (lambda (x) (cdr x)) lst))

(define (lst-car lst)                   ;car of every element, wrapped as a single row
  (list (map (lambda (x) (car x)) lst)))