summaryrefslogtreecommitdiff
path: root/src/algo2.lisp
blob: e1dc0b7dedbdd42cb3606481592f89447622e397 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; SYMBOLIC FEED-FORWARD NEURAL NETWORK ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;
;;;; (C) 2026 Screwlisp, CC-By-SA
;;;;
;;;; Originally published on https://screwlisp.small-web.org/fundamental/a-better-deep-learning-algorithm/
;;;;
;;;; OVERVIEW: This implements a neural network designed for pattern
;;;; recognition within symbolic grids. Instead of using traditional
;;;; "Dense" layers of floating-point weights, it uses "Memory
;;;; Blocks".
;;;;
;;;; HOW IT WORKS:
;;;; 1. WINDOWING: The system slides a "lens" (the memory block size) over 
;;;;    the input data (the input grid).
;;;; 
;;;; 2. CORRELATION: It performs a symbolic comparison between the memory 
;;;;    and the input. It checks for "Co-occurrence":
;;;;    - If the feature is present in BOTH, it's a positive match.
;;;;    - If the feature is absent in BOTH, it's also a positive match.
;;;;    - Mismatches generate negative signals.
;;;;
;;;; 3. NON-LINEAR ACTIVATION: The resulting sums are passed through a 
;;;;    "Rectified Polynomial." This is the "secret sauce" of Deep Learning. 
;;;;    By squaring or cubing the positive signals (the pluses), we amplify 
;;;;    strong patterns and suppress weak noise.
;;;;
;;;; 4. INFERENCE: The system aggregates the "votes" of all memories. If the 
;;;;    net signal is positive, the network confirms the presence of the 
;;;;    requested item (T); otherwise, it rejects it (NIL).
;;;;
;;;; PERSPECTIVE:
;;;; Think of this as a "Fuzzy Grep" for 2D grids. It doesn't look for 
;;;; exact matches, but rather "statistical resonance" between a known 
;;;; template and a raw input patch.
;;;; ==========================================================================

;; Package definition.  Only SCAN-ALL-MEMORIES is exported -- it is the
;; public entry point; everything else in this file is internal machinery.
(defpackage ffnn.algo2
  (:use :cl)
  (:export #:scan-all-memories))
(in-package :ffnn.algo2)

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; 1. THE ACTIVATION FUNCTION ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; This defines how a "neuron" scales its output. By squaring the
;;; result (degree 2), we make strong matches significantly more
;;; influential than weak ones. The DEGREE is the knob that can be
;;; cranked to 11 to control how the NN behaves.
(defun make-rectified-polynomial (degree)
  "Return a one-argument activation function.
Positive inputs are raised to the power DEGREE, amplifying strong
signals; zero and negative inputs are rectified to 0.  DEGREE is the
knob that can be cranked to 11 to control how sharply strong matches
dominate weak ones."
  (lambda (x)
    ;; "Rectified": only positive values pass; everything else is
    ;; clamped to 0.  An IF is exhaustive, so no redundant second test
    ;; (the original COND re-tested (not (plusp x)) and returned a
    ;; needlessly quoted '0).
    (if (plusp x)
        (expt x degree)
        0)))

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; 2. THE MAGNITUDE CALCULATOR ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; This compares a row of input to a row of memory. It's essentially
;;; calculating "Overlap" and "Divergence."
(defun sum-one-row (item row-in mem-in idx
		    &key (predicate 'member) predicate-keys &allow-other-keys)
  "Compare one input row against one memory row, cell by cell.
Returns a list (PLUSES MINUSES): evidence for and against ITEM
fitting.  At the target column IDX the memory's own signal is credited
directly (pluses gain it, minuses lose it); every other column
contributes the correlation of input presence with memory presence to
BOTH tallies.  IDX of NIL treats every column as context.  PREDICATE
(default MEMBER) tests ITEM against a cell, with PREDICATE-KEYS passed
along."
  (let ((pluses 0)
        (minuses 0)
        (column 0))
    ;; MAPC walks both rows simultaneously and stops at the shorter
    ;; one, just as the parallel :FOR ... :IN clauses did.
    (mapc (lambda (ro me)
            ;; Encode presence of ITEM as +1, absence as -1.
            (let ((m (if (apply predicate item me predicate-keys) +1 -1))
                  (c (if (apply predicate item ro predicate-keys) +1 -1)))
              (cond ((and idx (= column idx))
                     ;; Target cell: the memory votes directly, both
                     ;; for (+m) and against (-m).
                     (incf pluses m)
                     (decf minuses m))
                    (t
                     ;; Context cell: agreement (both present or both
                     ;; absent) yields +1, disagreement -1 -- added to
                     ;; both tallies alike.
                     (let ((correlation (* c m)))
                       (incf pluses correlation)
                       (incf minuses correlation)))))
            (incf column))
          row-in mem-in)
    (list pluses minuses)))

(when nil
  ;; REPL exploration of SUM-ONE-ROW.  The WHEN NIL keeps these forms
  ;; inert during a normal load; evaluate them individually with SLIME
  ;; (C-x C-e at a closing paren).  The target index earns a direct
  ;; vote from memory; the other columns score input/memory agreement.
  (sum-one-row 'foo '((foo) () (bar)) '((foo) (foo) (bar)) 1)
  ;; => (3 1) - strong sign that foo belongs there
  (sum-one-row 'foo '((bar) () (bar)) '((bar) (bar) (foo)) 1)
  ;; => (-1 1) - undecided?
  (sum-one-row 'foo '((bar) () (bar)) '((bar) (bar) (bar)) 1)
  ;; => (1 3) - allergy!
  (sum-one-row 'foo '((foo) () (bar)) '((foo) (foo) (bar)) nil)
  ;; => (1 1) - without a target index only context correlation counts
  )

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; 3. THE SPATIAL AGGREGATOR ;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;
;;; This loops through the rows of a single memory "block." Think of a
;;; memory block.
(defun sum-one-memory (item input memory
                       &rest keys
                       &key
                         (target-row 0) (target-col 0)
                         (start-row 0) (start-col 0)
                         (rectified-polynomial (make-rectified-polynomial 2))
                         &allow-other-keys)
  "Score how well ITEM fits INPUT at (TARGET-ROW, TARGET-COL) according
to one MEMORY block, returning a single signed vote.
START-ROW/START-COL offset the memory window into INPUT so the same
memory can examine different patches.  RECTIFIED-POLYNOMIAL is applied
to the accumulated positive and negative evidence before they are
subtracted; it now defaults to a degree-2 rectified polynomial, where
previously omitting it crashed on (FUNCALL NIL ...).  Remaining KEYS
(e.g. :PREDICATE) flow through to SUM-ONE-ROW."
  (loop :for mem-row :in memory
        :for count :from 0
        ;; NTHCDR offsets into the input matrix, letting the network
        ;; look at a specific "patch" of the data.
        :for whole-input-row :in (nthcdr start-row input)
        :for input-row := (nthcdr start-col whole-input-row)
        ;; Only the target row passes a target column index down to
        ;; SUM-ONE-ROW; all other rows are pure context (IDX = NIL).
        :for idx := (when (= count target-row) target-col)
        ;; Execute the row-level comparison; KEYS are forwarded
        ;; untouched (SUM-ONE-ROW ignores the ones it does not take).
        :for (row-plus row-minus)
          := (apply 'sum-one-row item input-row mem-row idx keys)
        ;; Accumulate evidence over the whole memory block.
        :summing row-plus :into row-pluses
        :summing row-minus :into row-minuses
        :finally
           ;; Amplify each evidence pile non-linearly, then subtract
           ;; the negative evidence from the positive.
           (return
             (- (funcall rectified-polynomial row-pluses)
                (funcall rectified-polynomial row-minuses)))))

(when nil
  ;; REPL exploration of SUM-ONE-MEMORY (evaluate forms individually).
  (sum-one-memory 'foo
                  '(((foo) (foo))
                    ((bar) (bar)))  ; input
                  '(((foo) (foo))
                    ((bar) (bar)))  ; memory
                  :rectified-polynomial (make-rectified-polynomial 2)
                  :target-row 0 :target-col 0
                  :start-row 0 :start-col 0)

  ;; The call above decomposes into the two row sums below (try them!):
  (sum-one-row 'foo '((foo) (foo)) '((foo) (foo)) 0)   ; => (2 0)
  (sum-one-row 'foo '((bar) (bar)) '((bar) (bar)) nil) ; => (2 2)
  ;; Pluses total 4 and minuses total 2, giving the final output 12:
  (- (funcall (make-rectified-polynomial 2) 4)
     (funcall (make-rectified-polynomial 2) 2))

  ;; A partially mismatching input: positive and negative evidence
  ;; cancel and both rectify to 0, so this scores 0.
  (sum-one-memory 'foo
                  '(((foo) (foo))
                    ((baz) (baz)))
                  '(((foo) (foo))
                    ((foo) (foo)))
                  :rectified-polynomial (make-rectified-polynomial 2)
                  :target-row 0 :target-col 0
                  :start-row 0 :start-col 0)
  )

;; Shared degree-2 activation, reused by SUM-ALL-MEMORIES for every memory.
(defparameter *poly-2* (make-rectified-polynomial 2))

(defun sum-all-memories (item input memories target-row target-col)
  "Score the fit of ITEM at (TARGET-ROW, TARGET-COL) of INPUT.
Each memory block in MEMORIES casts one signed vote via
SUM-ONE-MEMORY (with the shared degree-2 activation *POLY-2*) and the
votes are summed into the final score."
  ;; The original accumulated :INTO a variable named BEST-MEMORY, but
  ;; the value is the TOTAL across memories, not a maximum; a plain
  ;; :SUM says exactly that and drops the redundant :FINALLY clause.
  (loop :for memory :in memories
        :sum (sum-one-memory item input memory
                             :target-col target-col
                             :target-row target-row
                             :rectified-polynomial *poly-2*)))

(defun scan-all-memories (item input memories)
  "Produce a score grid congruent with INPUT: every cell holds the
summed memory vote (via SUM-ALL-MEMORIES) for ITEM at that
row/column position.  This is the exported entry point; its output
grid can itself be fed to a further layer of memories."
  (let ((row-idx -1))
    (mapcar (lambda (row)
              (incf row-idx)
              (let ((col-idx -1))
                (mapcar (lambda (cell)
                          (declare (ignore cell))
                          (incf col-idx)
                          (sum-all-memories item input memories
                                            row-idx col-idx))
                        row)))
            input)))

(when nil
  ;; End-to-end REPL examples: two layers of memories.
  (defparameter *layer1-memories*
    '((((foo) (foo) (foo))          ; horizontal bar of foo (top row)
       (() () ())
       (() () ()))
      (((foo) () ())                ; diagonal of foo
       (() (foo) ())
       (() () (foo)))
      (((foo) () ())                ; vertical bar of foo (first column)
       ((foo) () ())
       ((foo) () ()))))
  (defparameter *layer2-memories*
    '(((() () ())                   ; all-blank placeholder memories
       (() () ())
       (() () ()))
      ((() () ())
       (() () ())
       (() () ()))
      ((() () ())
       (() () ())
       (() () ()))))

  (sum-all-memories 'foo
                    '((() (foo) (foo))
                      ((bar) (bar) ())
                      (() () ()))
                    *layer1-memories* 0 0) ; => 34

  ;; Inputs smaller than a memory work too -- the row/column walks
  ;; simply stop at the shorter list:
  (sum-all-memories 'foo
                    '(((foo) (bar)) ((qux) (foo)))
                    *layer1-memories* 0 0) ; => 12

  ;; Stacking layers: the score grid from layer 1 becomes the input to
  ;; layer 2.
  (scan-all-memories 'foo
                     (scan-all-memories 'foo
                                        '((() (foo) (foo))
                                          ((bar) (bar) ())
                                          (() () ()))
                                        *layer1-memories*)
                     *layer2-memories*)
  )