
Diff of /marvin/src/libnn/error/errorTdnnVisitor.ml


revision 1.3 by matthieu, Sun Sep 14 15:24:51 2003 UTC → revision 1.4 by matthieu, Mon Sep 15 23:58:12 2003 UTC
(* Resulting text of revision 1.4; the diff starts at line 49 of the file,
   so earlier lines are unchanged and not shown. *)

open DefaultVisitor
open TdNN

class errorTdnnVisitor =
object
  inherit [tdNN] errorVisitor

  val mutable _transfertFunction = function x
      -> (1. /. (1. +. exp (-.x)))

  val mutable _derivateFunction = function x
      -> (((1. /. (1. +. exp (-.x)))) *. (1. -. (1. /. (1. +. exp (-.x)))))

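  (* An aside, not part of this revision: _transfertFunction is the logistic
     sigmoid s(x) = 1 / (1 + exp(-x)), and _derivateFunction inlines the
     identity s'(x) = s(x) * (1 - s(x)) by expanding s twice. A hypothetical,
     equivalent formulation, for illustration only:

       let s x = 1. /. (1. +. exp (-. x))
       let s' x = (s x) *. (1. -. s x)
  *)
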
  method visit (network : tdNN) =
    let
      (**
         Returns the interval of neurons of the layer l + 1 which are
         connected to the neuron state of the layer l.

         state -> index of the neuron of the layer l in the time direction.
         field -> the field of the layer l.
         delay -> the delay of the layer l.
         currentTimeNb -> the number of neurons in the time direction of the layer l
         nextTimeNb -> the number of neurons in the time direction of the layer l + 1
       *)
      nbConnected (state, field, delay, currentTimeNb, nextTimeNb) =
      let step = ref 0 and
          startState = ref 0 and
          endState = ref 0 and
          stop = ref 0 in
        begin
          endState := -1;
          startState := -1;
          (* ... new lines 82-83, unchanged from revision 1.3, are omitted
             by the diff ... *)
            startState := if ((!step <= state) && (state <= !stop) && (!startState == -1)) then i else !startState;
            endState := if ((state < !step) && (!endState == -1)) then (i - 1) else !endState;
            step := !step + delay
          done;
          endState := if (!endState == -1) then nextTimeNb - 1 else !endState
        end; (!startState, !endState) and
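
      (* Illustration, not part of the revision: assuming the two elided
         lines iterate i over the next layer's time positions and set
         stop := !step + field - 1, nbConnected returns the first and last
         neuron j of layer l + 1 whose receptive field
         [j * delay, j * delay + field - 1] contains state. A hypothetical
         closed-form equivalent:

           let interval state field delay nextTimeNb =
             let lo = max 0 (int_of_float (ceil (float (state - field + 1) /. float delay)))
             and hi = min (nextTimeNb - 1) (state / delay) in
             (lo, hi)

         e.g. state = 4, field = 3, delay = 2, nextTimeNb = 5 gives (1, 2):
         only next-layer neurons 1 and 2 see input position 4. *)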

      (**
         Compute the index of the weight which connects the neuron
         currentState of the layer l to the neuron nextState of the
         layer l + 1.

         field -> field of the layer l
         delay -> delay of the layer l
         currentTimeNb -> the number of neurons in the time direction of the layer l
       *)
      findWeightIndex (currentState, nextState, currentTimeNb, field, delay) =
      let index = ref 0 and
          start = ref 0 in
        begin
          start := delay * nextState;
          while ((!start != currentState) && (!index < (field - 1)) && (!start < currentTimeNb - 1)) do
            index := !index + 1;
            start := !start + 1
          done
        end; !index and
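
      (* Illustration, not part of the revision: since start begins at
         delay * nextState and start and index advance together, the index
         found is essentially currentState - delay * nextState, capped by
         the field size and the layer boundary. A hypothetical closed form:

           let weightIndex currentState nextState currentTimeNb field delay =
             min (min (currentState - delay * nextState) (field - 1))
                 (currentTimeNb - 1 - delay * nextState)

         e.g. currentState = 4, nextState = 1, delay = 2, field = 3 gives
         index 2: input position 4 is the third tap of neuron 1's field. *)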

      (**
         In order to simplify the notations:
       *)
      error = network#getError and
      gradients = network#getGradients and
      output = network#getOutputLearnVector and
      outputActivation = network#getOutputActivation and
      inputSum = network#getInputSum and
      weights = network#getWeights and
      derivate = _derivateFunction and
      delay = network#getDelay and
      timeNb = network#getTimeNb and
      featuresNb = network#getFeaturesNb and
      fieldSize = network#getFieldSize and
      startEnd = ref (0, 0) and
      index = ref 0 and
      stepDelay = ref 0 in
    begin
      (**
         Compute the error of the output layer.
         Here the output vector is mapped first along the feature direction
         and then along the time direction.
       *)
      for i = 0 to featuresNb.(network#getLayerNb - 1) - 1 do
        for j = 0 to timeNb.(network#getLayerNb - 1) - 1 do
          !error.(network#getLayerNb - 1).(i).(j)
          <- (* derivate(!inputSum.(network#getLayerNb - 1).(i).(j))
              *.*) (output.(i + j) -. !outputActivation.(network#getLayerNb - 1).(i).(j));
        done
      done;
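      (* A hedged note: with the squared error E = 1/2 * sum (t - y)^2, the
         derivative dE/dy is -(t - y), so the output delta used here is
         t - y, i.e. output.(i + j) -. !outputActivation...(i).(j). The
         commented-out derivate(...) factor is the extra s'(a) term that a
         sigmoid output layer would require; dropping it amounts to treating
         the output units as linear when forming the delta. *)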
      (**
         Initialize the gradients.
       *)
      for i = 0 to Array.length !gradients - 1 do
        for j = 0 to Array.length !gradients.(i) - 1 do
          for k = 0 to Array.length !gradients.(i).(j) - 1 do
            for l = 0 to Array.length !gradients.(i).(j).(k) - 1 do
              !gradients.(i).(j).(k).(l) <- 0.
            done
          done
        done
      done;
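      (* Design note: the gradients are accumulated with +. in the
         hidden-layer loops below, because several (m, j) pairs share the
         same weight through the tied receptive fields; they must therefore
         be reset to 0. before each backward pass. *)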
      (**
         Compute the error and gradient of the hidden layers.
         l the layer
         i neuron of the layer l + 1 in the feature direction
         j neuron of the layer l + 1 in the time direction
         k neuron of the layer l in the feature direction
         m neuron of the layer l in the time direction
         stepDelay used to keep the delay concept
       *)
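      (* In formulas, a hedged summary of the loops below: for each hidden
         neuron (k, m) of layer l,

           error_l(k, m) = derivate(inputSum_l(k, m))
                           * sum over j in nbConnected and i in features(l+1)
                               of error_{l+1}(i, j) * weights_l(k, index(m, j), i)

         and each gradient slot accumulates error_{l+1}(i, j)
         * outputActivation_l(k, m) over every (m, j) pair that maps to the
         same weight index. *)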
      for l = network#getLayerNb - 2 downto 0 do
        for m = 0 to timeNb.(l) - 1 do
          startEnd := nbConnected (m, fieldSize.(l), !delay.(l), timeNb.(l), timeNb.(l + 1));
          for k = 0 to featuresNb.(l) - 1 do
            (**
               Initialization of the error term.
             *)
            !error.(l).(k).(m) <- 0.;
            for j = fst !startEnd to snd !startEnd do
              index := findWeightIndex(m, j, timeNb.(l), fieldSize.(l), !delay.(l));
              for i = 0 to featuresNb.(l + 1) - 1 do
                (**
                   Backpropagation of the error term.
                 *)
                !error.(l).(k).(m)
                <- !error.(l).(k).(m)
                   +. !error.(l + 1).(i).(j)
                   *. !weights.(l).(k).(!index).(i);
                (**
                   We compute the gradient with the error term of
                   the layer l + 1.
                 *)
                !gradients.(l).(k).(!index).(i)
                <- !gradients.(l).(k).(!index).(i)
                   +. !error.(l + 1).(i).(j)
                   *. !outputActivation.(l).(k).(m)
              done;
            done;
            !error.(l).(k).(m) <- !error.(l).(k).(m)
                                  *. derivate(!inputSum.(l).(k).(m))
          done
        done
      done
    end
end

(*          Printf.printf "Index [i=%d] [stop=%d] [step=%d] [startState=%d] [endState=%d] [state=%d] [field=%d] [delay=%d] [current=%d] [next=%d]\n"
              i !stop !step !startState !endState state field delay currentTimeNb nextTimeNb *)

(*          Printf.printf "value=%f output=%f activation=%f input=%f \n\n"
              !error.(network#getLayerNb - 1).(i).(j) output.(i + j)
              !outputActivation.(network#getLayerNb - 1).(i).(j)
              !inputSum.(network#getLayerNb - 1).(i).(j) *)

(*          Printf.printf "Index [l=%d] [m=%d] [k=%d] [j=%d] [i=%d] [start=%d] [endD=%d]\n" l m k j i (fst !startEnd) (snd !startEnd); *)

(*          Printf.printf "!error.(%d).(%d).(%d) <- !error.(%d).(%d).(%d) +. !error.(%d).(%d).(%d) *. !weights.(%d).(%d).(%d).(%d);\n"
              l k m l k m (l+1) i j l k m i;
            Printf.printf "value = %f\n\n" !error.(l).(k).(m); *)

(*          Printf.printf "!gradients.(%d).(%d).(%d).(%d) <- !error.(%d).(%d).(%d) *. !outputActivation.(%d).(%d).(%d)\n\n"
              l k m i (l+1) i j l k m *)
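
A self-contained sanity check (illustrative only, not part of this revision): the closed-form sigmoid derivative that _derivateFunction expands can be compared against a finite-difference approximation.

    (* Compare s'(x) = s(x) * (1 - s(x)) with a central finite difference. *)
    let () =
      let s x = 1. /. (1. +. exp (-. x)) in
      let s' x = (s x) *. (1. -. s x) in
      let h = 1e-6 in
      List.iter
        (fun x ->
           let fd = (s (x +. h) -. s (x -. h)) /. (2. *. h) in
           Printf.printf "x=%g  closed=%g  fd=%g\n" x (s' x) fd)
        [-2.; 0.; 1.5]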
