# PSYCO 452: Geometry of Hebb learning
# Demonstrates Hebb learning of associations between pairs of 3D vectors.
# Author: Michael R.W. Dawson
# This version: August 18, 2014

# Initialization ----
# Training patterns, learning rate, and weight matrix.

# Basis vectors ----
# Three patterns to be used in learning.
# Note that each is a basis vector for 3D space!
p1 <- c(1, 0, 0)  # pattern 1
p2 <- c(0, 1, 0)  # pattern 2
p3 <- c(0, 0, 1)  # pattern 3

# Linearly independent vectors ----
# Alternative pattern set: three linearly independent vectors.
# p1 <- c(0.1, 0.2, 0.3)   # pattern 1
# p2 <- c(0.4, -0.5, 0.6)  # pattern 2
# p3 <- c(0.7, -0.9, -0.5) # pattern 3

# Linearly dependent vectors ----
# Alternative pattern set: three linearly dependent vectors.
# p1 <- c(0.1, 0.2, 0.3) # pattern 1
# p2 <- c(0.4, 0.5, 0.6) # pattern 2
# p3 <- c(0.5, 0.7, 0.9) # pattern 3

# Origin for plots ----
# Anchors network responses drawn as vectors in 3D space.
origin <- c(0, 0, 0)

# Learning rate ----
LearningRate <- 0.1

# Maximum number of training sweeps ----
MaxEpochs <- 10

# Tabula rasa ----
# Initial 3 x 3 connection-weight matrix, filled with zeros.
Wts <- matrix(0, nrow = 3, ncol = 3)

# Initial noise ----
# Alternatively, fill the weights with small random numbers.
# Wts <- matrix(runif(9, min = -0.3, max = 0.3), ncol = 3)

# Make sure that the required R packages are installed;
# uncomment the two install.packages() lines below if missing!
# install.packages("rgl")
# install.packages("compositions")

# Load the required libraries; they handle the 3D graphing.
library("rgl")
library("compositions")

# Display starting state on the R console ----
# Echo each pattern, the learning rate, and the initial weights.
p1
p2
p3
LearningRate
Wts

###############################################################
# Learn three different associations each epoch. After        #
# learning all three, test recall by presenting each pattern  #
# as a cue, and plot the result to show the geometry of       #
# learning.                                                   #
###############################################################

# Draw a 3D space in which to put the plotted vectors ----
open3d(windowRect = c(0, 0, 800, 800))  # rgl graphics window

# Use rgl to draw a coordinate system.
decorate3d(c(-2, 2), c(-2, 2), c(-2, 2),
           xlab = "x", ylab = "y", zlab = "z",
           box = TRUE, axes = TRUE, main = NULL, sub = NULL)

# Draw the three desired patterns in the coordinate system, in black.
# A black vector is the correct output in our graph!
arrows3D(origin, p1, length = 0.1, lwd = 5, col = "black")
arrows3D(origin, p2, length = 0.1, lwd = 5, col = "black")
arrows3D(origin, p3, length = 0.1, lwd = 5, col = "black")
#########################################################
# With the space drawn, do the learning. Each sweep we  #
# test recall and plot the recalled vectors in the      #
# graph. How close does each come to the correct        #
# response drawn in black?                              #
#########################################################

# Learning and recall in a single for loop ----
for (epoch in seq_len(MaxEpochs)) {
  ##########################################################
  # Learn three different associations between pattern pairs:
  #   p1 cues p2
  #   p2 cues p3
  #   p3 cues p1
  # Each association is stored by adding the outer product of
  # the recalled vector and the cue, scaled by the learning
  # rate, to the existing weights. R takes care of treating
  # the vectors as column and row!
  ##########################################################

  # Association 1: cue = p1, recall = p2.
  Wts <- Wts + LearningRate * (p2 %o% p1)

  # Association 2: cue = p2, recall = p3.
  Wts <- Wts + LearningRate * (p3 %o% p2)

  # Association 3: cue = p3, recall = p1.
  Wts <- Wts + LearningRate * (p1 %o% p3)

  ############################################################
  # Now that learning is done for this epoch, plot recall.
  # Present each cue and graph each recalled vector; recall is
  # the cue vector premultiplied by the weight matrix. Watching
  # the graph shows how the responses change over epochs.
  ############################################################

  # Cue with p1 — is p2 correctly recalled? Look at the graph!
  recall <- Wts %*% p1
  arrows3D(origin, t(recall), length = 0.1, lwd = 5, col = "red")

  # Cue with p2 — is p3 correctly recalled? Look at the graph!
  recall <- Wts %*% p2
  arrows3D(origin, t(recall), length = 0.1, lwd = 5, col = "green")

  # Cue with p3 — is p1 correctly recalled? Look at the graph!
  recall <- Wts %*% p3
  arrows3D(origin, t(recall), length = 0.1, lwd = 5, col = "blue")
}
# ------------------------------------------------------------