Commit
Refactored. Fiddled with hpc version of pseudopattern generation. No promising results. Are Hattori's papers valid? TODO: Test sequential sub-set consolidation along with chaotic patterns, too?
williampeer committed May 4, 2016
1 parent 72d51c6 commit 094da8d
Showing 5 changed files with 63 additions and 37 deletions.
59 changes: 33 additions & 26 deletions Experiments_4_x.py
@@ -1,4 +1,4 @@
import time, HPCWrappers
import time, HPCWrappers, NeocorticalModuleTraining
from HPCWrappers import hpc_learn_patterns_wrapper, hpc_chaotic_recall_wrapper
from Tools import set_contains_pattern, get_pattern_correlation, save_experiment_4_1_results, save_images_from
import Tools
@@ -167,36 +167,30 @@ def experiment_4_2_hpc_version(hpc, ann, training_set_size, original_training_patterns
": ASYNC-flag:" + str(hpc._ASYNC_FLAG) + ". " + str(training_set_size) + "x5. " +
"Turnover mode: " + str(hpc._TURNOVER_MODE) + ". Turnover rate:" +
str(hpc._turnover_rate) + ", DG-weighting: " + str(hpc._weighting_dg) + ".")

pseudopattern_set_size = 10 # this should be set to 20. debugging mode: Small value.
pseudopattern_I_set_size = pseudopattern_set_size
pseudopattern_II_set_size = pseudopattern_set_size

chaotically_recalled_patterns = []
all_rand_ins = []

pseudopatterns_I = []
pseudopatterns_II = []
#
# chaotically_recalled_patterns = []
# all_rand_ins = []
#
# pseudopatterns_I = []
# pseudopatterns_II = []

for train_set_num in range(5): # always five training sets
current_set_hipp_chaotic_recall, current_set_random_ins = \
training_and_recall_hpc_helper(hpc, training_set_size, train_set_num, original_training_patterns)

for p_ctr in range(len(current_set_hipp_chaotic_recall)):
# if not Tools.set_contains_pattern(chaotically_recalled_patterns, current_set_hipp_chaotic_recall[p_ctr]):
chaotically_recalled_patterns.append([current_set_hipp_chaotic_recall[p_ctr]])
all_rand_ins.append([current_set_random_ins[p_ctr]])
# current_set_hipp_chaotic_recall, current_set_random_ins = \
# training_and_recall_hpc_helper(hpc, training_set_size, train_set_num, original_training_patterns)

# current_pseudopatterns_I = HPCWrappers.hpc_generate_pseudopatterns_I_wrapper(hpc, pseudopattern_I_set_size)
current_pseudopatterns_II = HPCWrappers.hpc_generate_pseudopatterns_II_wrapper(hpc, pseudopattern_II_set_size,
current_set_hipp_chaotic_recall,
flip_P=0.5)
# chaotic_training_set = []
# for p_ctr in range(len(current_set_hipp_chaotic_recall)):
# # if not Tools.set_contains_pattern(chaotically_recalled_patterns, current_set_hipp_chaotic_recall[p_ctr]):
# chaotically_recalled_patterns.append([current_set_hipp_chaotic_recall[p_ctr]])
# all_rand_ins.append([current_set_random_ins[p_ctr]])
# chaotic_training_set.append([current_set_random_ins[p_ctr], current_set_hipp_chaotic_recall[p_ctr]])

# ann.train(current_pseudopatterns_I)
ann.train(current_pseudopatterns_II)
for i in range(5):
# ann.train(chaotic_training_set)
start_index = train_set_num*training_set_size
ann.train(original_training_patterns[start_index: start_index + training_set_size])

# pseudopatterns_I += current_pseudopatterns_I
pseudopatterns_II += current_pseudopatterns_II
# ann = NeocorticalModuleTraining.global_sequential_FFBP_training()

# Store 4.1-specific material:
tar_patts = []
@@ -214,6 +208,19 @@ def experiment_4_2_hpc_version(hpc, ann, training_set_size, original_training_patterns
neocortically_recalled_pairs = []
for [target_in, target_out] in original_training_patterns:
obtained_in, obtained_out = ann.get_IO(target_in)
# obtained_out = Tools.get_bipolar_in_out_values(obtained_out)
# identical = True
# for i in range(len(obtained_out)):
# for j in range(len(obtained_out[0])):
# if obtained_out[i][j] != target_out[i][j]:
# print "not identical, oo[i][j]:", obtained_out[i][j], ", taro[i][j]:", target_out[i][j]
# identical = False
# break
# if identical:
# if get_pattern_correlation(target_out, obtained_out) != 1.0:
# print "patterns are identical, but corr is:", get_pattern_correlation(target_out, obtained_out)
# else:
# print "current corr:", get_pattern_correlation(target_out, obtained_out)
sum_corr += get_pattern_correlation(target_out, obtained_out)
corr_ctr += 1
neocortically_recalled_pairs.append([obtained_in, obtained_out])
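Note on the pseudopattern call above: hpc_generate_pseudopatterns_II_wrapper is not shown in this diff, but type-II pseudopatterns in Hattori-style dual-network models are typically produced by flipping each element of a recalled bipolar pattern with probability flip_P. A minimal numpy sketch of that idea, assuming (1, N) bipolar float32 patterns; the function name and signature here are illustrative, not the repository's:

import numpy as np

def generate_pseudopatterns_II_sketch(recalled_patterns, set_size, flip_P=0.5):
    # Illustrative only; HPCWrappers.hpc_generate_pseudopatterns_II_wrapper may differ.
    # Each pseudopattern is a copy of a chaotically recalled bipolar pattern with
    # every element sign-flipped independently with probability flip_P.
    pseudopatterns = []
    for i in range(set_size):
        base = recalled_patterns[i % len(recalled_patterns)]
        flip_mask = np.random.binomial(1, flip_P, size=base.shape).astype(np.float32)
        flipped = base * (1.0 - 2.0 * flip_mask)  # mask 1 -> flip sign, mask 0 -> keep
        pseudopatterns.append(flipped.astype(np.float32))
    return pseudopatterns
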
9 changes: 1 addition & 8 deletions HPC.py
@@ -479,7 +479,7 @@ def recall(self):
self.ca3_out_weights.get_value(return_internal_type=True))

# Bipolar output:
self.set_output(self.get_bipolar_in_out_values(self.output_values.get_value(return_internal_type=True)))
self.set_output(Tools.get_bipolar_in_out_values(self.output_values.get_value(return_internal_type=True)))

def recall_until_stability_criteria(self, should_display_image, max_iterations):
# recall until output unchanged three iterations
@@ -516,13 +516,6 @@ def propagate(self, input_pattern):
self.recall()
return self.output_values.get_value()

def get_bipolar_in_out_values(self, values):
new_values = np.ones_like(values, dtype=np.float32)
for value_index in xrange(values.shape[1]):
if values[0][value_index] < 0:
new_values[0][value_index] = -1
return new_values

def reset_eta_and_zeta_values(self):
print "Resetting zeta and nu-values..."
ca3_values = self.ca3_values.get_value()
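The comment in recall_until_stability_criteria says recall runs until the output is unchanged for three iterations; its body is not shown in this diff. A rough standalone sketch of that stopping criterion (assumed, not the repository's implementation):

import numpy as np

def recall_until_stable_sketch(hpc, max_iterations, required_stable=3):
    # Iterate hpc.recall() until the output pattern has stayed identical for
    # `required_stable` consecutive iterations, or until max_iterations is hit.
    prev_output = None
    stable_iterations = 0
    for iteration in range(max_iterations):
        hpc.recall()
        current_output = hpc.output_values.get_value()
        if prev_output is not None and np.array_equal(current_output, prev_output):
            stable_iterations += 1
            if stable_iterations >= required_stable:
                return iteration + 1
        else:
            stable_iterations = 0
        prev_output = current_output
    return max_iterations
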
20 changes: 17 additions & 3 deletions NeocorticalModuleTraining.py
@@ -4,6 +4,7 @@
from DataWrapper import training_patterns_associative
# from DataWrapper import training_patterns_heterogeneous


def traditional_training():
io_dim = 49
ann = NeocorticalNetwork(io_dim, 30, io_dim, 0.01, 0.9)
@@ -12,8 +13,21 @@ def traditional_training():
for i in range(5):
for training_iterations in range(5):
ann.train(training_set[i*ss:i*ss+ss])
for j in range(ss*5):
Tools.show_image_from(ann.get_IO(training_set[j][0])[1])
# for j in range(ss*5):
# Tools.show_image_from(ann.get_IO(training_set[j][0])[1])

return ann


def global_sequential_FFBP_training():
io_dim = 49
ann = NeocorticalNetwork(io_dim, 30, io_dim, 0.01, 0.9)
ss = 2
training_set = training_patterns_associative[:5*ss]
for i in range(10):
ann.train(training_set)

return ann


def retrieve_chaotic_patterns():
@@ -49,4 +63,4 @@ def train_on_chaotic_patterns():
Tools.show_image_from(ann.get_IO(training_patterns_associative[j][0])[1])
# Tools.show_image_from(training_patterns_associative[15][0])
# traditional_training()
train_on_chaotic_patterns()
# train_on_chaotic_patterns()
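
Experiments_4_x.py contains a commented-out hook for the new function (ann = NeocorticalModuleTraining.global_sequential_FFBP_training()). A minimal usage sketch, using the associative patterns imported from DataWrapper as in this module; the correlation check at the end is illustrative only:

import NeocorticalModuleTraining
from DataWrapper import training_patterns_associative
from Tools import get_pattern_correlation

# Train a fresh feed-forward network on the whole associative set at once,
# then check recall quality for the first pattern.
ann = NeocorticalModuleTraining.global_sequential_FFBP_training()
target_in, target_out = training_patterns_associative[0]
obtained_in, obtained_out = ann.get_IO(target_in)
print get_pattern_correlation(target_out, obtained_out)
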
3 changes: 3 additions & 0 deletions NeocorticalNetwork.py
@@ -1,6 +1,7 @@
import theano
import theano.tensor as T
import numpy as np
import Tools
from Tools import binomial_f, uniform_f
theano.config.floatX = 'float32'

@@ -46,6 +47,8 @@ def __init__(self, in_dim, h_dim, out_dim, alpha, momentum):
self.feed_forward = theano.function([new_input, input_hidden_Ws, hidden_output_Ws],
updates=[(self._in, new_input), (self._h, next_h), (self._out, next_out)])

self.set_output = theano.function([new_input], updates=[(self._out, new_input)])

Ws_h_out = T.fmatrix()
Ws_in_h = T.fmatrix()
prev_delta_W_in_h = T.fmatrix()
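The new set_output function follows the standard Theano update idiom: calling it overwrites the shared output variable with the supplied matrix. A standalone illustration of that idiom (not code from this repository):

import numpy as np
import theano
import theano.tensor as T

# A shared variable standing in for the network's output, and a function whose
# only effect is to overwrite it with a new value.
out = theano.shared(np.zeros((1, 49), dtype=np.float32))
new_value = T.fmatrix()
set_output = theano.function([new_value], updates=[(out, new_value)])

set_output(np.ones((1, 49), dtype=np.float32))  # out now holds the new pattern
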
9 changes: 9 additions & 0 deletions Tools.py
@@ -9,6 +9,14 @@
theano.config.floatX = 'float32'


def get_bipolar_in_out_values(values):
new_values = np.ones_like(values, dtype=np.float32)
for value_index in xrange(values.shape[1]):
if values[0][value_index] < 0:
new_values[0][value_index] = -1
return new_values


def show_image_from(out_now):
im = create_image_helper(out_now)
im.show()
@@ -91,6 +99,7 @@ def show_image_ca3(in_values):
random_f = theano.function([rows, columns], outputs=shared_random_generator.random_integers(
size=(rows, columns), low=0, high=10000, dtype='float32')/10000.)


def set_contains_pattern(patterns_set, pattern):
for pat in patterns_set:
if get_pattern_correlation(pat, pattern) == 1:
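The element-wise loop in get_bipolar_in_out_values can also be written as a single vectorized numpy expression; an equivalent sketch, assuming the same (1, N) float32 row-vector input:

import numpy as np

def get_bipolar_in_out_values_vectorized(values):
    # Same mapping as the helper added above: negative entries become -1, all others +1.
    return np.where(values < 0, -1.0, 1.0).astype(np.float32)
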
