-
Notifications
You must be signed in to change notification settings - Fork 0
/
DNMM.py
112 lines (98 loc) · 5.06 KB
/
DNMM.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
import time
import numpy as np
from Tools import binomial_f, uniform_f, show_image_from
from HPC import HPC
def hpc_learn_patterns_wrapper(hpc, patterns, max_training_iterations):
test_hpc = HPC([49, 240, 1600, 480, 49],
0.67, 0.25, 0.04, # connection rates: (in_ec, ec_dg, dg_ca3)
0.10, 0.01, 0.04, # firing rates: (ec, dg, ca3)
0.7, 1.0, 0.1, 0.5, # gamma, epsilon, nu, turnover rate
0.10, 0.95, 0.8, 2.0) # k_m, k_r, a_i, alpha. alpha is 2 in 4.1
print "Commencing learning of", len(patterns), "I/O patterns."
time_start_overall = time.time()
iter_ctr = 0
learned_all = False
# while iter_ctr < 3:
while not learned_all and iter_ctr < max_training_iterations:
p_ctr = 0
for [input_pattern, output_pattern] in patterns:
# Neuronal turnover, setting input and output in the hpc network.
setup_start = time.time()
# hpc.neuronal_turnover_dg()
hpc.setup_pattern(input_pattern, output_pattern)
setup_end = time.time()
print "Setup took:", "{:6.3f}".format(setup_end-setup_start), "seconds."
# one iteration of learning using Hebbian learning
time_before = time.time()
hpc.learn()
time_after = time.time()
print "Iterated over pattern", p_ctr, "in", \
"{:7.3f}".format(time_after - time_before), "seconds."
# hpc.print_activation_values_sum()
p_ctr += 1
learned_all = True
print "Attempting to recall patterns..."
for pattern_index in range(len(patterns)):
print "Recalling pattern #", pattern_index
test_hpc.in_ec_weights = hpc.in_ec_weights
test_hpc.ec_dg_weights = hpc.ec_dg_weights
test_hpc.ec_ca3_weights = hpc.ec_ca3_weights
test_hpc.dg_ca3_weights = hpc.dg_ca3_weights
test_hpc.ca3_ca3_weights = hpc.ca3_ca3_weights
test_hpc.ca3_out_weights = hpc.ca3_out_weights
test_hpc.setup_input(patterns[pattern_index][0])
test_hpc.recall()
test_hpc.recall()
test_hpc.recall()
out_values_row = test_hpc.output_values.get_value()[0]
cur_p_row = patterns[pattern_index][1][0]
# print "outvals:", out_values
# print "curp", cur_p
for el_index in range(len(cur_p_row)):
if out_values_row[el_index] != cur_p_row[el_index]:
learned_all = False
print "Patterns are not yet successfully learned. Learning more..."
# print "Displaying intermediary results... (output, target)"
# show_image_from(np.asarray([out_values_row], dtype=np.float32))
# show_image_from(np.asarray([cur_p_row], dtype=np.float32))
print "iter:", iter_ctr
break
if not learned_all:
break
iter_ctr += 1
time_stop_overall = time.time()
print "Learned", len(patterns), "pattern-associations in ", iter_ctr, "iterations, which took" "{:7.3f}". \
format(time_stop_overall-time_start_overall), "seconds."
def hpc_chaotic_recall_wrapper(hpc, display_images_of_stable_output, recall_iterations):
time_the_beginning_of_time = time.time()
time_before = time.time()
cur_iters = 0
# random_input = uniform_f(1, hpc.dims[0]) * 2 - np.ones_like(hpc.input_values, dtype=np.float32)
random_input = binomial_f(1, hpc.dims[0], 0.5) * 2 - np.ones_like(hpc.input_values, dtype=np.float32)
hpc.setup_input(random_input)
hpc_extracted_pseudopatterns = []
while cur_iters < recall_iterations:
[cur_iters_term, found_stable_output, output] = hpc.recall_until_stability_criteria(
should_display_image=display_images_of_stable_output, max_iterations=recall_iterations-cur_iters)
cur_iters += cur_iters_term
if found_stable_output:
hpc_extracted_pseudopatterns.append(output)
time_after = time.time()
prop_time_until_stable = time_after - time_before
print "Propagation time until stability:", "{:6.3f}".format(prop_time_until_stable), "seconds."
print "t =", cur_iters
time_before = time.time()
print "Total chaotic recall time:", "{:6.3f}".format(time.time()-time_the_beginning_of_time), "seconds."
return [hpc_extracted_pseudopatterns, random_input]
def generate_pseudopattern_II_hpc_outputs(dim, hpc_extracted_pseudopatterns, reverse_P, set_size):
    """Create type-II pseudopatterns by randomly sign-flipping extracted outputs.

    Cycles through `hpc_extracted_pseudopatterns` and, for each selected
    pattern, flips the sign of each element with probability `reverse_P`.

    :param dim: pattern length (number of output elements).
    :param hpc_extracted_pseudopatterns: stable outputs from chaotic recall.
    :param reverse_P: per-element probability of flipping the sign.
    :param set_size: number of pseudopatterns to generate.
    :return: list of `set_size` perturbed patterns; [] if nothing was extracted.
    """
    extracted_set_size = len(hpc_extracted_pseudopatterns)
    # BUGFIX: an empty extraction set previously raised ZeroDivisionError on
    # the modulo below; return an empty result instead.
    if extracted_set_size == 0:
        return []

    pseudopatterns_II = []
    for pseudopattern_ctr in range(set_size):
        pattern = hpc_extracted_pseudopatterns[pseudopattern_ctr % extracted_set_size]
        # q=1-p because we're flipping the sign of the ones that are not flipped.
        reverse_vector = binomial_f(1, dim, (1 - reverse_P))
        reverse_vector = reverse_vector * 2 - np.ones_like(reverse_vector)
        pseudopatterns_II.append(pattern * reverse_vector)
    return pseudopatterns_II