-
Notifications
You must be signed in to change notification settings - Fork 0
/
parameter_server.py
67 lines (50 loc) · 2.35 KB
/
parameter_server.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
import operator
import threading
import numpy
class ParameterServer(object):
    """Thread-safe in-memory parameter store for a fixed 2-layer network.

    Holds one scalar per weight, keyed by the string "layer_neuron_input".
    Workers accumulate gradients via upload_gradient() and fetch the
    most-frequently-updated entries via download_gradient().
    """

    def __init__(self):
        # Fixed topology: 784 inputs -> 30 hidden neurons -> 10 outputs.
        self.num_layers = 2
        self.num_neuron_layer0 = 30
        self.num_neuron_layer1 = 10
        self.num_input = 784
        # key "layer_neuron_input" -> accumulated gradient value
        self.global_gradient_dict = dict()
        # same key -> number of uploads received for that parameter
        self.stats = dict()
        # Guards both dicts; every read-modify-write goes through it.
        self.lock = threading.Lock()
        # Per-layer neuron count and fan-in: layer 0 sees the raw inputs,
        # layer 1 sees the outputs of layer 0. This replaces the duplicated
        # if/elif branches of the original.
        neurons = {0: self.num_neuron_layer0, 1: self.num_neuron_layer1}
        fan_in = {0: self.num_input, 1: self.num_neuron_layer0}
        for layer in range(self.num_layers):
            for neuron in range(neurons[layer]):
                for input_parameter in range(fan_in[layer]):
                    key = str(layer) + "_" + str(neuron) + "_" + str(input_parameter)
                    # Random initial value in [0, 1), as in the original.
                    self.global_gradient_dict[key] = numpy.random.rand(1)[0]
                    self.stats[key] = 0

    def upload_gradient(self, gradient_list):
        """Accumulate a worker's gradients.

        gradient_list: iterable of (key, value) pairs; each value is added
        to the stored gradient and the key's upload counter is incremented.
        Raises KeyError for unknown keys.
        """
        # 'with' guarantees the lock is released even if a key is missing
        # (the original acquire/release pair would deadlock on KeyError).
        with self.lock:
            for key, value in gradient_list:
                self.global_gradient_dict[key] += value
                self.stats[key] += 1

    def download_gradient(self, num_gradient_download):
        """Return up to num_gradient_download (key, value) pairs, ordered by
        descending upload count (ties keep insertion order — stable sort).

        The count is clamped to the number of stored parameters, so asking
        for more than exist is safe.
        """
        # Sort INSIDE the lock: the original sorted self.stats before
        # acquiring it, racing with concurrent upload_gradient() calls.
        with self.lock:
            sorted_stats = sorted(
                self.stats.items(), key=operator.itemgetter(1), reverse=True
            )
            count = min(num_gradient_download, len(sorted_stats))
            return [
                (key, self.global_gradient_dict[key])
                for key, _ in sorted_stats[:count]
            ]