Skip to content
Snippets Groups Projects
Commit fca7d189 authored by Hammouda Elbez's avatar Hammouda Elbez :computer:
Browse files

Multi-D update

parent b1fb92e3
No related branches found
No related tags found
1 merge request!25VS2N 0.36
......@@ -186,24 +186,28 @@ class Global_Var():
Returns:
Array: Heatmap vector
"""
#data = data[data.C == 0]
try:
heatmap = np.zeros((x, y))
heatmap[:] = -1
data = data.to_numpy()
print(data)
if(depth == 0): # single depth
for d in data:
heatmap[d[2][0]][d[2][1]] = d[2][2]
heatmap[int(d[0])][int(d[1])] = d[3]
else: # multiple dimensions
for d in data:
if(heatmap[d[2][0]][d[2][1]] == -1):
heatmap[d[2][0]][d[2][1]] = d[2][2]
if(heatmap[int(d[0])][int(d[1])] == -1):
heatmap[int(d[0])][int(d[1])] = d[3]
else:
if(heatmap[d[2][0]][d[2][1]] < d[2][2]):
heatmap[d[2][0]][d[2][1]] = -d[2][2]
if(heatmap[int(d[0])][int(d[1])] < d[3]):
heatmap[int(d[0])][int(d[1])] = -d[3]
return heatmap
except Exception as e:
print("createHeatMap: "+str(e))
def createVerticalHeatMap(self, data):
""" Create a vertical heat map from a given data array .
......
......@@ -354,14 +354,12 @@ class callbacks(callbacksOp):
html component that contains the global heatmaps
"""
heatMapX = max(list(data["x"])) + 1
heatMapY = max(list(data["y"])) + 1
depth = max(list(data["C"]))
heatMapX = data["x"].max() + 1
heatMapY = data["y"].max() + 1
depth = data["C"].max()
data = data.sort_values(["To", "C"])
data["data"] = [[x, y, v] for x, y, v in zip(data["x"], data["y"], data["V"])]
data = data[["To", "C", "data"]]
data = data[["x", "y", "C", "V"]]
data = data.sort_values(["C"])
for i in g.LayersNeuronsInfo:
if(i["layer"] == selectedLayer):
......@@ -381,7 +379,7 @@ class callbacks(callbacksOp):
zmin=-1,
zmax=1,
z=g.createHeatMap(
heatMapX, heatMapY, data[data.To == index].sort_values(by=['C']),depth),
heatMapX, heatMapY, data[data.To == index],depth),
colorscale='jet',
name=str(index)),
row=xx, col=yy)
......@@ -412,7 +410,7 @@ class callbacks(callbacksOp):
"""
try:
if isOn:
if data != None:
if not data.empty:
if str(index) not in yAxisList:
yAxisList[str(index)] = deque(maxlen=100)
......@@ -420,13 +418,11 @@ class callbacks(callbacksOp):
index)] = deque(maxlen=100)
# add data
if len(data) != 0:
yAxisList[str(index)].append(
float("{:.6f}".format(statistics.mean([float(d["V"]) for d in data]))))
HeatMapSynapseFreqGraph[str(index)].append(
[float(d["V"]) for d in data])
# Y X or the vertical Heatmaps
if data.count != 0:
yAxisList[str(index)].append(float("{:.6f}".format(data["V"].mean())))
HeatMapSynapseFreqGraph[str(index)].append([float(d) for d in data["V"].values])
# Y X or the vertical Heatmaps
heatMapsZ = list()
heatMapsZLabel = list()
for heatmap in HeatMapSynapseFreqGraph[str(index)]:
......@@ -587,28 +583,22 @@ class callbacks(callbacksOp):
paper_bgcolor= "rgba(255, 255, 255,0)",
plot_bgcolor= "rgba(255, 255, 255,0)")
if isOn:
if data != None:
data.sort(key=lambda d: d["C"])
depth = data[-1]["C"]
group = groupby(data, key=lambda d: d["C"])
# add neuron indexes to show heatmap correctly
data = [item for item in (
max(v, key=lambda k: k["V"]) for k, v in group)]
if not data.empty:
depth = data["C"].max()
data = data.sort_values(["T"])
data = data.groupby(["C","x","y"], as_index=False).agg({"To":"last","T":"max","L":"last","V":"last"})
# update heatmap in heatmaps
if str(index) not in heatmaps:
heatmaps[str(index)] = np.zeros(
(heatMapX, heatMapY))
for synapse in data:
for X,Y,V,C in zip(data["x"],data["y"],data["V"],data["C"]):
try:
heatmaps[str(index)][synapse["index"]["x"]
][synapse["index"]["y"]] = synapse["V"]
heatMapWithIndexs.append(
[synapse["index"]["x"], synapse["index"]["y"], synapse["V"]])
# TODO: multi-D
heatmaps[str(index)][X][Y] = V
heatMapWithIndexs.append([X, Y, C, V])
except Exception as e:
continue
print("heatMap1: "+str(e))
return {'data': [
go.Heatmap(
......@@ -638,7 +628,7 @@ class callbacks(callbacksOp):
colorscale="jet")],
'layout': layout}
except Exception as e:
print("heatMap: "+str(e))
print("heatMap2: "+str(e))
# ------------------------------------------------------------
# MongoDB operations
......@@ -668,13 +658,14 @@ class callbacks(callbacksOp):
{"T": {'$gt': timestamp, '$lte': (timestamp+g.updateInterval)}} ]}
}])
# ToJson----------------------
SynapseWeight = loads(dumps(SynapseWeight))
# ToDF----------------------
SynapseWeight = pd.DataFrame(list(SynapseWeight))
if(not SynapseWeight.empty):
SynapseWeight["x"] = SynapseWeight["index"].map(lambda i: i["x"])
SynapseWeight["y"] = SynapseWeight["index"].map(lambda i: i["y"])
SynapseWeight.drop(columns=["index"])
# ----------------------------
if not SynapseWeight:
return None
return SynapseWeight
def getGlobalSynapseWeights(self, g, layer):
......@@ -687,14 +678,17 @@ class callbacks(callbacksOp):
final synapses weights
"""
# MongoDB---------------------
df = g.sparkSession.read.format("com.mongodb.spark.sql") \
.option("spark.mongodb.input.uri", g.MONGODBURL + g.name + ".synapseWeightFinal?authSource=admin&readPreference=primaryPreferred") \
.option("pipeline","[{$match: { L: {$eq: '"+layer+"'}}}]")
df = df.load()
df = df.select("_id.To","_id.C","L","T","V","_id.index.x","_id.index.y")
# ToJson----------------------
globalSynapseWeights = df.toPandas()
col = pymongo.collection.Collection(g.db, 'synapseWeightFinal')
globalSynapseWeights = col.aggregate([{"$match": { "L": {"$eq": layer}}}])
# ToDF----------------------
globalSynapseWeights = pd.DataFrame(list(globalSynapseWeights))
globalSynapseWeights["To"] = globalSynapseWeights["_id"].map(lambda i: i["To"])
globalSynapseWeights["C"] = globalSynapseWeights["_id"].map(lambda i: i["C"])
globalSynapseWeights["x"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["x"])
globalSynapseWeights["y"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["y"])
globalSynapseWeights.drop(columns=["_id"])
# ----------------------------
return globalSynapseWeights
......
0% — still loading, or the diff failed to render.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment