diff --git a/src/Global_Var.py b/src/Global_Var.py
index cdee7c800aeddb5684deddee55a25ef1031b10ad..69e6dcf3c6cafdb35669d49e42891f9cb3423bf6 100755
--- a/src/Global_Var.py
+++ b/src/Global_Var.py
@@ -114,8 +114,6 @@ class Global_Var():
         """
         self.db = self.client[name]

-        millis = int(round(time.time() * 1000))
-
         if ('labels' in self.db.list_collection_names()):
             col = pymongo.collection.Collection(self.db, 'labels')
             col.create_index([("T", 1)])
diff --git a/src/Modules/General/callbacks.py b/src/Modules/General/callbacks.py
index b977683da557e5de078a42e9d06b8d0bd823914a..d7a98d44d2eff1e266de9929caed209e74ab763c 100755
--- a/src/Modules/General/callbacks.py
+++ b/src/Modules/General/callbacks.py
@@ -116,7 +116,7 @@ class callbacks(callbacksOp):
                             mode='lines+markers',
                             text=list(super.xAxisLabel),
                             customdata=list(super.SynapseGraphY[layer]),
-                            hovertemplate="%{text} <br> <b>%{customdata}</b> <br> <b>Max</b> "+str(super.MaxSynapse[layer])))
+                            hovertemplate="<span style='color:white;'>%{text} <br> <b>%{customdata}</b> <br> <b>Max</b> "+str(super.MaxSynapse[layer])+"</span>"))
                     if(f == "Potentials"):
                         if(layer not in super.PotentialGraphY):
                             super.PotentialGraphY[layer] = deque(maxlen=100)
@@ -761,7 +761,6 @@ class callbacks(callbacksOp):
                 if (str(l["Label"]) != str(a["Input"])):
                     loss += 1

-        print(loss)
         return round((loss*100) / len(spikes), 2)

    # ---------------------------------------------------------------------
diff --git a/src/Modules/General/layout.py b/src/Modules/General/layout.py
index 997ad787b59b2609822c48f61ad2c3b5ccb11c00..504b76e8c4df45ec407f82c1cbfc8429a7543302 100755
--- a/src/Modules/General/layout.py
+++ b/src/Modules/General/layout.py
@@ -136,16 +136,16 @@ class layout(layoutOp):
                             id='GeneralGraphFilter',
                             options=[{'label': "Spikes", 'value': "Spikes"}, {'label': "Synapses activity", 'value': "Synapses"}, {
                                 'label': "Neurons potential", 'value': "Potentials"}],
-                            value=['Spikes'],
+                            value=["Spikes","Synapses","Potentials"],
                             multi=True,
                             style={"minWidth": "20%", "marginLeft": "10px", "textAlign": "start"}),
                         # Layers filter
                         dcc.Dropdown(
                             id='GeneralLayerFilter',
                             options=[{'label': str(i), 'value': str(i)} for i in (
-                                i for i in self.g.Layer_Neuron if i != "Input")],
+                                i for i in self.g.Layer_Neuron if ("Input" not in i and "pool" not in i))],
                             value=[str(i) for i in (
-                                i for i in self.g.Layer_Neuron if i != "Input")],
+                                i for i in self.g.Layer_Neuron if ("Input" not in i and "pool" not in i))],
                             multi=True,
                             style={"minWidth": "20%", "marginLeft": "15px", "textAlign": "start"})],
                         className="d-flex", style={"paddingLeft": "20px", 'width': '100%'})
                 ], className="col-12")
diff --git a/src/Modules/Synapse/callbacks.py b/src/Modules/Synapse/callbacks.py
index 0e4744278d2af0ece842b6566562cfc621a65456..6d73376175a3a3322817a0bae883610728ef8a3e 100755
--- a/src/Modules/Synapse/callbacks.py
+++ b/src/Modules/Synapse/callbacks.py
@@ -680,15 +680,18 @@ class callbacks(callbacksOp):

            # MongoDB---------------------
            col = pymongo.collection.Collection(g.db, 'synapseWeightFinal')
-            globalSynapseWeights = col.aggregate([{"$match": { "L": {"$eq": layer}}}])
+            globalSynapseWeights = col.aggregate([{"$match": { "_id.L": {"$eq": layer}}}])
            # ToDF----------------------
            globalSynapseWeights = pd.DataFrame(list(globalSynapseWeights))
-            globalSynapseWeights["To"] = globalSynapseWeights["_id"].map(lambda i: i["To"])
-            globalSynapseWeights["C"] = globalSynapseWeights["_id"].map(lambda i: i["C"])
-            globalSynapseWeights["x"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["x"])
-            globalSynapseWeights["y"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["y"])
-            globalSynapseWeights.drop(columns=["_id"])
+
+            if(not globalSynapseWeights.empty):
+                globalSynapseWeights["To"] = globalSynapseWeights["_id"].map(lambda i: i["To"])
+                globalSynapseWeights["C"] = globalSynapseWeights["_id"].map(lambda i: i["C"])
+                globalSynapseWeights["x"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["x"])
+                globalSynapseWeights["y"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["y"])
+                globalSynapseWeights["L"] = globalSynapseWeights["_id"].map(lambda i: i["L"])
+                globalSynapseWeights.drop(columns=["_id"])
            # ----------------------------

            return globalSynapseWeights

diff --git a/src/Modules/Synapse/spark.py b/src/Modules/Synapse/spark.py
index 8a2f89b2b5cbbda60928be0a06f4f4ac80a5a8f2..8bc9edf62640b028493b194b1375eb5677627e59 100755
--- a/src/Modules/Synapse/spark.py
+++ b/src/Modules/Synapse/spark.py
@@ -35,7 +35,7 @@ class spark(sparkOp):
        # --------------------------------------------------
        df = self.g.sparkSession.read.format("com.mongodb.spark.sql") \
            .option("spark.mongodb.input.uri", self.MONGODBURL + self.g.name + "."+self.DOCUMENT_NAME+"?authSource=admin&readPreference=primaryPreferred") \
-            .option("pipeline", "[{ $sort: { T: 1 } },{$group : { _id : {To:'$To', C:'$C', index:'$index' }, L : { $last: '$L'}, T : { $last: '$T'},V : { $last: '$V'} } }]")
+            .option("pipeline", "[{ $sort: { T: 1 } },{$group : { _id : {To:'$To', C:'$C', index:'$index', L:'$L'}, T : { $last: '$T'},V : { $last: '$V'} } }]")

        df = df.load()

@@ -54,7 +54,7 @@ class spark(sparkOp):
            col.create_index([("_id", 1)])
            col.create_index([("_id.To", 1),("_id.C", 1)])
            col.create_index([("T", 1)])
-            col.create_index([("index", 1)])
+            col.create_index([("_id.index", 1)])
            col.create_index([("V", 1)])

    # --------------------------------------------------------
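For context on the MongoDB-side changes: the Spark `$group` stage now folds the layer `L` into the compound `_id` (alongside `To`, `C` and `index`), which is why `Modules/Synapse/callbacks.py` matches on the dotted path `_id.L` and `spark.py` indexes `_id.index`. The sketch below is a minimal, hypothetical pymongo illustration of how documents in `synapseWeightFinal` are shaped and queried after this change; the connection URI, database name and example layer are placeholders, not values taken from the patch.

from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")   # placeholder URI
col = client["simulation"]["synapseWeightFinal"]    # database name is a placeholder

layer = "Conv1"                                     # hypothetical layer name

# Each document produced by the $group stage now looks like:
#   {"_id": {"To": ..., "C": ..., "index": {"x": ..., "y": ...}, "L": ...},
#    "T": ..., "V": ...}
# so a single layer is selected through the dotted "_id.L" path,
# the same match shape used by the updated callbacks.py.
for doc in col.aggregate([{"$match": {"_id.L": {"$eq": layer}}}]):
    i = doc["_id"]
    print(i["To"], i["C"], i["index"]["x"], i["index"]["y"], doc["V"])

# Dotted-path index on a sub-field of _id, mirroring the updated spark.py.
col.create_index([("_id.index", 1)])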