diff --git a/CHANGELOG.md b/CHANGELOG.md index da5a20f526b022c5e2a231b945d357f3c2d21587..4cab538829aad9a7373dab8f7ac21d0075142e24 100755 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changes Log +## v0.36 + +### Core +- Multilayer support added +- Spark & MongoDB version updated + ## v0.35 ### Core diff --git a/README.md b/README.md index f1bba14357c3cfe8d5abca9eebfab0879891fec9..14d61874ae5c8c7f0d82fdb55d71b8643269bd11 100755 --- a/README.md +++ b/README.md @@ -51,7 +51,7 @@ pip install -r requirements.txt - Add MongoDB credentials in `config.py` -4. Using Python >= 3.0 (**make sure MongoDB is running**) run `python VS2N.py` to start VS2N. If everything is fine, the web interface should appear automatically. +4. Using Python >= 3.0 (**make sure MongoDB is running**), run `python VS2N.py` to start VS2N. If everything is fine, the web interface should appear automatically. ## Docker image You can also use a docker image to run VS2N : https://hub.docker.com/r/helbez/vs2n @@ -74,6 +74,13 @@ src/ # Source directory. # [Changes Log](https://gitlab.univ-lille.fr/bioinsp/VS2N/-/blob/master/CHANGELOG.md) +## v0.36 + +### Core +- Multilayer support added +- Spark & MongoDB version updated +- Added support for more simulators (Norse `Python`, CSNN `C++`) + ## v0.35 ### Core diff --git a/default.config.py b/default.config.py index 9bff81d36b667afcff64df2a6fa42ee04ca54d6b..e94677370fbf450eece871e3738338c758d3db8a 100755 --- a/default.config.py +++ b/default.config.py @@ -6,7 +6,7 @@ class config(): VS2N_HOST = "0.0.0.0" VS2N_PORT = 8050 - VS2N_VERSION= "0.351" + VS2N_VERSION= "0.36" DEBUG = False DATABASE_URL = "mongodb://127.0.0.1:27017/" USERNAME = "" #USERNAME HERE diff --git a/requirements.txt b/requirements.txt index c4bf4df33319222aa875cf8076c39569f653924a..618cc18046c565e162fdd9bf07fb82f7a28a8025 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,16 +1,16 @@ pypandoc -pymongo==4.1.0 -pyspark==3.2.1 +pymongo +pyspark pandas -dash==2.3.1 -plotly==5.7.0 -dash-renderer==1.9.1 -dash-html-components==2.0.0 -dash-core-components==2.0.0 -dash-bootstrap-components==1.1.0 -dash-cytoscape==0.3.0 -dash-daq==0.5.0 -Flask-Login==0.6.0 -wheel==0.37.1 -werkzeug==2.1.1 +dash +plotly +dash-renderer +dash-html-components +dash-core-components +dash-bootstrap-components +dash-cytoscape +dash-daq +Flask-Login +wheel +werkzeug PyArrow diff --git a/src/Global_Var.py b/src/Global_Var.py index b524cad91ce61de4ea3cc6e7b84ffa6049c2fc1c..1bb137573ee5b83c248f61258ad4738622ce8b1b 100755 --- a/src/Global_Var.py +++ b/src/Global_Var.py @@ -36,7 +36,7 @@ class Global_Var(): Dataset = "" Input = 0 Date = "" - Accuracy = "0" + Accuracy = 0 finalLabels = None labelsExistance = False oldIdSpike = None @@ -48,13 +48,14 @@ class Global_Var(): self.Max = 0 self.LayersNeuronsInfo = [] + self.NeuronsSize = None self.Layer_Neuron = None self.NeuronsNbr = 0 self.LayersNbr = 0 self.Dataset = "" self.Input = 0 self.Date = "" - self.Accuracy = "0" + self.Accuracy = 0 self.finalLabels = None self.labelsExistance = False self.oldIdSpike = None @@ -113,8 +114,6 @@ class Global_Var(): """ self.db = self.client[name] - millis = int(round(time.time() * 1000)) - if ('labels' in self.db.list_collection_names()): col = pymongo.collection.Collection(self.db, 'labels') col.create_index([("T", 1)]) @@ -137,8 +136,10 @@ class Global_Var(): if ('synapseWeight' in self.db.list_collection_names()): col = pymongo.collection.Collection(self.db, 'synapseWeight') col.create_index([("T", 1)]) + col.create_index([("C", 1)]) 
col.create_index([("L", 1)]) col.create_index([("To", 1)]) + col.create_index([("index", 1)]) print("Synapses index done") def getLabelTime(self, step, value): @@ -175,7 +176,7 @@ class Global_Var(): return x, y - def createHeatMap(self, x, y, data): + def createHeatMap(self, x, y, data, depth): """ Create a heatmap from a set of synapses values . Args: @@ -185,12 +186,26 @@ class Global_Var(): Returns: Array: Heatmap vector """ - heatmap = np.zeros((x, y)) - heatmap[:] = None - for d in data: - heatmap[d[0]][d[1]] = d[2] - - return heatmap + try: + heatmap = np.zeros((x, y)) + heatmap[:] = -1 + data = data.to_numpy() + + if(depth == 0): # single depth + for d in data: + heatmap[int(d[0])][int(d[1])] = d[3] + else: # multiple dimensions + for d in data: + if(d[2] == 0): + #if(heatmap[int(d[0])][int(d[1])] == -1): + heatmap[int(d[0])][int(d[1])] = d[3] + #else: + # heatmap[int(d[0])][int(d[1])] = np.mean(heatmap[int(d[0])][int(d[1])]+d[3]) + + return heatmap + + except Exception as e: + print("createHeatMap: "+str(e)) def createVerticalHeatMap(self, data): """ Create a vertical heat map from a given data array . @@ -255,13 +270,15 @@ class Global_Var(): conf.setAppName(self.name) conf.set("spark.executor.instances", "8") - conf.set("spark.executor.memory", "8g") + conf.set("spark.executor.memory", "16g") conf.set("spark.sql.execution.arrow.pyspark.enabled", "true") self.sparkSession = SparkSession.builder.config(conf=conf) \ .config('spark.jars.packages', 'org.mongodb.spark:mongo-spark-connector_2.12:2.4.4') \ .getOrCreate() - + + if not self.config.DEBUG: + self.sparkSession.sparkContext.setLogLevel("Error") return self.sparkSession.version except Exception as e: print("createSparkSession:" + str(e)) diff --git a/src/Modules/General/callbacks.py b/src/Modules/General/callbacks.py index 2d4fa7b7ac93b98a69c830076790ffbc0919a064..aec930b5aa0c8d91360f602eaa5e102c414f5f63 100755 --- a/src/Modules/General/callbacks.py +++ b/src/Modules/General/callbacks.py @@ -116,7 +116,7 @@ class callbacks(callbacksOp): mode='lines+markers', text=list(super.xAxisLabel), customdata=list(super.SynapseGraphY[layer]), - hovertemplate="%{text} <br> <b>%{customdata}</b> <br> <b>Max</b> "+str(super.MaxSynapse[layer]))) + hovertemplate="<span style='color:white;'>%{text} <br> <b>%{customdata}</b> <br> <b>Max</b> "+str(super.MaxSynapse[layer]))) if(f == "Potentials"): if(layer not in super.PotentialGraphY): super.PotentialGraphY[layer] = deque(maxlen=100) @@ -233,7 +233,7 @@ class callbacks(callbacksOp): go.Treemap( labels=list(super.Label.keys() ) if super.Label != [] else [], - textfont_size=18, + textfont_size=20, textposition="middle center", parents=["" for i in range( len(super.Label))], diff --git a/src/Modules/General/layout.py b/src/Modules/General/layout.py index 8a602d70df73c79c20ca2546b652df15d109d38a..504b76e8c4df45ec407f82c1cbfc8429a7543302 100755 --- a/src/Modules/General/layout.py +++ b/src/Modules/General/layout.py @@ -136,21 +136,21 @@ class layout(layoutOp): id='GeneralGraphFilter', options=[{'label': "Spikes", 'value': "Spikes"}, {'label': "Synapses activity", 'value': "Synapses"}, { 'label': "Neurons potential", 'value': "Potentials"}], - value=['Spikes'], + value=["Spikes","Synapses","Potentials"], multi=True, style={"minWidth": "20%", "marginLeft": "10px", "textAlign": "start"}), # Layers filter dcc.Dropdown( id='GeneralLayerFilter', options=[{'label': str(i), 'value': str(i)} for i in ( - i for i in self.g.Layer_Neuron if i != "Input")], + i for i in self.g.Layer_Neuron if ("Input" not in i and 
"pool" not in i))], value=[str(i) for i in ( - i for i in self.g.Layer_Neuron if i != "Input")], + i for i in self.g.Layer_Neuron if ("Input" not in i and "pool" not in i))], multi=True, style={"minWidth": "20%", "marginLeft": "15px", "textAlign": "start"})], className="d-flex", style={"paddingLeft": "20px", 'width': '100%'}) ], className="col-12") ], className="d-flex"), - html.Div([dcc.Graph(id='general-graph', animate=False, config={"displaylogo": False})])], className="col-lg-9 col-sm-12 col-xs-12" if(self.g.labelsExistance) else "col-lg-12 col-sm-12 col-xs-12"), + html.Div([dcc.Graph(id='general-graph', config={"displaylogo": False})])], className="col-lg-9 col-sm-12 col-xs-12" if(self.g.labelsExistance) else "col-lg-12 col-sm-12 col-xs-12"), html.Div([ html.Div([ html.P("Inputs", style={ @@ -162,7 +162,7 @@ class layout(layoutOp): color="#28a745", style={"marginLeft": "10px"} )], className="d-flex"), - dcc.Graph(id='label-graph', animate=False, config={"displaylogo": False})], className="col-lg-3 col-sm-12 col-xs-12") if(self.g.labelsExistance) else []], className="row") + dcc.Graph(id='label-graph', config={"displaylogo": False})], className="col-lg-3 col-sm-12 col-xs-12") if(self.g.labelsExistance) else []], className="row") ], style={"padding": "5px"})), label="General information", value="General information"), dcc.Tab(dbc.Card( dbc.CardBody([ diff --git a/src/Modules/General/spark.py b/src/Modules/General/spark.py index 2b047979ee3317a6d93079e7048c6539c42a712b..697516dae6ab79ac0f272c7147aa9512872fc201 100755 --- a/src/Modules/General/spark.py +++ b/src/Modules/General/spark.py @@ -39,11 +39,14 @@ class spark(sparkOp): for l in LN: if(l == "Input"): self.g.Input = LN[l] - else: + elif("pool" not in l): self.g.LayersNeuronsInfo.append( {"layer": l, "neuronNbr": int(LN[l])}) self.g.NeuronsNbr += int(LN[l]) self.g.LayersNbr += 1 + + self.g.NeuronsSize = pymongo.collection.Collection(self.g.db, 'synapseWeight').find_one(sort=[("index.x", -1),("index.y", -1)])["index"] + # get date & time of the simulation self.g.Date = data["T"] # calculate simulation time diff --git a/src/Modules/Neuron/layout.py b/src/Modules/Neuron/layout.py index 83dc5d8cf6564d2bbdb989d69cf8e6f4e72b33e6..b0679f189c213a9f237e1bf14ccd273c3d7e485d 100755 --- a/src/Modules/Neuron/layout.py +++ b/src/Modules/Neuron/layout.py @@ -69,8 +69,6 @@ class layout(layoutOp): options=[{'label': str(i), 'value': str(i)} for i in ( i for i in self.g.Layer_Neuron if i != "Input")], multi=False, - value=[{'label': str(i), 'value': str(i)} for i in ( - i for i in self.g.Layer_Neuron if i != "Input")][0]["value"], style={'width': '150px', "marginLeft": "10px", "textAlign": "start"}), dcc.Dropdown( id='NeuronFilterNeuron', @@ -81,7 +79,7 @@ class layout(layoutOp): "fontWeight": "500", "marginLeft": "20px", "height": "36px", "backgroundColor": "rgb(68, 71, 99)", "borderColor": "rgb(68, 71, 99)"}), html.Div(id='clear-Neuron', children="False", style={'display': 'none'}), html.Div(id='display-Neuron', children="False", style={'display': 'none'}) ], className="d-flex"), html.Div(id={'type': "GraphsAreaNeuron"}, children=[html.Div(id={'type': "OutputNeurons"}, children=[dcc.Graph(id="SpikePerNeuronFreq", figure=self.SpikePerNeuron3D(self.g), config={"displaylogo": False}, className="col-6"), - dcc.Graph(id="SpikePerNeuronNbr", animate=False, config={"displaylogo": False}, className="col-6")], className="d-flex")], style={"textAlign": "-webkit-center", "paddingTop": "10px"}) if(self.g.finalLabels != None) else html.Div(id={'type': 
"GraphsAreaNeuron"}, children=[], style={"textAlign": "-webkit-center", "paddingTop": "10px"})]) + dcc.Graph(id="SpikePerNeuronNbr", config={"displaylogo": False}, className="col-6")], className="d-flex")], style={"textAlign": "-webkit-center", "paddingTop": "10px"}) if(self.g.finalLabels != None) else html.Div(id={'type': "GraphsAreaNeuron"}, children=[], style={"textAlign": "-webkit-center", "paddingTop": "10px"})]) ], style={"textAlign": "center", "padding": "10px"} )) diff --git a/src/Modules/Synapse/callbacks.py b/src/Modules/Synapse/callbacks.py index fc47ec03ccd50a93f6c790627e5b9b6a3b7df616..6d73376175a3a3322817a0bae883610728ef8a3e 100755 --- a/src/Modules/Synapse/callbacks.py +++ b/src/Modules/Synapse/callbacks.py @@ -353,22 +353,13 @@ class callbacks(callbacksOp): Returns: html component that contains the global heatmaps """ - - heatMapX = list(data["index"].max())[0] + 1 - heatMapY = list(data["index"].max())[1] + 1 - - dfTo = [i["To"] for i in data["_id"]] - dfC = [i["C"] for i in data["_id"]] - - data["To"] = dfTo - data["C"] = dfC - - data = data.drop("_id", 1) - data = data.sort_values(["To", "C"]) - data = data[["V", "index", "To"]] - data["data"] = [[i["x"], i["y"], v] for i, v in zip(data["index"], data["V"])] - data = data[["To", "data"]] + heatMapX = data["x"].max() + 1 + heatMapY = data["y"].max() + 1 + depth = data["C"].max() + + data = data[["x", "y", "C", "V", "To"]] + data = data.sort_values(["C"]) for i in g.LayersNeuronsInfo: if(i["layer"] == selectedLayer): @@ -388,7 +379,7 @@ class callbacks(callbacksOp): zmin=0, zmax=1, z=g.createHeatMap( - heatMapX, heatMapY, data[data.To == index]["data"].to_numpy()), + heatMapX, heatMapY, data[data.To == index],depth), colorscale='jet', name=str(index)), row=xx, col=yy) @@ -419,7 +410,7 @@ class callbacks(callbacksOp): """ try: if isOn: - if data != None: + if not data.empty: if str(index) not in yAxisList: yAxisList[str(index)] = deque(maxlen=100) @@ -427,13 +418,11 @@ class callbacks(callbacksOp): index)] = deque(maxlen=100) # add data - if len(data) != 0: - yAxisList[str(index)].append( - float("{:.6f}".format(statistics.mean([float(d["V"]) for d in data])))) - HeatMapSynapseFreqGraph[str(index)].append( - [float(d["V"]) for d in data]) + if data.count != 0: + yAxisList[str(index)].append(float("{:.6f}".format(data["V"].mean()))) + HeatMapSynapseFreqGraph[str(index)].append([float(d) for d in data["V"].values]) + # Y X or the vertical Heatmaps - heatMapsZ = list() heatMapsZLabel = list() for heatmap in HeatMapSynapseFreqGraph[str(index)]: @@ -583,10 +572,6 @@ class callbacks(callbacksOp): heatmap content (data and layout) """ try: - #if(data != []): - # print("*") - # heatMapX = list(data["index"].max())[0] + 1 - # heatMapY = list(data["index"].max())[1] + 1 heatMapWithIndexs = [] layout = go.Layout( @@ -598,30 +583,26 @@ class callbacks(callbacksOp): paper_bgcolor= "rgba(255, 255, 255,0)", plot_bgcolor= "rgba(255, 255, 255,0)") if isOn: - if data != None: - data.sort(key=lambda d: d["C"]) - group = groupby(data, key=lambda d: d["C"]) + if not data.empty: + depth = data["C"].max() + data = data.sort_values(["T"]) + data = data.groupby(["C","x","y"], as_index=False).agg({"To":"last","T":"max","L":"last","V":"last"}) - # add neuron idexes to show heatmap correctly - data = [item for item in ( - max(v, key=lambda k: k["V"]) for k, v in group)] # update heatmap in heatmaps if str(index) not in heatmaps: heatmaps[str(index)] = np.zeros( (heatMapX, heatMapY)) - - for synapse in data: + for X,Y,V,C in 
zip(data["x"],data["y"],data["V"],data["C"]): try: - heatmaps[str(index)][synapse["index"]["x"] - ][synapse["index"]["y"]] = synapse["V"] - heatMapWithIndexs.append( - [synapse["index"]["x"], synapse["index"]["y"], synapse["V"]]) + # TODO: multi-D + heatmaps[str(index)][X][Y] = V + heatMapWithIndexs.append([X, Y, C, V]) except Exception as e: - continue + print("heatMap1: "+str(e)) return {'data': [ go.Heatmap( - z=g.createHeatMap(heatMapX, heatMapY, heatMapWithIndexs), + z=g.createHeatMap(heatMapX, heatMapY, pd.DataFrame(heatMapWithIndexs), depth), zmin=0, zmax=1, colorscale="jet")], @@ -647,7 +628,7 @@ class callbacks(callbacksOp): colorscale="jet")], 'layout': layout} except Exception as e: - print("heatMap: "+str(e)) + print("heatMap2: "+str(e)) # ------------------------------------------------------------ # MongoDB operations @@ -677,14 +658,14 @@ class callbacks(callbacksOp): {"T": {'$gt': timestamp, '$lte': (timestamp+g.updateInterval)}} ]} }]) - # ToJson---------------------- - SynapseWeight = loads(dumps(SynapseWeight)) - + # ToDF---------------------- + SynapseWeight = pd.DataFrame(list(SynapseWeight)) + if(not SynapseWeight.empty): + SynapseWeight["x"] = SynapseWeight["index"].map(lambda i: i["x"]) + SynapseWeight["y"] = SynapseWeight["index"].map(lambda i: i["y"]) + SynapseWeight.drop(columns=["index"]) # ---------------------------- - if not SynapseWeight: - return None - return SynapseWeight def getGlobalSynapseWeights(self, g, layer): @@ -696,15 +677,21 @@ class callbacks(callbacksOp): Returns: final synapses weights """ - # MongoDB--------------------- - df = g.sparkSession.read.format("com.mongodb.spark.sql") \ - .option("spark.mongodb.input.uri", g.MONGODBURL + g.name + ".synapseWeightFinal?authSource=admin&readPreference=primaryPreferred") \ - .option("pipeline","[{$match: { L: {$eq: '"+layer+"'}}}]") + # MongoDB--------------------- + col = pymongo.collection.Collection(g.db, 'synapseWeightFinal') - df = df.load() - - # ToJson---------------------- - globalSynapseWeights = df.toPandas() + globalSynapseWeights = col.aggregate([{"$match": { "_id.L": {"$eq": layer}}}]) + + # ToDF---------------------- + globalSynapseWeights = pd.DataFrame(list(globalSynapseWeights)) + + if(not globalSynapseWeights.empty): + globalSynapseWeights["To"] = globalSynapseWeights["_id"].map(lambda i: i["To"]) + globalSynapseWeights["C"] = globalSynapseWeights["_id"].map(lambda i: i["C"]) + globalSynapseWeights["x"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["x"]) + globalSynapseWeights["y"] = globalSynapseWeights["_id"].map(lambda i: i["index"]["y"]) + globalSynapseWeights["L"] = globalSynapseWeights["_id"].map(lambda i: i["L"]) + globalSynapseWeights.drop(columns=["_id"]) # ---------------------------- return globalSynapseWeights diff --git a/src/Modules/Synapse/layout.py b/src/Modules/Synapse/layout.py index 78ba9f2a735ee701d02a760609cf62e67fd0b5a6..7d253987e418c3c90278cbb1a930d928e3384426 100755 --- a/src/Modules/Synapse/layout.py +++ b/src/Modules/Synapse/layout.py @@ -67,23 +67,23 @@ class layout(layoutOp): "fontWeight": "500", "marginLeft": "20px", "height": "36px", "backgroundColor":"rgb(68, 71, 99)","borderColor":"rgb(68, 71, 99)"}), html.Div([ - dbc.Input(type="number",id="HeatMapX",value=28, min=0, step=1, + dbc.Input(type="number",id="HeatMapX",value=self.g.NeuronsSize["x"]+1, min=0, step=1, style={"width":"80px","marginLeft":"10px","textAlign":"center"}), html.P("X",style={"padding":"5px"}), - dbc.Input(type="number",id="HeatMapY",value=28, min=0, step=1, + 
dbc.Input(type="number",id="HeatMapY",value=self.g.NeuronsSize["y"]+1, min=0, step=1, style={"width":"80px","textAlign":"center"})], className="d-flex",style={"marginLeft":"3px"}) ,html.Div(id='clear-Synapse',children="False", style={'display': 'none'}) ,html.Div(id='display-Synapse',children="False", style={'display': 'none'}) ], className="d-flex"), dbc.Card( dbc.CardBody([dbc.Card([dbc.CardHeader( dbc.Button( "Layer HeatMap", color="none", - id=f"group-GlobalHeatMapAreaSynapse-toggle", + id="group-GlobalHeatMapAreaSynapse-toggle", style={"width":"100%","height":"100%","borderTop":"2px solid rgb(68, 71, 99)","padding":"10px"} ),style={"padding":"0px",}), dbc.Collapse( dbc.CardBody([ html.Div(id={'type': "GlobalHeatMapAreaSynapse"}, children=[], style={"textAlign": "-webkit-center"})]), - id=f"collapse-GlobalHeatMapAreaSynapse")])])), + id="collapse-GlobalHeatMapAreaSynapse")])])), html.Div(id={'type': "GraphsAreaSynapse"}, children=[], style={"textAlign": "-webkit-center", "paddingTop": "10px"})]), ], style={"textAlign": "center", "padding": "10px"} diff --git a/src/Modules/Synapse/spark.py b/src/Modules/Synapse/spark.py index 6ee4246b8c9a15a45a7f8b2466440625deb02385..d577247e156c012a2e57901c407dccb145d8e6fb 100755 --- a/src/Modules/Synapse/spark.py +++ b/src/Modules/Synapse/spark.py @@ -1,9 +1,9 @@ """ Spark pre-processing operations. """ +import pandas as pd import pymongo import traceback -from pyspark.sql import functions as F from src.templates.sparkOp import sparkOp def init(g): @@ -33,28 +33,23 @@ class spark(sparkOp): if self.g.sparkSession == None: self.g.createSparkSession() # -------------------------------------------------- - df = self.g.sparkSession.read.format("com.mongodb.spark.sql") \ - .option("spark.mongodb.input.uri", self.MONGODBURL + self.g.name + "."+self.DOCUMENT_NAME+"?authSource=admin&readPreference=primaryPreferred") \ - .option("pipeline", "[{ $sort: { T: 1 } },{$group : { _id : {To:'$To', C:'$C'}, L : { $last: '$L'}, T : { $last: '$T'},V : { $last: '$V'},index : { $last: '$index'} } }]") - - df = df.load() - + col = pymongo.collection.Collection(self.g.db, self.DOCUMENT_NAME) + globalSynapseWeights = col.aggregate([{ "$sort": { "T": 1 } },{"$group" : { "_id" : {"To":'$To', "C":'$C', "index":'$index', "L":'$L'}, "T" : { "$last": '$T'},"V" : { "$last": '$V'} } }]) + # Data save into MongoDB --------------------------------- - - df.write.format("com.mongodb.spark.sql.DefaultSource") \ - .option("spark.mongodb.output.uri", - self.MONGODBURL + self.g.name + "."+self.OUTPUT_DOCUMENT_NAME+"?authSource=admin&readPreference=primaryPreferred").mode('append').save() + col = pymongo.collection.Collection(self.g.db, self.OUTPUT_DOCUMENT_NAME) + globalSynapseWeights = pd.DataFrame(list(globalSynapseWeights)) + col.insert_many(globalSynapseWeights.to_dict('records')) # Indexes creation --------------------------------------- print("Indexes creation (please wait...)") - col = pymongo.collection.Collection(self.g.db, self.OUTPUT_DOCUMENT_NAME) col.create_index([("_id.L", 1)]) col.create_index([("_id", 1)]) col.create_index([("_id.To", 1),("_id.C", 1)]) col.create_index([("T", 1)]) - col.create_index([("index", 1)]) + col.create_index([("_id.index", 1)]) col.create_index([("V", 1)]) # --------------------------------------------------------