diff --git a/README.md b/README.md
index 9da7721158cb4bb55ea3267f4588ce5c7382b431..14d61874ae5c8c7f0d82fdb55d71b8643269bd11 100755
--- a/README.md
+++ b/README.md
@@ -51,7 +51,7 @@ pip install -r requirements.txt
 - Add MongoDB credentials in `config.py`
 
-4. Using Python >= 3.0 (**make sure MongoDB is running**) run `python VS2N.py` to start VS2N. If everything is fine, the web interface should appear automatically.
+4. Using Python >= 3.0 (**make sure MongoDB is running**), run `python VS2N.py` to start VS2N. If everything is fine, the web interface should appear automatically.
 
 ## Docker image
 
 You can also use a docker image to run VS2N : https://hub.docker.com/r/helbez/vs2n
@@ -79,6 +79,7 @@ src/ # Source directory.
 ### Core
 - Multilayer support added
 - Spark & MongoDB version updated
+- Added support for more simulators (Norse `Python`, CSNN `C++`)
 
 ## v0.35
 
diff --git a/requirements.txt b/requirements.txt
index c4bf4df33319222aa875cf8076c39569f653924a..618cc18046c565e162fdd9bf07fb82f7a28a8025 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,16 +1,16 @@
 pypandoc
-pymongo==4.1.0
-pyspark==3.2.1
+pymongo
+pyspark
 pandas
-dash==2.3.1
-plotly==5.7.0
-dash-renderer==1.9.1
-dash-html-components==2.0.0
-dash-core-components==2.0.0
-dash-bootstrap-components==1.1.0
-dash-cytoscape==0.3.0
-dash-daq==0.5.0
-Flask-Login==0.6.0
-wheel==0.37.1
-werkzeug==2.1.1
+dash
+plotly
+dash-renderer
+dash-html-components
+dash-core-components
+dash-bootstrap-components
+dash-cytoscape
+dash-daq
+Flask-Login
+wheel
+werkzeug
 PyArrow
diff --git a/src/Modules/General/spark.py b/src/Modules/General/spark.py
index c80ad8a5c4ddec45ea4646484e983dd4435e8502..697516dae6ab79ac0f272c7147aa9512872fc201 100755
--- a/src/Modules/General/spark.py
+++ b/src/Modules/General/spark.py
@@ -1,7 +1,6 @@
 """
 Spark pre-processing operations.
 """
-from operator import contains
 from bson.json_util import dumps
 from bson.json_util import loads
 import traceback
diff --git a/src/Modules/Synapse/spark.py b/src/Modules/Synapse/spark.py
index 68db8c7a3890c6f449ab3055e93d57e50f6370ec..d577247e156c012a2e57901c407dccb145d8e6fb 100755
--- a/src/Modules/Synapse/spark.py
+++ b/src/Modules/Synapse/spark.py
@@ -4,7 +4,6 @@
 import pandas as pd
 import pymongo
 import traceback
-from pyspark.sql import functions as F
 from src.templates.sparkOp import sparkOp
 
 def init(g):