diff --git a/n2s3/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/features/logging/SimMongoLog.scala b/n2s3/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/features/logging/SimMongoLog.scala
index 3b046302e074b842b4d3eb3e6a9739b6968c7cb0..e330c7c554fc2c6e599f65a60ddf68d9d36c898b 100644
--- a/n2s3/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/features/logging/SimMongoLog.scala
+++ b/n2s3/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/features/logging/SimMongoLog.scala
@@ -21,18 +21,20 @@ import scala.sys.process._
  *
  * Fichier responsable sur la collection des informations sur la simulation et le stockage dans la base de données Mongo
  *
- * Utilisation : new SimMongoLog(n2s3, listOfConnexions, simName, true, true, true, true,"MNIST",SimTime)
  */
-class SimMongoLog(n2s3 : N2S3,list : IndexedSeq[IndexedSeq[IndexedSeq[ConnectionPath]]],name : String, Spikes : Boolean, Potential : Boolean, Label : Boolean,SynapticWeight : Boolean,Data:String,time: String) {
+class SimMongoLog(n2s3 : N2S3,list : IndexedSeq[IndexedSeq[IndexedSeq[ConnectionPath]]],name : String, Spikes : Boolean, Potential : Boolean, Label : Boolean,SynapticWeight : Boolean,Data:String,time: String,SynapseIndex : List[(Integer, Integer, Integer)]) {
 
   /** ******************************************************************************************************************
     * Setup DB connection & Collection *
     *****************************************************************************************************************/
 
   println("Connecting to database ")
-  val mongoClient: MongoClient = MongoClient()
+  val USERNAME = "USERNAME"
+  val PASSWORD = "PASSWORD"
+  val HOST = "127.0.0.1:27017"
+  val mongoClient: MongoClient = MongoClient("mongodb://"+USERNAME+":"+PASSWORD+"@"+HOST+"/?authSource=admin&readPreference=primaryPreferred")
   val database: MongoDatabase = mongoClient.getDatabase(name+"-"+time.replaceAll("\\s", "-"))
   var singleLayersDocument = Document()
   var neuroneS,neuroneE = 0
@@ -128,13 +130,13 @@ class SimMongoLog(n2s3 : N2S3,list : IndexedSeq[IndexedSeq[IndexedSeq[Connection
   InfoCollection = database.getCollection("info")
   InfoCollection.deleteMany(new BasicDBObject())
   PotentialCollection = database.getCollection("potential")
-  PotentialCollection.deleteMany(new BasicDBObject())
+  //PotentialCollection.deleteMany(new BasicDBObject())
   SpikesCollection = database.getCollection("spikes")
-  SpikesCollection.deleteMany(new BasicDBObject())
+  //SpikesCollection.deleteMany(new BasicDBObject())
   LabelCollection = database.getCollection("labels")
-  LabelCollection.deleteMany(new BasicDBObject())
+  //LabelCollection.deleteMany(new BasicDBObject())
   SynapseWeightCollection = database.getCollection("synapseWeight")
-  SynapseWeightCollection.deleteMany(new BasicDBObject())
+  //SynapseWeightCollection.deleteMany(new BasicDBObject())
 
   var neuronsFireLogDocuments : List[Document] = List.empty
   var neuronsPotentialLogDocuments : List[Document] = List.empty
@@ -235,7 +237,7 @@ class SimMongoLog(n2s3 : N2S3,list : IndexedSeq[IndexedSeq[IndexedSeq[Connection
 
   if(SynapticWeight){
     println("Setting SynapseWeight monitor")
-    synapticWeightMonitoring = new SynapticWeightMonitoring(list,500,SynapseWeightCollection,observer,"",step,globalTimestamps)
+    synapticWeightMonitoring = new SynapticWeightMonitoring(list,250,SynapseWeightCollection,observer,"",step,globalTimestamps,SynapseIndex)
     n2s3.addNetworkObserver(synapticWeightMonitoring)}
   // ExternalSender.askTo(n2s3.inputLayerRef.get.getContainer, Subscribe(SynapseUpdateEvent,
      ExternalSender.getReference(loggerSynapseWeight)))
@@ -297,7 +299,7 @@ class NeuronsPotentialLog(name : String,mongoCollection: MongoCollection[Documen
     case NeuronPotentialResponse(timestamp, source, value) =>
       if (((System.currentTimeMillis() - globalTimestamps) * 1000) - time > 250){
         time = (System.currentTimeMillis() - globalTimestamps) * 1000
-        documents = documents.::(Document("T" -> time,"L" -> Layers.get(source.actor.path.name.split(":")(0)),
+        documents = documents.::(Document("T" -> time,"L" -> source.actor.path.name.split(":")(0),
           "N" -> BsonInt32(Integer.valueOf(source.actor.path.name.split(":")(1))),
           "V" -> BsonDouble.apply(BigDecimal(value).setScale(6, BigDecimal.RoundingMode.HALF_UP).toDouble)))
         // "_id" -> id,
@@ -362,20 +364,20 @@ class LabelMonitoring(n2s3 : N2S3, outputNeuron : Seq[NeuronGroupRef],mongoColle
   * Synapse weight Logger *
   *****************************************************************************************************************/
 
-class SynapticWeightMonitoring(list : IndexedSeq[IndexedSeq[IndexedSeq[ConnectionPath]]], refreshRate : Int = 1000/24,mongoCollection: MongoCollection[Document],observer: Observer[Completed], name : String = "",step:Int,globalTimestamps:Timestamp) extends NeuronGroupObserverRef {
+class SynapticWeightMonitoring(list : IndexedSeq[IndexedSeq[IndexedSeq[ConnectionPath]]], refreshRate : Int = 1000/24,mongoCollection: MongoCollection[Document],observer: Observer[Completed], name : String = "",step:Int,globalTimestamps:Timestamp,SynapseIndex : List[(Integer, Integer, Integer)]) extends NeuronGroupObserverRef {
 
   var actor: Option[ActorRef] = None
 
   def getActors : Seq[ActorRef] = this.actor.toSeq
 
   override def deploy(n2s3: N2S3): Unit = {
-    this.actor = Some(n2s3.system.actorOf(Props(new SynapticWeight(list, refreshRate, name,mongoCollection,observer,step,globalTimestamps)), LocalActorDeploymentStrategy))
+    this.actor = Some(n2s3.system.actorOf(Props(new SynapticWeight(list, refreshRate, name,mongoCollection,observer,step,globalTimestamps,SynapseIndex)), LocalActorDeploymentStrategy))
     // this.actor = Some(n2s3.system.actorOf(Props(new ActivityStoreActor(n2s3)), LocalActorDeploymentStrategy))
   }
 }
 
-class SynapticWeight(list : Seq[Seq[Seq[ConnectionPath]]], refreshRate : Int = 1000/24, name : String,mongoCollection: MongoCollection[Document],observer: Observer[Completed],step:Int,globalTimestamps:Timestamp) extends AutoRefreshNetworkActor(refreshRate, list.size) {
+class SynapticWeight(list : Seq[Seq[Seq[ConnectionPath]]], refreshRate : Int = 1000/24, name : String,mongoCollection: MongoCollection[Document],observer: Observer[Completed],step:Int,globalTimestamps:Timestamp,SynapseIndex : List[(Integer, Integer, Integer)]) extends AutoRefreshNetworkActor(refreshRate, list.size) {
 
   class WeightPanel(list : Seq[Seq[ConnectionPath]]){
@@ -385,6 +387,8 @@ class SynapticWeight(list : Seq[Seq[Seq[ConnectionPath]]], refreshRate : Int = 1
     var valuesTmp: immutable.Iterable[(Int, Int, Float)] = values
     var i = 0
+    var x = 0
+    var y = 0
     var documents: List[Document] = List.empty
     var tempdocs: List[Document] = List.empty
     var filteredconnectionList : Seq[(Int,Int,ConnectionPath)] = Seq.empty
@@ -426,8 +430,17 @@ class SynapticWeight(list : Seq[Seq[Seq[ConnectionPath]]], refreshRate : Int = 1
           if(valuesTmp.exists(y => y == x)){
             values = values.dropWhile(y => y == x)}
         })}
-      values.foreach(x => {
-        documents = documents.::(Document("T" -> (System.currentTimeMillis() - globalTimestamps) * 1000, "C" -> x._1, "To" -> x._2, "V" -> f"${x._3}%1.6f"))
+
+
+      values.foreach(v => {
+        SynapseIndex.foreach(cnx => {
+          if (cnx._1 == v._1) {
+            x = cnx._3
+            y = cnx._2
+          }
+        })
+        documents = documents.::(Document("T" -> (System.currentTimeMillis() - globalTimestamps) * 1000, "C" -> v._1, "To" -> v._2, "V" ->
+          BsonDouble.apply(BigDecimal(v._3).setScale(6, BigDecimal.RoundingMode.HALF_UP).toDouble),"index" -> Document("x" -> x, "y" -> y)))
         //"tt" -> step * i
       })
       valuesTmp = values
diff --git a/n2s3_examples/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/apps/ExampleFMnistPrune1Epoch.scala b/n2s3_examples/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/apps/ExampleFMnistPrune1Epoch.scala
new file mode 100755
index 0000000000000000000000000000000000000000..a595732f447fb8264cc12c85e981db6c89a3d5a4
--- /dev/null
+++ b/n2s3_examples/src/main/scala/fr/univ_lille/cristal/emeraude/n2s3/apps/ExampleFMnistPrune1Epoch.scala
@@ -0,0 +1,509 @@
+package fr.univ_lille.cristal.emeraude.n2s3.apps
+
+import java.awt.image.BufferedImage
+import java.io.File
+import java.text.SimpleDateFormat
+import java.util.Date
+
+import fr.univ_lille.cristal.emeraude.n2s3.core
+import fr.univ_lille.cristal.emeraude.n2s3.core._
+import fr.univ_lille.cristal.emeraude.n2s3.core.actors.Config
+import fr.univ_lille.cristal.emeraude.n2s3.core.models.properties.{MembranePotentialThreshold, SynapticWeightFloat}
+import fr.univ_lille.cristal.emeraude.n2s3.features.builder.connection.types.FullConnection
+import fr.univ_lille.cristal.emeraude.n2s3.features.builder.{InputNeuronGroupRef, N2S3, NeuronGroupRef}
+import fr.univ_lille.cristal.emeraude.n2s3.features.io.input._
+import fr.univ_lille.cristal.emeraude.n2s3.features.io.report.BenchmarkMonitorRef
+import fr.univ_lille.cristal.emeraude.n2s3.features.logging.graph.{SynapticWeightSelectGraph, SynapticWeightSelectGraphRef}
+import fr.univ_lille.cristal.emeraude.n2s3.features.logging.{NetworkSaving, SimMongoLog}
+import fr.univ_lille.cristal.emeraude.n2s3.models.neurons.LIF
+import fr.univ_lille.cristal.emeraude.n2s3.models.synapses.{InhibitorySynapse, QBGParameters, SimplifiedSTDP}
+import fr.univ_lille.cristal.emeraude.n2s3.support.N2S3ResourceManager
+import fr.univ_lille.cristal.emeraude.n2s3.support.UnitCast._
+import javax.imageio.ImageIO
+import squants.electro.ElectricPotential
+import squants.electro.ElectricPotentialConversions.ElectricPotentialConversions
+
+object ExampleFMnistPrune1Epoch extends App {
+
+  var n2s3 : N2S3 = _
+  var SynapseIndex : List[(Integer, Integer, Integer)] = List.empty
+  var folderName = ""
+  var croppedCnx = List[ConnectionPath]()
+  var alpha = 0.1F
+  var ArrAlpha = Array(0.01F,0.05F,0.1F,0.15F,0.2F)
+  var ArrBeta = Array(0.01F,0.1F,0.15F,0.2F,0.5F)
+  var beta = 0.2F
+  var CTotal = 0F
+  var CRmaining = 0F
+  var Ratio = 0F
+  var inputStream = InputMnist.Entry >> SampleToSpikeTrainConverter[Float, InputSample2D[Float]](0, 22, 150 MilliSecond, 350 MilliSecond) >> N2S3Entry
+  var SimName = ""
+  var SimTime = ""
+  var NeuroneLabels = List[(Int, String)]()
+  var logger: SimMongoLog = null
+  var log = true
+  var prune = true
+  var globalIteration = 6
+  var sizeChunk = 10000
+
+  for(x <- 1 to 1) {
+    implicit val timeout = Config.longTimeout
+    alpha = ArrAlpha((x - 1) / 5)
+    beta = ArrBeta((x - 1) % 5)
+    alpha = 0.05F
+    beta = 0.1F
+    SimName = "20N" + "-" + x
+    println("a:" + alpha + " b:" + beta)
+    QBGParameters.alf_m = 0.005f
+    QBGParameters.alf_p = 0.01f
+    QBGParameters.beta_m = 2f
+    QBGParameters.beta_p = 2f
+    val formatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss")
+    var iteration = 0
+    var CropInfo = ""
"" + var duration: Long = 0L + var simTime = "" + var benchmarkMonitor: BenchmarkMonitorRef = null + n2s3 = new N2S3("N2S3") + + /** Vars of Threshold **/ + + CTotal = 0F + CRmaining = 0F + Ratio = 0F + + /** **/ + + folderName = "" + SynapseIndex = List.empty + croppedCnx = List.empty + NeuroneLabels = List.empty + + + val inputLayer = n2s3.createInput(inputStream) + val dataFile = N2S3ResourceManager.getByName("fmnist-train-images").getAbsolutePath + val labelFile = N2S3ResourceManager.getByName("fmnist-train-labels").getAbsolutePath + var stream: MnistFileInputStreamP = null + //inputStream.append(stream) + + val unsupervisedLayer = n2s3.createNeuronGroup() + .setIdentifier("OutLayer") + .setNumberOfNeurons(20) + .setNeuronModel(LIF, Seq( + (MembranePotentialThreshold, 35 millivolts))) + + inputLayer.connectTo(unsupervisedLayer, new FullConnection(() => new SimplifiedSTDP())) + unsupervisedLayer.connectTo(unsupervisedLayer, new FullConnection(() => new InhibitorySynapse())) + + n2s3.create() + + iteration = 1 + inputLayer.neurons.toList.foreach(n => { + CTotal += n.group.connections.length + }) + CTotal = CTotal * unsupervisedLayer.neurons.size + CRmaining = CTotal + + //showHeatMap(inputLayer,unsupervisedLayer) + SynapsesIndex(inputLayer, unsupervisedLayer) + SimTime = formatter.format(new Date()) + folderName = SimName.split("-")(0) + " [" + x + "] " + SimTime + new File(folderName).mkdir() + + //println("Start loading "+System.currentTimeMillis()) + //NetworkLoading.from(n2s3,"1600N/1600N-1 Arch 6 cropped") + //println("Done loading "+System.currentTimeMillis()) + //NetworkLoading.fromWithDefaultWeights(n2s3,"100N [1] 03-02-2020 23:50:43/100N-1 Archce + // 6 cropped","100N [1] 03-02-2020 23:50:43/100N-1 Arch 6 cropped",loadRandomWeights = false,binary = false) + saveNeurones(unsupervisedLayer, "Initial") + NetworkSaving.save(n2s3, croppedCnx, SynapseIndex, folderName + "/" + SimName + " Arch") + + // ImageSynapsesWeight.save(unsupervisedLayer.neurons.map(n => (n.getNetworkAddress, 128, 128)), 4, 1, "freeway.layer1.png") + + // + + val inputToClassifierIndex = new ConnectionIndex(inputLayer, unsupervisedLayer) + var listOfConnexions = for (outputIndex <- 0 until unsupervisedLayer.shape.getNumberOfPoints) yield { + for (inputX <- 0 until InputMnist.shape.dimensions(0)) yield { + for { + inputY <- 0 until InputMnist.shape.dimensions(1) + input = inputLayer.getNeuronPathAt(inputX, inputY) + output = unsupervisedLayer.getNeuronPathAt(outputIndex) + if inputToClassifierIndex.getConnectionsBetween(input, output).nonEmpty + } yield inputToClassifierIndex.getConnectionsBetween(input, output).head + } + } + + val dataTestFile = N2S3ResourceManager.getByName("fmnist-test-images").getAbsolutePath + val labelTestFile = N2S3ResourceManager.getByName("fmnist-test-labels").getAbsolutePath + + + while (iteration <= globalIteration) { + simTime = "" + unsupervisedLayer.unfixNeurons() + n2s3.first = false + stream = InputMnist.DataParts(dataFile, labelFile, sizeChunk*(iteration-1), sizeChunk) + inputStream.append(stream) + stream.goto() + println("Start Training ... 
" + "[" + iteration + "]") + + if (iteration == 1 && log) { + logger = new SimMongoLog(n2s3, listOfConnexions, SimName.split("-")(0), true, true, true, true, "FMNIST", SimTime,SynapseIndex) + } + duration = System.currentTimeMillis() + simTime = "Train " + iteration + n2s3.runAndWait() + + println(System.currentTimeMillis() - duration) + simTime = simTime + " | " + (System.currentTimeMillis() - duration) + "\n" + + if (iteration == globalIteration && log) { + logger.storeSimInfoAndDestroy() + } + + println("saving : " + System.currentTimeMillis()) + NetworkSaving.save(n2s3, croppedCnx, SynapseIndex, folderName + "/" + SimName + " Arch " + iteration) + println("end : " + System.currentTimeMillis()) + println("saving cropped : " + System.currentTimeMillis()) + saveNeurones(unsupervisedLayer, "[" + iteration + "]") + println("end : " + System.currentTimeMillis()) + + /* + inputStream.append(InputMnist.DataFrom(dataTestFile, labelTestFile)) + + println("Start Testing " + "[" + iteration + "]") + + unsupervisedLayer.fixNeurons() + + n2s3.first = false + + benchmarkMonitor = n2s3.createBenchmarkMonitor(unsupervisedLayer) + + duration = System.currentTimeMillis() + simTime = simTime + "Test " + iteration + "\n" + duration + + n2s3.runAndWait() + + simTime = simTime + " | " + (System.currentTimeMillis() - duration) + "\n" + + benchmarkMonitor.exportToHtmlView(folderName + "/" + "test " + SimName + " [" + iteration + "]" + ".html") + benchmarkMonitor.saveCrops(folderName + "/" + "Sim " + SimName + " info a:" + alpha + " no b:" + beta, simTime, " Ratio " + Ratio) +*/ + + if (prune) { + Ratio = Ratio + (alpha * (CRmaining / CTotal)) + println("Ratio " + Ratio) + println("cropping : " + System.currentTimeMillis()) + CropInfo = cropNetwork(inputLayer, Ratio) + println("end : " + System.currentTimeMillis()) + // cropInput(n2s3.inputLayerRef.get, Ratio) + } + + if (iteration == globalIteration) { + inputStream.append(InputMnist.DataFrom(dataTestFile, labelTestFile)) + + println("Start Testing cropped " + "[" + iteration + "]" + " with " + Ratio) + + unsupervisedLayer.fixNeurons() + + benchmarkMonitor = n2s3.createBenchmarkMonitor(unsupervisedLayer) + + duration = System.currentTimeMillis() + simTime = simTime + "Test cropped " + iteration + + n2s3.runAndWait() + + println(System.currentTimeMillis() - duration) + + simTime = simTime + " | " + (System.currentTimeMillis() - duration) + "\n" + + saveNeurones(unsupervisedLayer, "cropped " + "[" + iteration + "]" + " with " + Ratio) + NetworkSaving.save(n2s3, croppedCnx, SynapseIndex, folderName + "/" + SimName + " Arch " + iteration + " cropped") + + benchmarkMonitor.exportToHtmlView(folderName + "/" + "test cropped " + SimName + " [" + iteration + "]" + ".html") + + benchmarkMonitor.saveCrops(folderName + "/" + "Sim " + SimName + " info a:" + alpha + " b:" + beta, simTime, CropInfo + " Ratio " + Ratio) + } + if (iteration == globalIteration && log) { + logger.storeBenchmarkTestInfo(benchmarkMonitor) + } + iteration += 1 + } + } + n2s3.destroy() + + sys.exit(0) + + def showHeatMap(inputL: InputNeuronGroupRef, layer : NeuronGroupRef): Unit ={ + val inputToClassifierIndex = new ConnectionIndex(inputL, layer) + + var listOfConnexions = for(outputIndex <- 0 until layer.shape.getNumberOfPoints) yield { + for(inputX <- 0 until InputMnist.shape.dimensions(0)) yield { + for(inputY <- 0 until InputMnist.shape.dimensions(1)) yield { + val input = inputL.getNeuronPathAt(inputX, inputY) + val output = layer.getNeuronPathAt(outputIndex) + 
+          inputToClassifierIndex.getConnectionsBetween(input, output).head
+        }}}
+
+    n2s3.addNetworkObserver(new SynapticWeightSelectGraphRef(
+      listOfConnexions,
+      SynapticWeightSelectGraph.heatMap,
+      4,
+      100)
+    )
+  }
+
+  def showNeurones(n2s3 : N2S3){
+    var x = 0
+    n2s3.layers.foreach(l =>{
+      l.neurons.foreach(n =>{
+        x = 0
+        n.group.connections.foreach(cnx =>{
+          cnx.connections.list.foreach(c =>{
+            if(c.connectionID >= 784){
+              x+=1
+            }else{
+              if(c.connectionID == n.getIdentifier){
+                x+=1
+              }
+            }
+          })
+        })
+      })
+    })
+  }
+
+  def SynapsesIndex(inputL: InputNeuronGroupRef,layer : NeuronGroupRef): Unit = {
+    val inputToClassifierIndex = new ConnectionIndex(inputL, layer)
+
+    var listOfConnexions = for(outputIndex <- 0 until layer.shape.getNumberOfPoints) yield {
+      for(inputX <- 0 until InputMnist.shape.dimensions(0)) yield {
+        for(inputY <- 0 until InputMnist.shape.dimensions(1)) yield {
+          val input = inputL.getNeuronPathAt(inputX, inputY)
+          val output = layer.getNeuronPathAt(outputIndex)
+          inputToClassifierIndex.getConnectionsBetween(input, output).head
+        }}}
+
+    var x = -1
+    var y = 0
+    SynapseIndex = List[(Integer,Integer,Integer)]()
+
+    listOfConnexions.head.foreach(
+      cnx => {
+        cnx.toList.foreach(c => {
+          x+=1
+          SynapseIndex = SynapseIndex.::(c.connectionID,x,y)
+        })
+        y+=1
+        x = -1
+      }
+    )
+
+    SynapseIndex = SynapseIndex.reverse
+  }
+
+
+  def saveNeurones(layer : NeuronGroupRef,name : String): Unit ={
+
+    layer.neurons.foreach(n =>{
+      var values : ConnectionPropertyValues[Float] = ExternalSender.askTo(n.getNetworkAddress, GetAllConnectionProperty(SynapticWeightFloat)) match {
+        case x:ConnectionPropertyValues[Float] => x
+      }
+
+      val width = 28
+      val height = 28
+      val image = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB)
+      var x = 0
+      var y = 0
+
+      for(x <- 0 until 28){
+        for(x <- 0 until 28){
+          image.setRGB(x,y,0 )
+        }
+      }
+
+      values.values.foreach(F => {
+
+        SynapseIndex.foreach(nn => {
+          if(F._1 == nn._1){
+            y = nn._2
+            x = nn._3
+          }
+        })
+
+        // println(index+" : X = "+x+" Y = "+y)
+
+        var v = F._3
+
+        val rc = math.max(0f, -2f*v*v+4f*v-1f)
+        val bc = math.max(0f, -2f*v*v+1)
+        val gc = math.max(0f, 4f*v*(-v+1f))
+
+        image.setRGB(x, y, math.round(bc*255) + (math.round(gc*255) << 8) + (math.round(rc*255) << 16))
+
+      })
+
+      import java.awt.image.BufferedImage
+      val newImage = new BufferedImage(300, 300, BufferedImage.TYPE_INT_RGB)
+
+      val g = newImage.createGraphics
+      g.drawImage(image, 0, 0, 300, 300, null)
+      g.dispose()
+      var file = new File(folderName+"/"+name)
+      if (!file.exists()) {
+        file.mkdir()
+      }
+      val outputfile = new File(folderName+"/"+file.getName +"/"+n.getIdentifier)
+      ImageIO.write(newImage, "png", outputfile)
+    })
+
+  }
+
+  def cropNetwork(inputLayer: NeuronGroupRef,Ratio : Double): String = {
+    var d = 0
+    var total = 0
+    var SynUpdate = ""
+    try {
+
+
+      inputLayer.connections.foreach(
+        np => {
+          np.connections.list.foreach(cnx =>{
+            if(!croppedCnx.contains(cnx)){
+
+              if(!cnx.toString.contains(inputLayer.identifier)){
+                var Value = 0F
+                try {
+                  Value = ExternalConnectionSender.askTo(cnx, GetConnectionProperty(SynapticWeightFloat))
+                    .asInstanceOf[PropertyValue[Float]].value
+
+                  if (Value < Ratio) {
+                    // if( Random.nextInt(3) > 1) {
+                    np.disconnectByPath(cnx)
+                    croppedCnx = croppedCnx ::: List(cnx)
+                    d += 1
+                  }else{
+                    ExternalConnectionSender.askTo(cnx,SetConnectionProperty(SynapticWeightFloat.asInstanceOf[ConnectionProperty[Any]] ,(Value + beta*Ratio).toFloat))
+                    SynUpdate += cnx+":"+(Value + beta*Ratio)+"\n"
+                  }
+
+                }catch {
+                  case e @ (_ : Exception ) => println("Cnx doesn't exist "+e)
println("Cnx doesn't exist "+e) + } + } + total += 1 + } + }) + }) + } catch { + case e @ (_ : Exception ) => println("Exc :"+e) + } + CTotal = total + CRmaining = CTotal - d + println("Deleted "+d+" / "+total) + d+" / "+total + + } + + def cropInput(inputLayer: InputNeuronGroupRef,Ratio : Double): Unit = { + var d = 0 + var total = 0 + try { + + inputLayer.connections.head.connections.list.foreach(cnx =>{ + try { + if(croppedCnx.contains(cnx)){ + inputLayer.connections.head.disconnectByPath(cnx) + d += 1 + } + }catch { + case e @ (_ : Exception ) => println("Cnx doesn't exist "+e) + } + total += 1 + }) + + } catch { + case e @ (_ : Exception ) => println("Exc :"+e) + } + CTotal = total + CRmaining = CTotal - d + println("Deleted "+d+" / "+total) + + } + + def NeuronThresholdVariation(layer : NeuronGroupRef): String ={ + + var res = "" + layer.neurons.foreach(n =>{ + var values : ConnectionPropertyValues[Float] = ExternalSender.askTo(n.getNetworkAddress, GetAllConnectionProperty(SynapticWeightFloat)) match { + case x: ConnectionPropertyValues[Float] => x + } + println() + var threshold : Double = ExternalSender.askTo(n.getNetworkAddress,core.GetProperty(MembranePotentialThreshold)).asInstanceOf[PropertyValue[ElectricPotential]].value.value * (values.values.length.toDouble/inputStream.shape.getNumberOfPoints) + + ExternalSender.askTo(n.getNetworkAddress,core.SetProperty(MembranePotentialThreshold, threshold millivolts)) + + res = res +"\nSetting threshold of "+n.getIdentifier+" to "+n.getNetworkAddress+" "+ExternalSender.askTo(n.getNetworkAddress,core.GetProperty(MembranePotentialThreshold)) + + }) + res + + } + + def cropLateral(Layer: NeuronGroupRef,benchmarkMonitor: BenchmarkMonitorRef): Unit = { + + var lateral = 0 + benchmarkMonitor.getResult.neurons.zipWithIndex.foreach { + case (actor, neuronIndex) => + benchmarkMonitor.getResult.labels.zipWithIndex.foreach { case (label, labelIndex) => + + if ( benchmarkMonitor.getResult.evaluationByMaxSpiking.labelAssoc(actor) == label) { + NeuroneLabels = NeuroneLabels.::(neuronIndex,label) + } + //else { + // benchmarkMonitor.getResult.evaluationByMaxSpiking.crossScore(labelIndex)(neuronIndex) + // } + } + } + println("####") + NeuroneLabels.foreach( (i) =>{ + println(i._1+" , "+i._2) + }) + println("####") + + Layer.connections.foreach( cnx => { + cnx.connections.list.foreach( cnxP => { + val t: Option[(Int, String)] = NeuroneLabels.find((x) => x._1 == (cnxP.connectionID - 784)) + val t2 : Option[(Int,String)] = NeuroneLabels.find((x) => x._1 == cnxP.outputNeuron.actor.path.name.split(":")(1).toInt) + + if(t.get._2 != t2.get._2){ + cnx.disconnectByPath(cnxP) + println("removed "+t.get._1+ " with "+t2.get._1) + lateral+=1 + } + + }) + }) + + println("lateral cropped : "+lateral) + } + +def checkTocrop(from:Integer,to:Integer,dimention:Integer,items: Array[Array[Integer]]): Boolean ={ + var b = 0 + var i = 1 + + if (Math.abs(to - from) < dimention && to != from){ + while(i < items.length){ + b = dimention * i + if(Math.min(from,to) < b && Math.max(from,to) >= b ){ + return false + } + i+=1 + } + true + }else{ + false + } +} + +}