Hello !

I want to try using NuPIC Studio. I created one sensor and gave it a CSV file. I got errors:
File “/home/sergey/.local/lib/python2.7/site-packages/nupic/data/”, line 385, in getNextRecord
IndexError: list index out of range.
I can upload my project and CSV file.
Can you help me?

Thanks a lot,

I don’t think NuPIC Studio has been an active project for a couple of years. (cc @david-ragazzi)

4 posts were split to a new topic: Problems with NuPIC input data file format

I changed the flag from T to S. But I get the same errors.

I can’t tell if this is a NuPIC problem or a NuPIC Studio problem. Can you show your code?

I want to develop this small example in NuPIC Studio and run it:
The code below is automatically generated by NuPIC Studio and aims to help you to quickly integrate
this HTM network into your application using the NuPIC Network API.
To use this code, just create a new Python file and copy/paste these lines into it. Run it and voila!

import copy
import json

from nupic.algorithms.anomaly import computeRawAnomalyScore
from nupic.data.file_record_stream import FileRecordStream
from nupic.encoders import MultiEncoder
from nupic.engine import Network

def createNetwork():
  """Create and wire the HTM network.

  Builds the "TopRegion" SP/TP region, attaches the "L1_encoder" sensor
  backed by the CSV data file, and links the sensor into the region.

  Returns:
    The configured nupic.engine.Network instance (not yet run).
  """
  network = Network()

  # Create "TopRegion" region
  createRegion(network=network, name="TopRegion", spParams={ 'spatialImp': 'py', 'columnCount': 4096, 'inputWidth': 7857, 'potentialRadius': 0, 'potentialPct': 0.5, 'globalInhibition': 0, 'localAreaDensity': -1.0, 'numActiveColumnsPerInhArea': 81.0, 'stimulusThreshold': 0, 'synPermInactiveDec': 0.01, 'synPermActiveInc': 0.1, 'synPermConnected': 0.1, 'minPctOverlapDutyCycle': 0.001, 'minPctActiveDutyCycle': 0.001, 'dutyCyclePeriod': 1000, 'maxBoost': 10.0, 'seed': -1, 'spVerbosity': 0 }, tpParams={ 'temporalImp': 'py', 'columnCount': 4096, 'inputWidth': 7857, 'cellsPerColumn': 10, 'initialPerm': 0.11, 'connectedPerm': 0.5, 'minThreshold': 8, 'newSynapseCount': 15, 'permanenceInc': 0.1, 'permanenceDec': 0.1, 'activationThreshold': 12, 'seed': 42 })

  # Create "L1_encoder" sensor
  createSensor(network=network, name="L1_encoder", params={ 'verbosity': 0 }, dataFile="/var/www/www/htm/nupic_project/words.csv", encodings={ 'L1': { 'name': 'L1', 'fieldname': 'L1', 'n': 873, 'minval': 31, 'maxval': 127, 'w': 21, 'type': 'PassThroughEncoder' },  })
  linkSensorToRegion(network=network, outName="L1_encoder", inName="TopRegion")

  return network

def createRegion(network, name, spParams, tpParams):
  """Create a region (an SP node feeding a TP node) from the given parameters.

  Args:
    network: the nupic.engine.Network to add the region to.
    name: base name; the nodes are registered as "sp"+name and "tp"+name.
    spParams: dict of SPRegion parameters (JSON-serializable).
    tpParams: dict of TPRegion parameters (JSON-serializable).
  """

  # Create spatial node
  spNode = network.addRegion(name="sp" + name, nodeType="py.SPRegion", nodeParams=json.dumps(spParams)).getSelf()
  spNode.learningMode = True
  spNode.anomalyMode = False

  # Create temporal node
  tpNode = network.addRegion(name="tp" + name, nodeType="py.TPRegion", nodeParams=json.dumps(tpParams)).getSelf()
  tpNode.learningMode = True
  tpNode.inferenceMode = True
  tpNode.topDownMode = True
  tpNode.anomalyMode = True

  # Create link between SP and TP of the same region
  # (the forum paste dropped the "network.link(srcName=" prefixes here)
  network.link(srcName="sp" + name, destName="tp" + name, linkType="UniformLink", linkParams="")
  # Feedback link: the TP's top-down output goes back into the SP
  network.link(srcName="tp" + name, destName="sp" + name, linkType="UniformLink", linkParams="", srcOutput="topDownOut", destInput="topDownIn")

def createSensor(network, name, params, dataFile, encodings):
  """Create a sensor node reading from a CSV file through a MultiEncoder.

  Args:
    network: the nupic.engine.Network to add the sensor to.
    name: base name; the node is registered as "sensor"+name.
    params: dict of RecordSensor parameters (JSON-serializable).
    dataFile: path to the CSV data file.
    encodings: dict of per-field encoder configurations.
  """

  # Create database given file name
  dataSource = FileRecordStream(streamID=dataFile)

  # Create multi-encoder to handle all sub-encoders
  encoder = MultiEncoder()
  # Register the per-field encoders; without this the `encodings`
  # argument is silently ignored and the sensor has no encoder fields.
  encoder.addMultipleEncoders(encodings)

  # Create sensor node
  sensor = network.addRegion(name="sensor" + name, nodeType="py.RecordSensor", nodeParams=json.dumps(params)).getSelf()
  sensor.dataSource = dataSource
  sensor.encoder = encoder

def linkRegionToRegion(network, outName, inName):
  """Link a region's TP output to the SP input of the next region up the hierarchy."""

  # Create link between TP from first region and SP of second region
  # (the forum paste dropped the "network.link(srcName=" prefix here)
  network.link(srcName="tp" + outName, destName="sp" + inName, linkType="UniformLink", linkParams="")

def linkSensorToRegion(network, outName, inName):
  """Link a sensor node to a region: bottom-up data, reset, and top-down feedback."""

  # Create links between Sensor and SP of the region
  # (the forum paste dropped the "network.link(srcName=" prefixes here)
  network.link(srcName="sensor" + outName, destName="sp" + inName, linkType="UniformLink", linkParams="")
  network.link(srcName="sensor" + outName, destName="sp" + inName, linkType="UniformLink", linkParams="", srcOutput="resetOut", destInput="resetIn")
  network.link(srcName="sp" + inName, destName="sensor" + outName, linkType="UniformLink", linkParams="", srcOutput="spatialTopDownOut", destInput="spatialTopDownIn")
  network.link(srcName="sp" + inName, destName="sensor" + outName, linkType="UniformLink", linkParams="", srcOutput="temporalTopDownOut", destInput="temporalTopDownIn")

def runNetwork(network, numIterations):
  """Run the network for numIterations steps, printing each sensor's
  current value and the raw anomaly score of its region.

  Args:
    network: a fully created nupic.engine.Network (see createNetwork).
    numIterations: number of time steps to run.
  """

  # Only encodings with "EnableInference" turned "ON" will be printed
  links = []
  links.append({ "sensorName": "L1_encoder", "regionName": "TopRegion", "encodings": [["L1", 0], ], "prevPredictedColumns": [] })

  # Run the network showing current values from sensors and their anomaly scores
  printRow("Iter", "Sensor", "Encoding", "Current", "Anomaly Score")
  for i in range(numIterations):
    # Advance the whole network one time step; without this the region
    # outputs read below never change from iteration to iteration.
    network.run(1)

    for link in links:
      sensorName = link["sensorName"]
      regionName = link["regionName"]
      encodings = link["encodings"]
      prevPredictedColumns = link["prevPredictedColumns"]

      sensorNode = network.regions["sensor" + sensorName]
      spNode = network.regions["sp" + regionName]
      tpNode = network.regions["tp" + regionName]

      # The anomaly score is the fraction of active columns NOT among the
      # columns predicted on the previous time step
      activeColumns = spNode.getOutputData("bottomUpOut").nonzero()[0]
      anomalyScore = computeRawAnomalyScore(activeColumns, prevPredictedColumns)

      for encoding in encodings:
        encodingFieldName = encoding[0]
        encodingFieldIdx = encoding[1]

        # Print the anomaly score along with the iteration number and current value of this encoding.
        currValue = sensorNode.getOutputData("sourceOut")[encodingFieldIdx]
        printRow(i, sensorName, encodingFieldName, currValue, anomalyScore)

      # Store the predicted columns for the next iteration
      predictedColumns = tpNode.getOutputData("topDownOut").nonzero()[0]
      link["prevPredictedColumns"] = copy.deepcopy(predictedColumns)

def printRow(iter, sensorName, encodingFieldName, currValue, anomalyScore):
  Print a row with fixed-length fields to default output.

  iter = str(iter).rjust(10) + " "
  sensorName = str(sensorName).ljust(20) + " "
  encodingFieldName = str(encodingFieldName).ljust(25) + " "
  currValue = str(currValue).rjust(15) + " "
  anomalyScore = str(anomalyScore).ljust(40)
  print iter, sensorName, encodingFieldName, currValue, anomalyScore

if __name__ == "__main__":
  network = createNetwork()

  # Python 2's input() eval()s whatever is typed, which is unsafe and
  # fails confusingly on non-numeric text; read the raw string and cast.
  numIterations = int(raw_input("Type the number of iterations: "))
  runNetwork(network, numIterations)

I attached 2 photos:
One of them is my CSV file and the second is the errors which I got when I tried to run this project.


Your data file still looks wrong. The flags header has two fields, but everywhere else there is only one? How many fields are you trying to process?

In this stage only one field I can try to process. Do you know how to change this field? Because I tried to process when I had one field. And I got same errors.

To be clear, I have not used NuPIC Studio in a few years. I don’t think it is going to work with the latest version of NuPIC. And even if you do get it working with an older version, I don’t think I can help you because I didn’t work on the code and the project seems to be unmaintained.

What are you trying to do with NuPIC? Also, why did you decide to use NuPIC Studio?

We want to visualise our results to make our network parameter tuning easier.

I mean it looks like you’re trying to do NLP but each row is going to be encoded as one string field. I don’t think that’s what you want. Be sure to read the NuPIC API docs to see some examples.