JSON not working, is my code wrong or is the JSON wrong

I have the tensorflow.js toxicity model loaded and am trying to use the JSON values it provides, but I cannot isolate any of the labels. The closest I have gotten is the code below, which just dumps the whole result to my screen as one giant string. I cannot seem to call on it like a JSON object no matter what I try.

// Ask the model to classify the inputs
model.classify(sentences).then((predictions) => {
  // Semi-pretty-print the results to the console
  console.log(JSON.stringify(predictions, null, 2));

  // Try to print more relevant results on screen; still messy, but it gets the job done
  let betterPredictions = JSON.stringify(predictions);

  // Grab the body element to attach everything to
  var body = document.getElementsByTagName("body")[0];

  // Add a containing div
  var div = document.createElement("div");

  // Add the text to a paragraph inside the div
  var newHead = document.createElement("p");
  var newHeadText = document.createTextNode(betterPredictions);
  newHead.appendChild(newHeadText);
  div.appendChild(newHead);

  // Attach the div to the page
  body.appendChild(div);
});

Here is the output it produces:
[
  {
    "label": "identity_attack",
    "results": [
      {
        "probabilities": {
          "0": 0.9968510270118713,
          "1": 0.0031489399261772633
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9862365126609802,
          "1": 0.013763549737632275
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9998451471328735,
          "1": 0.00015478680143132806
        },
        "match": false
      }
    ]
  },
  {
    "label": "insult",
    "results": [
      {
        "probabilities": {
          "0": 0.02373320795595646,
          "1": 0.9762668609619141
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.24276699125766754,
          "1": 0.7572329640388489
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.997154951095581,
          "1": 0.002844985108822584
        },
        "match": false
      }
    ]
  },
  {
    "label": "obscene",
    "results": [
      {
        "probabilities": {
          "0": 0.997951090335846,
          "1": 0.0020489569287747145
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.16121098399162292,
          "1": 0.8387889862060547
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.999924898147583,
          "1": 0.00007506388647016138
        },
        "match": false
      }
    ]
  },
  {
    "label": "severe_toxicity",
    "results": [
      {
        "probabilities": {
          "0": 0.9999984502792358,
          "1": 0.0000015903623307167436
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9985486268997192,
          "1": 0.0014513932401314378
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9999998807907104,
          "1": 1.036240959706447e-7
        },
        "match": false
      }
    ]
  },
  {
    "label": "sexual_explicit",
    "results": [
      {
        "probabilities": {
          "0": 0.9995192289352417,
          "1": 0.0004807313671335578
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.7500032782554626,
          "1": 0.24999678134918213
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9998985528945923,
          "1": 0.00010142526298295707
        },
        "match": false
      }
    ]
  },
  {
    "label": "threat",
    "results": [
      {
        "probabilities": {
          "0": 0.9987996816635132,
          "1": 0.0012002806179225445
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9483732581138611,
          "1": 0.051626741886138916
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9990767240524292,
          "1": 0.0009232169832102954
        },
        "match": false
      }
    ]
  },
  {
    "label": "toxicity",
    "results": [
      {
        "probabilities": {
          "0": 0.020521018654108047,
          "1": 0.9794790148735046
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.05149741843342781,
          "1": 0.9485026001930237
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.9911190867424011,
          "1": 0.008880933746695518
        },
        "match": false
      }
    ]
  }
]

CodePudding user response:

I cannot seem to call on it like a JSON object no matter what I try

This is because you are using JSON.stringify(predictions, null, 2), which converts the whole response into a string. JSON.parse does the opposite: it turns a JSON string back into an object (or array) whose properties you can access. I have attached an example below.

const apiResponse = `[
  {
    "label": "identity_attack",
    "results": [
      {
        "probabilities": {
          "0": 0.9968510270118713,
          "1": 0.0031489399261772633
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9862365126609802,
          "1": 0.013763549737632275
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9998451471328735,
          "1": 0.00015478680143132806
        },
        "match": false
      }
    ]
  },
  {
    "label": "insult",
    "results": [
      {
        "probabilities": {
          "0": 0.02373320795595646,
          "1": 0.9762668609619141
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.24276699125766754,
          "1": 0.7572329640388489
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.997154951095581,
          "1": 0.002844985108822584
        },
        "match": false
      }
    ]
  },
  {
    "label": "obscene",
    "results": [
      {
        "probabilities": {
          "0": 0.997951090335846,
          "1": 0.0020489569287747145
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.16121098399162292,
          "1": 0.8387889862060547
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.999924898147583,
          "1": 0.00007506388647016138
        },
        "match": false
      }
    ]
  },
  {
    "label": "severe_toxicity",
    "results": [
      {
        "probabilities": {
          "0": 0.9999984502792358,
          "1": 0.0000015903623307167436
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9985486268997192,
          "1": 0.0014513932401314378
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9999998807907104,
          "1": 1.036240959706447e-7
        },
        "match": false
      }
    ]
  },
  {
    "label": "sexual_explicit",
    "results": [
      {
        "probabilities": {
          "0": 0.9995192289352417,
          "1": 0.0004807313671335578
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.7500032782554626,
          "1": 0.24999678134918213
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9998985528945923,
          "1": 0.00010142526298295707
        },
        "match": false
      }
    ]
  },
  {
    "label": "threat",
    "results": [
      {
        "probabilities": {
          "0": 0.9987996816635132,
          "1": 0.0012002806179225445
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9483732581138611,
          "1": 0.051626741886138916
        },
        "match": false
      },
      {
        "probabilities": {
          "0": 0.9990767240524292,
          "1": 0.0009232169832102954
        },
        "match": false
      }
    ]
  },
  {
    "label": "toxicity",
    "results": [
      {
        "probabilities": {
          "0": 0.020521018654108047,
          "1": 0.9794790148735046
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.05149741843342781,
          "1": 0.9485026001930237
        },
        "match": true
      },
      {
        "probabilities": {
          "0": 0.9911190867424011,
          "1": 0.008880933746695518
        },
        "match": false
      }
    ]
  }
]`;

const data = JSON.parse(apiResponse);
console.log('data', data);
console.log('first index', data[0]);
console.log('label', data[0]['label']);
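
As a side note, the predictions value that model.classify resolves with is already a parsed JavaScript array, so in your own code you can skip both JSON.stringify and JSON.parse and index into it directly. Below is a minimal sketch, assuming the model and sentences variables from your question are already set up, and using a "label: match, match, match" layout I chose just for illustration, that renders each label and its match flags on the page:

// Works directly on the array returned by model.classify -- no parsing needed
model.classify(sentences).then((predictions) => {
  var body = document.getElementsByTagName("body")[0];
  var div = document.createElement("div");

  predictions.forEach((prediction) => {
    // One paragraph per label, e.g. "insult: true, true, false"
    var p = document.createElement("p");
    var matches = prediction.results.map((r) => r.match).join(", ");
    p.appendChild(document.createTextNode(prediction.label + ": " + matches));
    div.appendChild(p);
  });

  body.appendChild(div);
});

Each entry in results corresponds to one of your input sentences, so prediction.results[0].probabilities[1] gives the probability that the first sentence matches that label.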
