toxicity.js
const toxicity = require('@tensorflow-models/toxicity');

const threshold = 0.9;

// Load the model. Users optionally pass in a threshold and an array of
// labels to include.
async function createClassifier() {
  return toxicity.load(threshold);
}

module.exports = {
  createClassifier,
};
// classifier.classify(sentences).then(predictions => {
//   `predictions` is an array of objects, one for each prediction head,
//   that contains the raw probabilities for each input along with the
//   final prediction in `match` (either `true` or `false`).
//   If neither prediction exceeds the threshold, `match` is `null`.
//   console.log(predictions);
/*
prints:
{
  "label": "identity_attack",
  "results": [{
    "probabilities": [0.9659664034843445, 0.03403361141681671],
    "match": false
  }]
},
{
  "label": "insult",
  "results": [{
    "probabilities": [0.08124706149101257, 0.9187529683113098],
    "match": true
  }]
},
...
*/
// });
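
// A minimal usage sketch, not part of the original module (assumptions: this
// file is saved as toxicity.js, and a tf.js backend such as @tensorflow/tfjs
// or @tensorflow/tfjs-node is installed alongside @tensorflow-models/toxicity).
// The require.main guard keeps the example from running when this file is
// imported as a module; run `node toxicity.js` to try it directly.
if (require.main === module) {
  (async () => {
    const classifier = await createClassifier();
    const sentences = ['you suck'];
    const predictions = await classifier.classify(sentences);
    // Prints the per-label probabilities and `match` flags described above.
    console.log(JSON.stringify(predictions, null, 2));
  })();
}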