-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathtoxicityprediction.js
More file actions
36 lines (32 loc) · 1.1 KB
/
toxicityprediction.js
File metadata and controls
36 lines (32 loc) · 1.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
// Returns every piece of visible text on the current HTML page,
// as rendered (innerText reflects layout, unlike textContent).
const getText = () => document.body.innerText;
// Grab the page text and break it into newline-delimited lines.
let allText = getText().split("\n");
// Keep only lines long enough (3+ chars) to be meaningful phrases.
let splitText = allText.filter(phrase => phrase.length > 2);
console.log("NUMBER OF PHRASES:", splitText.length);

// Minimum prediction confidence passed to the toxicity model.
const threshold = 0.05;

// Per-phrase toxicity flags, filled in once classification completes.
let filter = [];
// Load the TensorFlow.js toxicity model, classify every phrase on the
// page, and replace phrases flagged as toxic in the page's HTML.
toxicity.load(threshold).then(model => {
  let sentences = splitText;
  // predict toxicity using the model
  model.classify(sentences).then(prediction => {
    // Look up the overall "toxicity" head by its label instead of relying
    // on its position in the prediction array (was hard-coded as index 6);
    // fall back to the old index if the label is not found.
    const overall = prediction.find(p => p.label === "toxicity") ?? prediction[6];
    for (let j = 0; j < overall.results.length; j++) {
      // push the boolean match flag for each phrase to the array
      filter.push(overall.results[j].match);
    }
    // if a phrase is toxic, replace it in the current page
    for (let i = 0; i < filter.length; i++) {
      if (filter[i] === true) {
        // NOTE(review): a plain string replace on innerHTML only hits the
        // first occurrence and misses text that spans element boundaries.
        document.body.innerHTML = document.body.innerHTML.replace(sentences[i], '[removed due to toxicity]');
        console.log(sentences[i], "REPLACED");
      }
    }
    // Log only once classification has actually finished (this was
    // previously logged synchronously before the model loaded, and
    // misspelled "PREDICTONS").
    console.log("PREDICTIONS LOADED");
  }).catch(err => console.error("toxicity classification failed:", err));
}).catch(err => console.error("toxicity model failed to load:", err));