Update README.md
README.md CHANGED
@@ -19,6 +19,7 @@ model-index:
       value: 0.0937
       name: Loss
 license: apache-2.0
+library_name: transformers.js
 ---
 
 # vit-base-nsfw-detector
@@ -66,6 +67,41 @@ print("Predicted class:", model.config.id2label[predicted_class_idx])
 # Predicted class: sfw
 ```
 
+### Usage with Transformers.js
+```js
+import { pipeline } from 'https://cdn.jsdelivr.net/npm/@xenova/transformers@2.17.1';
+
+// Load the image classification model
+const classifier = await pipeline('image-classification', 'AdamCodd/vit-base-nsfw-detector');
+
+// Fetch an image from a URL and classify it
+async function classifyImage(url) {
+  try {
+    const response = await fetch(url);
+    if (!response.ok) throw new Error('Failed to load image');
+
+    const blob = await response.blob();
+    const image = new Image();
+    const imagePromise = new Promise((resolve, reject) => {
+      image.onload = () => resolve(image);
+      image.onerror = reject;
+      image.src = URL.createObjectURL(blob);
+    });
+
+    const img = await imagePromise; // Ensure the image is loaded
+    const classificationResults = await classifier([img.src]); // Classify the image
+    console.log('Predicted class:', classificationResults[0].label);
+  } catch (error) {
+    console.error('Error classifying image:', error);
+  }
+}
+
+// Example usage
+classifyImage('http://images.cocodataset.org/val2017/000000039769.jpg');
+// Predicted class: sfw
+```
+
+
 The model has been trained on a variety of images (realistic, 3D, drawings), yet it is not perfect and some images may be wrongly classified as NSFW when they are not.
 
 ## Training and evaluation data
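One note on the added snippet: the Transformers.js pipeline also accepts a plain image URL, so outside a DOM context (e.g. Node.js) the fetch/Image round-trip is unnecessary. And since the card itself warns that safe images are occasionally flagged, callers may prefer to act on the returned confidence score rather than the bare label. A minimal sketch under those assumptions; the `isNsfw` helper and its 0.8 cutoff are illustrative, not values from the model card:

```js
import { pipeline } from '@xenova/transformers';

const classifier = await pipeline('image-classification', 'AdamCodd/vit-base-nsfw-detector');

// Hypothetical helper: treat an image as NSFW only when the top prediction
// is 'nsfw' AND its confidence clears a chosen threshold. The 0.8 value is
// an arbitrary starting point, to be tuned against your own false-positive rate.
async function isNsfw(url, threshold = 0.8) {
  const [top] = await classifier(url); // top-1 result: { label, score }
  return top.label === 'nsfw' && top.score >= threshold;
}

console.log(await isNsfw('http://images.cocodataset.org/val2017/000000039769.jpg'));
// false (the top prediction for this image is 'sfw')
```

Raising the threshold trades missed detections for fewer false positives; the right value depends on the deployment and is best tuned on representative data.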