File size: 2,858 Bytes
97f9a3c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
{
    "description": "Binary classifier on harmful text in Singapore context",
    "embedding": {
        "tokenizer": "BAAI/bge-large-en-v1.5",
        "model": "BAAI/bge-large-en-v1.5",
        "max_length": 512,
        "batch_size": 32
    },
    "classifier": {
        "binary": {
            "calibrated": true,
            "threshold": {
                "high_recall": 0.2,
                "balanced": 0.5,
                "high_precision": 0.8
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-binary.onnx"
        },
        "hateful": {
            "calibrated": false,
            "threshold": {
                "high_recall": 0.516,
                "balanced": 0.827,
                "high_precision": 1.254
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-hateful.onnx"
        },
        "harassment": {
            "calibrated": false,
            "threshold": {
                "high_recall": 1.326,
                "balanced": 1.326,
                "high_precision": 1.955
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-harassment.onnx"
        },
        "public_harm": {
            "calibrated": false,
            "threshold": {
                "high_recall": 0.953,
                "balanced": 0.953,
                "high_precision": 0.953
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-public_harm.onnx"
        },
        "self_harm": {
            "calibrated": false,
            "threshold": {
                "high_recall": 0.915,
                "balanced": 0.915,
                "high_precision": 0.915
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-self_harm.onnx"
        },
        "sexual": {
            "calibrated": false,
            "threshold": {
                "high_recall": 0.388,
                "balanced": 0.500,
                "high_precision": 0.702
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-sexual.onnx"
        },
        "toxic": {
            "calibrated": false,
            "threshold": {
                "high_recall": -0.089,
                "balanced": 0.136,
                "high_precision": 0.327
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-toxic.onnx"
        },
        "violent": {
            "calibrated": false,
            "threshold": {
                "high_recall": 0.317,
                "balanced": 0.981,
                "high_precision": 0.981
            },
            "model_type": "ridge_classifier",
            "model_fp": "models/lionguard-violent.onnx"
        }
    }
}