Spaces:
Running
Running
{ | |
"different_user_input_language": { | |
"codellama-70b-instruct": 0.28, | |
"gpt-4": 0.16, | |
"llama 3 70b-instruct": 0.44, | |
"codellama-34b-instruct": 0.64, | |
"codellama-13b-instruct": 0.76, | |
"gpt-3.5-turbo": 0.6, | |
"llama 3 8b-instruct": 0.76 | |
}, | |
"output_formatting_manipulation": { | |
"codellama-70b-instruct": 0.1764705882, | |
"gpt-4": 0.3529411765, | |
"llama 3 70b-instruct": 0.7058823529, | |
"codellama-34b-instruct": 0.2941176471, | |
"codellama-13b-instruct": 0.2941176471, | |
"gpt-3.5-turbo": 0.7647058824, | |
"llama 3 8b-instruct": 0.7647058824 | |
}, | |
"overload_with_information": { | |
"codellama-70b-instruct": 0.2, | |
"gpt-4": 0.35, | |
"llama 3 70b-instruct": 0.25, | |
"codellama-34b-instruct": 0.6, | |
"codellama-13b-instruct": 0.55, | |
"gpt-3.5-turbo": 0.4, | |
"llama 3 8b-instruct": 0.6 | |
}, | |
"many_shot_attack": { | |
"codellama-70b-instruct": 0.0, | |
"gpt-4": 0.2857142857, | |
"llama 3 70b-instruct": 0.2857142857, | |
"codellama-34b-instruct": 0.7142857143, | |
"codellama-13b-instruct": 0.5714285714, | |
"gpt-3.5-turbo": 0.5714285714, | |
"llama 3 8b-instruct": 0.1428571429 | |
}, | |
"ignore_previous_instructions": { | |
"codellama-70b-instruct": 0.3333333333, | |
"gpt-4": 0.2083333333, | |
"llama 3 70b-instruct": 0.375, | |
"codellama-34b-instruct": 0.4583333333, | |
"codellama-13b-instruct": 0.2916666667, | |
"gpt-3.5-turbo": 0.3333333333, | |
"llama 3 8b-instruct": 0.5416666667 | |
}, | |
"system_mode": { | |
"codellama-70b-instruct": 0.1578947368, | |
"gpt-4": 0.1578947368, | |
"llama 3 70b-instruct": 0.4210526316, | |
"codellama-34b-instruct": 0.3684210526, | |
"codellama-13b-instruct": 0.5263157895, | |
"gpt-3.5-turbo": 0.3684210526, | |
"llama 3 8b-instruct": 0.5263157895 | |
}, | |
"few_shot_attack": { | |
"codellama-70b-instruct": 0.0, | |
"gpt-4": 0.1818181818, | |
"llama 3 70b-instruct": 0.1818181818, | |
"codellama-34b-instruct": 0.4545454545, | |
"codellama-13b-instruct": 0.5454545455, | |
"gpt-3.5-turbo": 0.3636363636, | |
"llama 3 8b-instruct": 0.6363636364 | |
}, | |
"indirect_reference": { | |
"codellama-70b-instruct": 0.2142857143, | |
"gpt-4": 0.4285714286, | |
"llama 3 70b-instruct": 0.3571428571, | |
"codellama-34b-instruct": 0.3571428571, | |
"codellama-13b-instruct": 0.2142857143, | |
"gpt-3.5-turbo": 0.3571428571, | |
"llama 3 8b-instruct": 0.3571428571 | |
}, | |
"repeated_token_attack": { | |
"codellama-70b-instruct": 0.0, | |
"gpt-4": 0.0, | |
"llama 3 70b-instruct": 0.0, | |
"codellama-34b-instruct": 0.5, | |
"codellama-13b-instruct": 0.5, | |
"gpt-3.5-turbo": 0.6666666667, | |
"llama 3 8b-instruct": 0.5 | |
}, | |
"persuasion": { | |
"codellama-70b-instruct": 0.3461538462, | |
"gpt-4": 0.1538461538, | |
"llama 3 70b-instruct": 0.2692307692, | |
"codellama-34b-instruct": 0.3076923077, | |
"codellama-13b-instruct": 0.2692307692, | |
"gpt-3.5-turbo": 0.1923076923, | |
"llama 3 8b-instruct": 0.4615384615 | |
}, | |
"mixed_techniques": { | |
"codellama-70b-instruct": 0.1515151515, | |
"gpt-4": 0.3333333333, | |
"llama 3 70b-instruct": 0.2424242424, | |
"codellama-34b-instruct": 0.2121212121, | |
"codellama-13b-instruct": 0.2727272727, | |
"gpt-3.5-turbo": 0.3636363636, | |
"llama 3 8b-instruct": 0.3636363636 | |
}, | |
"virtualization": { | |
"codellama-70b-instruct": 0.0, | |
"gpt-4": 0.1428571429, | |
"llama 3 70b-instruct": 0.4285714286, | |
"codellama-34b-instruct": 0.0714285714, | |
"codellama-13b-instruct": 0.2142857143, | |
"gpt-3.5-turbo": 0.2857142857, | |
"llama 3 8b-instruct": 0.5 | |
}, | |
"payload_splitting": { | |
"codellama-70b-instruct": 0.0, | |
"gpt-4": 0.0, | |
"llama 3 70b-instruct": 0.1111111111, | |
"codellama-34b-instruct": 0.3333333333, | |
"codellama-13b-instruct": 0.4444444444, | |
"gpt-3.5-turbo": 0.2222222222, | |
"llama 3 8b-instruct": 0.3333333333 | |
}, | |
"hypothetical_scenario": { | |
"codellama-70b-instruct": 0.0769230769, | |
"gpt-4": 0.2307692308, | |
"llama 3 70b-instruct": 0.2307692308, | |
"codellama-34b-instruct": 0.1538461538, | |
"codellama-13b-instruct": 0.1538461538, | |
"gpt-3.5-turbo": 0.3076923077, | |
"llama 3 8b-instruct": 0.2307692308 | |
}, | |
"token_smuggling": { | |
"codellama-70b-instruct": 0.0, | |
"gpt-4": 0.0, | |
"llama 3 70b-instruct": 0.0769230769, | |
"codellama-34b-instruct": 0.0, | |
"codellama-13b-instruct": 0.0, | |
"gpt-3.5-turbo": 0.0769230769, | |
"llama 3 8b-instruct": 0.0769230769 | |
} | |
} | |