Tasks:
Text Classification
Sub-tasks:
intent-classification
Languages:
English
Size:
10K<n<100K
License:
Not specified (the license field in the dataset info is empty)

Description:
These files contain text extracted from Stormfront, a white supremacist forum. A random set of forum posts has been sampled from several subforums and split into sentences. Those sentences have been manually labelled as containing hate speech or not, according to annotation guidelines.

Homepage:
https://github.com/Vicomtech/hate-speech-dataset

Citation:
@inproceedings{gibert2018hate,
    title = "{Hate Speech Dataset from a White Supremacy Forum}",
    author = "de Gibert, Ona and
      Perez, Naiara and
      Garc{\'\i}a-Pablos, Aitor and
      Cuadros, Montse",
    booktitle = "Proceedings of the 2nd Workshop on Abusive Language Online ({ALW}2)",
    month = oct,
    year = "2018",
    address = "Brussels, Belgium",
    publisher = "Association for Computational Linguistics",
    url = "https://www.aclweb.org/anthology/W18-5102",
    doi = "10.18653/v1/W18-5102",
    pages = "11--20",
}

Features:
text: string
user_id: int64
subforum_id: int64
num_contexts: int64
label: ClassLabel with 4 classes (noHate, hate, idk/skip, relation)

Builder name: hate_speech18
Config name: default
Version: 0.0.0

Splits:
train: 10,944 examples (1,375,340 bytes)

Download:
https://github.com/Vicomtech/hate-speech-dataset/archive/master.zip
Download size: 3,664,530 bytes
SHA-256 checksum: acc0d7ce40e22cf019daa752a5136049a45462b9ba4eab8bf40ea82dcd867eba
Dataset size: 1,375,340 bytes
Total size on disk: 5,039,870 bytes