trojblue commited on
Commit
a5fb626
1 Parent(s): 7fa6660

Upload folder using huggingface_hub

Browse files
Files changed (3) hide show
  1. .argilla/dataset.json +1 -0
  2. .argilla/settings.json +1 -0
  3. README.md +370 -71
.argilla/dataset.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"id": "9927910e-57b3-46c7-b4e3-28171ac1aeb0", "inserted_at": "2024-10-02T22:30:56.432921", "updated_at": "2024-10-03T10:37:47.181396", "name": "test_anime_rating_100_v3", "status": "ready", "guidelines": "Classify the Images according to the rules.", "allow_extra_metadata": false, "distribution": {"strategy": "overlap", "min_submitted": 1}, "workspace_id": "1b54bbb4-ae99-404d-a046-022649991442", "last_activity_at": "2024-10-03T10:37:47.180272"}
.argilla/settings.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"guidelines": "Classify the Images according to the rules.", "questions": [{"id": "e5e4a154-e239-4eb3-9a6a-f8559670f527", "name": "composition_rating", "settings": {"type": "label_selection", "options": [{"value": "thoughtful composition", "text": "thoughtful composition", "description": null}, {"value": "has composition", "text": "has composition", "description": null}, {"value": "average or none composition", "text": "average or none composition", "description": null}, {"value": "bad composition", "text": "bad composition", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 5}, "title": "Composition? (framing, item arrangements, flow of lines, etc)", "description": "thoughtful composition: \u5728\u6784\u56fe\u4e0a\u6709\u4e00\u5b9a\u601d\u8003, \u6267\u884c\u5f97\u5f53, \u4f7f\u7528\u6444\u5f71/framing techniques\u4ea7\u751f\u4e86\u89c6\u89c9\u6548\u679c (\u6bd4\u5982\u62cd\u6444\u89d2\u5ea6, \u80cc\u666f\u548c\u524d\u666f\u7684\u642d\u914d, \u826f\u597d\u7684\u7a7a\u95f4\u5173\u7cfb, 2d\u6784\u56fe\u7b49) | The image should adhere to certain principles of professional photography composition, including the 'Rule Of Thirds', 'Depth and Layering', and more. 
https://danbooru.donmai.us/posts/6743649\n\nhas composition: \u5b58\u5728\u4e00\u4e9b\u6709\u610f\u8bc6\u7684\u6784\u56fe\u642d\u914d, \u4f46\u6709\u5c0f\u95ee\u9898 (\u4e0d\u662f\u6ca1\u6709\u7279\u70b9)\n\naverage or none composition: \u6ca1\u6709\u4ec0\u4e48\u7279\u70b9\u7684\u666e\u901a\u7ad9\u6869\u6784\u56fe\n\nbad composition: \u5b58\u5728\u8be1\u5f02, \u8ba9\u4eba\u4e0d\u8212\u670d, \u6a21\u578b\u591a\u534a\u4e5f\u5b66\u4e0d\u4f1a\u7684\u6784\u56fe\n\nSKIP: \u4e0d\u597d\u9009\u62e9", "required": true, "inserted_at": "2024-10-02T22:30:57.140173", "updated_at": "2024-10-02T22:30:57.140173", "type": "label_selection"}, {"id": "e63d1133-632d-4ad6-9bb0-efa5108c3edd", "name": "lighting_rating", "settings": {"type": "label_selection", "options": [{"value": "correct dynamic lighting", "text": "correct dynamic lighting", "description": null}, {"value": "has lighting", "text": "has lighting", "description": null}, {"value": "minimal or none lighting", "text": "minimal or none lighting", "description": null}, {"value": "bad lighting", "text": "bad lighting", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 5}, "title": "Lighting? (PBR only; fake lighting doesn't count)", "description": "correct dynamic lighting: \u5149\u5f71\u6b63\u786e\u5e73\u8861, \u886c\u6258\u4e86\u753b\u9762 | balanced exposure that enhances the image, for example, lighting that originates from an angle, casting highlights on select areas of the background and subject(s). 
https://danbooru.donmai.us/posts/6802960\n\nhas lighting: \u5b58\u5728\u4e00\u4e9b\u5149\u5f71\u642d\u914d, \u4f46\u6548\u679c\u6709\u5c0f\u95ee\u9898 (\u4e0d\u662f\u6ca1\u6709\u5149\u5f71)\n\nminimal or none lighting: \u6ca1\u6709\u4ec0\u4e48\u7279\u70b9\u7684\u5149\u5f71\u9009\u62e9 (\u63a5\u8fd1\u5e73\u6d82, \u53ef\u4ee5\u55b7\u67aa\u4e0a\u8fc7\u4e00\u4e9b\u9634\u5f71, \u4f46\u662f\u5149\u6e90\u4e0d\u5bf9/\u6f66\u8349/\u592a\u7b80\u7565)\n\nbad lighting: \u592a\u4eae/\u592a\u6697/\u592a\u6781\u7aef, \u753b\u9762\u8fc7\u66dd, etc | artificial or lackluster lighting, excessively dim or overexposed light\n\nSKIP: \u4e0d\u597d\u9009\u62e9", "required": true, "inserted_at": "2024-10-02T22:30:57.267134", "updated_at": "2024-10-02T22:38:35.142908", "type": "label_selection"}, {"id": "c79853fa-8b54-421a-a72b-fa96ff23bf05", "name": "color_contrast_rating", "settings": {"type": "label_selection", "options": [{"value": "vibrant and popping", "text": "vibrant and popping", "description": null}, {"value": "harmonious but not special", "text": "harmonious but not special", "description": null}, {"value": "average colors", "text": "average colors", "description": null}, {"value": "bad colors", "text": "bad colors", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 5}, "title": "Color and Contrast?", "description": "vibrant and popping: \u5728\u989c\u8272\u548c\u5bf9\u6bd4\u5ea6\u4e0a\u6709\u4e00\u5b9a\u601d\u8003, \u6267\u884c\u5f97\u5f53, \u4ea7\u751f\u4e86\u89c6\u89c9\u6548\u679c (\u6bd4\u5982*\u6709\u610f\u8bc6\u7684*\u51b7\u6696\u5bf9\u6bd4, \u8272\u5f69\u642d\u914d, \u649e\u8272, \u9971\u548c\u5ea6\u9009\u62e9\u7b49) | We prefer images with vibrant colors and strong color contrast. 
We avoid monochromatic images or those where a single color dominates the entire frame.\n\nharmonious but not special: \u989c\u8272\u548c\u8c10\u4f46\u4e0d\u7a81\u51fa, contrast\u4e0d\u591f\u6216\u8005\u6545\u610flow contrast\n\naverage colors: \u6ca1\u6709\u4ec0\u4e48\u7279\u70b9\u7684\u666e\u901a\u989c\u8272\u9009\u62e9, \u8fd8\u539f\u4e86\u89d2\u8272\u539f\u672c\u7684\u989c\u8272\n\nbad colors: \u914d\u8272\u8ba9\u4eba\u4e0d\u8212\u670d, \u989c\u8272\u810f\u4e71, \u5355\u4e00\u989c\u8272\u6ca1\u6709\u5bf9\u6bd4, \u753b\u9762\u8fc7\u4e8e\u5355\u8c03\n\nSKIP: \u4e0d\u597d\u9009\u62e9", "required": true, "inserted_at": "2024-10-02T22:30:57.383710", "updated_at": "2024-10-02T22:30:57.383710", "type": "label_selection"}, {"id": "14496415-f08b-4238-bc55-b921e694758e", "name": "subject_background_rating", "settings": {"type": "label_selection", "options": [{"value": "yes", "text": "yes", "description": null}, {"value": "mostly yes", "text": "mostly yes", "description": null}, {"value": "no", "text": "no", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 4}, "title": "Is the forground not obstructed or cutoff, and background not cluttered? 
(text is obstruction)", "description": "yes: \u7126\u70b9\u6e05\u6670, \u753b\u9762\u6e05\u6670, \u6ca1\u6709\u6a21\u7cca (\u6216\u8005\u6709\u610f\u8bc6\u7684\u6a21\u7cca)\n\nmostly yes: \u5927\u90e8\u5206\u6e05\u6670, \u4f46\u5b58\u5728\u4e00\u4e9b\u6a21\u7cca, \u6216\u8005\u6709\u610f\u8bc6\u7684\u6a21\u7cca\n\nno: \u7126\u70b9\u666e\u901a, \u753b\u9762\u504f\u6a21\u7cca, \u6ca1\u6709\u592a\u591a\u7ec6\u8282\n\nSKIP: \u4e0d\u597d\u9009\u62e9", "required": true, "inserted_at": "2024-10-02T22:30:57.507555", "updated_at": "2024-10-02T22:40:52.600660", "type": "label_selection"}, {"id": "0bfad964-5f4b-49ad-b868-0d066201b0c2", "name": "detail_rating", "settings": {"type": "label_selection", "options": [{"value": "highly detailed", "text": "highly detailed", "description": null}, {"value": "some detail", "text": "some detail", "description": null}, {"value": "average detail", "text": "average detail", "description": null}, {"value": "low detail", "text": "low detail", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 5}, "title": "Details?", "description": "highly detailed: \u9ad8\u7ec6\u8282, \u753b\u9762\u7cbe\u7ec6, \u6709\u4e00\u5b9a\u7684\u7eb9\u7406 (\u6216\u8005\u6709\u610f\u8bc6\u7684\u7b80\u5355\u6e05\u6670), \u770b\u8d77\u6765\u5f88\u7cbe\u81f4 | all critical details are clearly visible without compromise; the level of detail on the foreground subject is extremely important. 
https://x.com/SuperPig2046/status/1628983316811759616\n\nsome detail: \u5b58\u5728\u4e00\u4e9b\u7ec6\u8282/texture\u7684\u5229\u7528, \u4f46\u5b8c\u6210\u5ea6\u4e0d\u591f, \u6216\u8005\u7ec6\u8282\u4e0d\u591f\u7cbe\u7ec6\n\naverage detail: \u7ec6\u8282\u666e\u901a, \u753b\u9762\u504f\u7b80\u5355, \u6ca1\u6709\u592a\u591a\u7ec6\u8282 (\u6781\u7b80\u4e5f\u7b97)\n\nlow detail: \u7ec6\u8282\u4e0d\u8db3, \u753b\u9762\u7c97\u7cd9\n\nSKIP: \u4e0d\u597d\u9009\u62e9", "required": true, "inserted_at": "2024-10-02T22:30:57.637107", "updated_at": "2024-10-02T22:30:57.637107", "type": "label_selection"}, {"id": "80a0fd87-8f48-404a-a15a-cb2c066d905f", "name": "expressive_rating", "settings": {"type": "label_selection", "options": [{"value": "expressive image", "text": "expressive image", "description": null}, {"value": "mostly expressive", "text": "mostly expressive", "description": null}, {"value": "average expressiveness", "text": "average expressiveness", "description": null}, {"value": "stiff", "text": "stiff", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 5}, "title": "Expressiveness?", "description": "expressive image: \u7ebf\u6761\u6d41\u7545, \u6784\u9020\u7cbe\u81f4, \u8138\u90e8\u81ea\u7136, \u52a8\u4f5c\u751f\u52a8\u7b49, \u770b\u8d77\u6765\u5f88\u6709\u8868\u73b0\u529b, \u753b\u9762\u5f20\u529b\u9ad8 https://danbooru.donmai.us/posts/5844547\n\nmostly expressive: \u753b\u9762\u751f\u52a8\u6709\u4e00\u5b9a\u5f20\u529b, \u4f46\u5b8c\u6210\u5ea6\u592a\u4f4e\u6216\u8005\u5b58\u5728\u9519\u8bef (\u6bd4\u5982\u63a8\u7279\u7684\u5f88\u591arakugaki / \u968f\u624b\u753b)\n\naverage expressiveness: \u666e\u901a\u7684\u8868\u73b0\u529b, \u753b\u9762\u504f\u50f5\u786c, \u770b\u8d77\u6765\u4e0d\u751f\u52a8\n\nstiff: \u4e1a\u4f59\u6c34\u51c6, \u4f4e\u4e8e\u5e73\u5747\u6c34\u5e73, \u753b\u9762\u8868\u73b0\u529b\u4e0d\u8db3\u6216\u8005\u8be1\u5f02, \u8ba9\u4eba\u4e0d\u8212\u670d https://danbooru.donmai.us/posts/7431687\n\nSKIP: 
\u4e0d\u597d\u9009\u62e9", "required": true, "inserted_at": "2024-10-02T22:30:57.756862", "updated_at": "2024-10-02T22:30:57.756862", "type": "label_selection"}, {"id": "827fc231-1cfd-40ba-963c-ef59b60c606c", "name": "anatomy_rating", "settings": {"type": "label_selection", "options": [{"value": "good anatomy", "text": "good anatomy", "description": null}, {"value": "mostly good", "text": "mostly good", "description": null}, {"value": "average anatomy", "text": "average anatomy", "description": null}, {"value": "bad anatomy", "text": "bad anatomy", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 5}, "title": "Anatomy?", "description": "good anatomy: \u52a8\u6001\u611f\u597d, \u59ff\u52bf\u81ea\u7136\u6d41\u7545 https://danbooru.donmai.us/posts/5972188\n\nmostly good: \u603b\u4f53\u6bd4\u8f83\u597d, \u4f46\u4e3a\u827a\u672f\u6548\u679c\u505a\u4e86\u5938\u5f20, \u6bd4\u5982\u624b\u592a\u5c0f\u592a\u9f87\u7b49 https://danbooru.donmai.us/posts/6908818\n\naverage anatomy: anatomy\u666e\u901a, \u6709\u70b9\u6b7b\u677f(\u6807\u51c6\u7ad9\u6869, \u52a8\u4f5c\u751f\u786c\u7b49)\n\nbad anatomy: \u4e1a\u4f59\u6c34\u51c6, anatomy\u626d\u66f2, \u5b58\u5728\u5938\u5f20\u4e0d\u5408\u7406\u7684\u53d8\u5f62\u7b49, \u4e0d\u7b26\u5408\u4eba\u4f53\u7ed3\u6784 https://danbooru.donmai.us/posts/5030231\n\nSKIP: \u8df3\u8fc7 (\u4e0d\u662f\u4eba\u50cf, \u8138\u88ab\u906e\u4f4f, \u4eba\u7269\u592a\u8fdc\u7b49) *\u52a8\u7269\u7c7b\u5e94\u8be5\u6309\u6709\u8138\u6253\u6807", "required": true, "inserted_at": "2024-10-02T22:30:57.882535", "updated_at": "2024-10-02T22:30:57.882535", "type": "label_selection"}, {"id": "f7b95390-c907-4870-86cd-803a717abc52", "name": "five_category_rating", "settings": {"type": "label_selection", "options": [{"value": "best quality", "text": "best quality", "description": null}, {"value": "good quality", "text": "good quality", "description": null}, {"value": "average quality", "text": "average quality", 
"description": null}, {"value": "bad quality", "text": "bad quality", "description": null}, {"value": "worst quality", "text": "worst quality", "description": null}, {"value": "SKIP", "text": "SKIP", "description": null}], "visible_options": 6}, "title": "Overall Rating?", "description": "best quality: \u6311\u4e0d\u51fa\u6bdb\u75c5\u5168\u65b9\u4f4d\u7684\u597d\u56fe\n\ngood quality: \u6574\u4f53\u8fd8\u4e0d\u9519 \u770b\u8d77\u6765\u6709\u7f8e\u611f, \u4f46\u80fd\u770b\u51fa\u6709\u5c0f\u95ee\u9898\n\naverage quality: \u666e\u901a, \u6ca1\u6709\u592a\u591a\u7279\u70b9, \u4f46\u4e5f\u6ca1\u6709\u592a\u591a\u95ee\u9898\n\nbad quality: \u6709\u660e\u663e\u95ee\u9898\u7684\u56fe (\u4f46\u6ca1\u5230\u6050\u6016)\n\nworst quality: \u975e\u5e38\u574f\u6216\u8005\u8fbe\u5230\u4e86\u6050\u6016\u8c37\u7684\u56fe\n\nSKIP: Skip", "required": true, "inserted_at": "2024-10-02T22:30:58.005422", "updated_at": "2024-10-02T22:30:58.005422", "type": "label_selection"}], "fields": [{"id": "5cd4a13e-9c6f-4dda-9ca2-3949372155fd", "inserted_at": "2024-10-02T22:30:56.868763", "updated_at": "2024-10-02T22:30:56.868763", "name": "image_url", "settings": {"type": "image"}, "title": "image_url", "required": true, "description": null, "dataset_id": "9927910e-57b3-46c7-b4e3-28171ac1aeb0", "type": "image"}, {"id": "ec3309f3-a583-4842-876f-9836565e82dd", "inserted_at": "2024-10-02T22:30:56.997664", "updated_at": "2024-10-02T22:30:56.997664", "name": "s3_uri", "settings": {"type": "text", "use_markdown": false}, "title": "s3_uri", "required": true, "description": null, "dataset_id": "9927910e-57b3-46c7-b4e3-28171ac1aeb0", "type": "text"}], "vectors": [], "metadata": [], "allow_extra_metadata": false, "distribution": {"strategy": "overlap", "min_submitted": 1}, "mapping": null}
README.md CHANGED
@@ -1,73 +1,372 @@
1
  ---
2
- dataset_info:
3
- features:
4
- - name: id
5
- dtype: string
6
- - name: status
7
- dtype: string
8
- - name: _server_id
9
- dtype: string
10
- - name: image_url
11
- dtype: string
12
- - name: s3_uri
13
- dtype: string
14
- - name: composition_rating.responses
15
- sequence: string
16
- - name: composition_rating.responses.users
17
- sequence: string
18
- - name: composition_rating.responses.status
19
- sequence: string
20
- - name: lighting_rating.responses
21
- sequence: string
22
- - name: lighting_rating.responses.users
23
- sequence: string
24
- - name: lighting_rating.responses.status
25
- sequence: string
26
- - name: color_contrast_rating.responses
27
- sequence: string
28
- - name: color_contrast_rating.responses.users
29
- sequence: string
30
- - name: color_contrast_rating.responses.status
31
- sequence: string
32
- - name: subject_background_rating.responses
33
- sequence: string
34
- - name: subject_background_rating.responses.users
35
- sequence: string
36
- - name: subject_background_rating.responses.status
37
- sequence: string
38
- - name: detail_rating.responses
39
- sequence: string
40
- - name: detail_rating.responses.users
41
- sequence: string
42
- - name: detail_rating.responses.status
43
- sequence: string
44
- - name: expressive_rating.responses
45
- sequence: string
46
- - name: expressive_rating.responses.users
47
- sequence: string
48
- - name: expressive_rating.responses.status
49
- sequence: string
50
- - name: anatomy_rating.responses
51
- sequence: string
52
- - name: anatomy_rating.responses.users
53
- sequence: string
54
- - name: anatomy_rating.responses.status
55
- sequence: string
56
- - name: five_category_rating.responses
57
- sequence: string
58
- - name: five_category_rating.responses.users
59
- sequence: string
60
- - name: five_category_rating.responses.status
61
- sequence: string
62
- splits:
63
- - name: train
64
- num_bytes: 107614
65
- num_examples: 100
66
- download_size: 41851
67
- dataset_size: 107614
68
- configs:
69
- - config_name: default
70
- data_files:
71
- - split: train
72
- path: data/train-*
73
  ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ size_categories: n<1K
3
+ tags:
4
+ - rlfh
5
+ - argilla
6
+ - human-feedback
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  ---
8
+
9
+ # Dataset Card for test-anime-rating-v3-batch1
10
+
11
+
12
+
13
+
14
+
15
+
16
+
17
+ This dataset has been created with [Argilla](https://github.com/argilla-io/argilla). As shown in the sections below, this dataset can be loaded into your Argilla server as explained in [Load with Argilla](#load-with-argilla), or used directly with the `datasets` library in [Load with `datasets`](#load-with-datasets).
18
+
19
+
20
+ ## Using this dataset with Argilla
21
+
22
+ To load with Argilla, you'll just need to install Argilla as `pip install argilla --upgrade` and then use the following code:
23
+
24
+ ```python
25
+ import argilla as rg
26
+
27
+ ds = rg.Dataset.from_hub("trojblue/test-anime-rating-v3-batch1")
28
+ ```
29
+
30
+ This will load the settings and records from the dataset repository and push them to your Argilla server for exploration and annotation.
31
+
32
+ ## Using this dataset with `datasets`
33
+
34
+ To load the records of this dataset with `datasets`, you'll just need to install `datasets` as `pip install datasets --upgrade` and then use the following code:
35
+
36
+ ```python
37
+ from datasets import load_dataset
38
+
39
+ ds = load_dataset("trojblue/test-anime-rating-v3-batch1")
40
+ ```
41
+
42
+ This will only load the records of the dataset, but not the Argilla settings.
43
+
44
+ ## Dataset Structure
45
+
46
+ This dataset repo contains:
47
+
48
+ * Dataset records in a format compatible with HuggingFace `datasets`. These records will be loaded automatically when using `rg.Dataset.from_hub` and can be loaded independently using the `datasets` library via `load_dataset`.
49
+ * The [annotation guidelines](#annotation-guidelines) that have been used for building and curating the dataset, if they've been defined in Argilla.
50
+ * A dataset configuration folder conforming to the Argilla dataset format in `.argilla`.
51
+
52
+ The dataset is created in Argilla with: **fields**, **questions**, **suggestions**, **metadata**, **vectors**, and **guidelines**.
53
+
54
+ ### Fields
55
+
56
+ The **fields** are the features or text of a dataset's records. For example, the 'text' column of a text classification dataset or the 'prompt' column of an instruction-following dataset.
57
+
58
+ | Field Name | Title | Type | Required | Markdown |
59
+ | ---------- | ----- | ---- | -------- | -------- |
60
+ | image_url | image_url | image | True | |
61
+ | s3_uri | s3_uri | text | True | False |
62
+
63
+
64
+ ### Questions
65
+
66
+ The **questions** are the questions that will be asked to the annotators. They can be of different types, such as rating, text, label_selection, multi_label_selection, or ranking.
67
+
68
+ | Question Name | Title | Type | Required | Description | Values/Labels |
69
+ | ------------- | ----- | ---- | -------- | ----------- | ------------- |
70
+ | composition_rating | Composition? (framing, item arrangements, flow of lines, etc) | label_selection | True | thoughtful composition: 在构图上有一定思考, 执行得当, 使用摄影/framing techniques产生了视觉效果 (比如拍摄角度, 背景和前景的搭配, 良好的空间关系, 2d构图等) | The image should adhere to certain principles of professional photography composition, including the 'Rule Of Thirds', 'Depth and Layering', and more. https://danbooru.donmai.us/posts/6743649
71
+
72
+ has composition: 存在一些有意识的构图搭配, 但有小问题 (不是没有特点)
73
+
74
+ average or none composition: 没有什么特点的普通站桩构图
75
+
76
+ bad composition: 存在诡异, 让人不舒服, 模型多半也学不会的构图
77
+
78
+ SKIP: 不好选择 | ['thoughtful composition', 'has composition', 'average or none composition', 'bad composition', 'SKIP'] |
79
+ | lighting_rating | Lighting? (PBR only; fake lighting doesn't count) | label_selection | True | correct dynamic lighting: 光影正确平衡, 衬托了画面 | balanced exposure that enhances the image, for example, lighting that originates from an angle, casting highlights on select areas of the background and subject(s). https://danbooru.donmai.us/posts/6802960
80
+
81
+ has lighting: 存在一些光影搭配, 但效果有小问题 (不是没有光影)
82
+
83
+ minimal or none lighting: 没有什么特点的光影选择 (接近平涂, 可以喷枪上过一些阴影, 但是光源不对/潦草/太简略)
84
+
85
+ bad lighting: 太亮/太暗/太极端, 画面过曝, etc | artificial or lackluster lighting, excessively dim or overexposed light
86
+
87
+ SKIP: 不好选择 | ['correct dynamic lighting', 'has lighting', 'minimal or none lighting', 'bad lighting', 'SKIP'] |
88
+ | color_contrast_rating | Color and Contrast? | label_selection | True | vibrant and popping: 在颜色和对比度上有一定思考, 执行得当, 产生了视觉效果 (比如*有意识的*冷暖对比, 色彩搭配, 撞色, 饱和度选择等) | We prefer images with vibrant colors and strong color contrast. We avoid monochromatic images or those where a single color dominates the entire frame.
89
+
90
+ harmonious but not special: 颜色和谐但不突出, contrast不够或者故意low contrast
91
+
92
+ average colors: 没有什么特点的普通颜色选择, 还原了角色原本的颜色
93
+
94
+ bad colors: 配色让人不舒服, 颜色脏乱, 单一颜色没有对比, 画面过于单调
95
+
96
+ SKIP: 不好选择 | ['vibrant and popping', 'harmonious but not special', 'average colors', 'bad colors', 'SKIP'] |
97
+ | subject_background_rating | Is the foreground not obstructed or cutoff, and background not cluttered? (text is obstruction) | label_selection | True | yes: 焦点清晰, 画面清晰, 没有模糊 (或者有意识的模糊)
98
+
99
+ mostly yes: 大部分清晰, 但存在一些模糊, 或者有意识的模糊
100
+
101
+ no: 焦点普通, 画面偏模糊, 没有太多细节
102
+
103
+ SKIP: 不好选择 | ['yes', 'mostly yes', 'no', 'SKIP'] |
104
+ | detail_rating | Details? | label_selection | True | highly detailed: 高细节, 画面精细, 有一定的纹理 (或者有意识的简单清晰), 看起来很精致 | all critical details are clearly visible without compromise; the level of detail on the foreground subject is extremely important. https://x.com/SuperPig2046/status/1628983316811759616
105
+
106
+ some detail: 存在一些细节/texture的利用, 但完成度不够, 或者细节不够精细
107
+
108
+ average detail: 细节普通, 画面偏简单, 没有太多细节 (极简也算)
109
+
110
+ low detail: 细节不足, 画面粗糙
111
+
112
+ SKIP: 不好选择 | ['highly detailed', 'some detail', 'average detail', 'low detail', 'SKIP'] |
113
+ | expressive_rating | Expressiveness? | label_selection | True | expressive image: 线条流畅, 构造精致, 脸部自然, 动作生动等, 看起来很有表现力, 画面张力高 https://danbooru.donmai.us/posts/5844547
114
+
115
+ mostly expressive: 画面生动有一定张力, 但完成度太低或者存在错误 (比如推特的很多rakugaki / 随手画)
116
+
117
+ average expressiveness: 普通的表现力, 画面偏僵硬, 看起来不生动
118
+
119
+ stiff: 业余水准, 低于平均水平, 画面表现力不足或者诡异, 让人不舒服 https://danbooru.donmai.us/posts/7431687
120
+
121
+ SKIP: 不好选择 | ['expressive image', 'mostly expressive', 'average expressiveness', 'stiff', 'SKIP'] |
122
+ | anatomy_rating | Anatomy? | label_selection | True | good anatomy: 动态感好, 姿势自然流畅 https://danbooru.donmai.us/posts/5972188
123
+
124
+ mostly good: 总体比较好, 但为艺术效果做了夸张, 比如手太小太龇等 https://danbooru.donmai.us/posts/6908818
125
+
126
+ average anatomy: anatomy普通, 有点死板(标准站桩, 动作生硬等)
127
+
128
+ bad anatomy: 业余水准, anatomy扭曲, 存在夸张不合理的变形等, 不符合人体结构 https://danbooru.donmai.us/posts/5030231
129
+
130
+ SKIP: 跳过 (不是人像, 脸被遮住, 人物太远等) *动物类应该按有脸打标 | ['good anatomy', 'mostly good', 'average anatomy', 'bad anatomy', 'SKIP'] |
131
+ | five_category_rating | Overall Rating? | label_selection | True | best quality: 挑不出毛病全方位的好图
132
+
133
+ good quality: 整体还不错 看起来有美感, 但能看出有小问题
134
+
135
+ average quality: 普通, 没有太多特点, 但也没有太多问题
136
+
137
+ bad quality: 有明显问题的图 (但没到恐怖)
138
+
139
+ worst quality: 非常坏或者达到了恐怖谷的图
140
+
141
+ SKIP: Skip | ['best quality', 'good quality', 'average quality', 'bad quality', 'worst quality', 'SKIP'] |
142
+
143
+
144
+ <!-- check length of metadata properties -->
145
+
146
+
147
+
148
+
149
+
150
+ ### Data Instances
151
+
152
+ An example of a dataset instance in Argilla looks as follows:
153
+
154
+ ```json
155
+ {
156
+ "_server_id": "25779056-2f36-4042-a87c-1988d0e25321",
157
+ "fields": {
158
+ "image_url": "https://bucket-external.s3.amazonaws.com/dataset/dataset_qft/qft_v5c_twitter-logfav_9.6_60k/image_6567598.webp?AWSAccessKeyId=AKIAVVWUPRZ2AC6O2S3I\u0026Signature=M%2BVwsOUf7HOsOaqMRX%2Bl3f8ZyyQ%3D\u0026Expires=1727994782",
159
+ "s3_uri": "s3://bucket-external/dataset/dataset_qft/qft_v5c_twitter-logfav_9.6_60k/image_6567598.webp"
160
+ },
161
+ "id": "d0f58939-a0ab-45c5-a013-da54504c587a",
162
+ "metadata": {},
163
+ "responses": {
164
+ "anatomy_rating": [
165
+ {
166
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
167
+ "value": "good anatomy"
168
+ }
169
+ ],
170
+ "color_contrast_rating": [
171
+ {
172
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
173
+ "value": "vibrant and popping"
174
+ }
175
+ ],
176
+ "composition_rating": [
177
+ {
178
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
179
+ "value": "has composition"
180
+ }
181
+ ],
182
+ "detail_rating": [
183
+ {
184
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
185
+ "value": "highly detailed"
186
+ }
187
+ ],
188
+ "expressive_rating": [
189
+ {
190
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
191
+ "value": "expressive image"
192
+ }
193
+ ],
194
+ "five_category_rating": [
195
+ {
196
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
197
+ "value": "best quality"
198
+ }
199
+ ],
200
+ "lighting_rating": [
201
+ {
202
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
203
+ "value": "has lighting"
204
+ }
205
+ ],
206
+ "subject_background_rating": [
207
+ {
208
+ "user_id": "6951ac60-243a-468a-b9fc-926520320d48",
209
+ "value": "no"
210
+ }
211
+ ]
212
+ },
213
+ "status": "completed",
214
+ "suggestions": {},
215
+ "vectors": {}
216
+ }
217
+ ```
218
+
219
+ While the same record in HuggingFace `datasets` looks as follows:
220
+
221
+ ```json
222
+ {
223
+ "_server_id": "25779056-2f36-4042-a87c-1988d0e25321",
224
+ "anatomy_rating.responses": [
225
+ "good anatomy"
226
+ ],
227
+ "anatomy_rating.responses.status": [
228
+ "submitted"
229
+ ],
230
+ "anatomy_rating.responses.users": [
231
+ "6951ac60-243a-468a-b9fc-926520320d48"
232
+ ],
233
+ "color_contrast_rating.responses": [
234
+ "vibrant and popping"
235
+ ],
236
+ "color_contrast_rating.responses.status": [
237
+ "submitted"
238
+ ],
239
+ "color_contrast_rating.responses.users": [
240
+ "6951ac60-243a-468a-b9fc-926520320d48"
241
+ ],
242
+ "composition_rating.responses": [
243
+ "has composition"
244
+ ],
245
+ "composition_rating.responses.status": [
246
+ "submitted"
247
+ ],
248
+ "composition_rating.responses.users": [
249
+ "6951ac60-243a-468a-b9fc-926520320d48"
250
+ ],
251
+ "detail_rating.responses": [
252
+ "highly detailed"
253
+ ],
254
+ "detail_rating.responses.status": [
255
+ "submitted"
256
+ ],
257
+ "detail_rating.responses.users": [
258
+ "6951ac60-243a-468a-b9fc-926520320d48"
259
+ ],
260
+ "expressive_rating.responses": [
261
+ "expressive image"
262
+ ],
263
+ "expressive_rating.responses.status": [
264
+ "submitted"
265
+ ],
266
+ "expressive_rating.responses.users": [
267
+ "6951ac60-243a-468a-b9fc-926520320d48"
268
+ ],
269
+ "five_category_rating.responses": [
270
+ "best quality"
271
+ ],
272
+ "five_category_rating.responses.status": [
273
+ "submitted"
274
+ ],
275
+ "five_category_rating.responses.users": [
276
+ "6951ac60-243a-468a-b9fc-926520320d48"
277
+ ],
278
+ "id": "d0f58939-a0ab-45c5-a013-da54504c587a",
279
+ "image_url": "https://bucket-external.s3.amazonaws.com/dataset/dataset_qft/qft_v5c_twitter-logfav_9.6_60k/image_6567598.webp?AWSAccessKeyId=AKIAVVWUPRZ2AC6O2S3I\u0026Signature=M%2BVwsOUf7HOsOaqMRX%2Bl3f8ZyyQ%3D\u0026Expires=1727994782",
280
+ "lighting_rating.responses": [
281
+ "has lighting"
282
+ ],
283
+ "lighting_rating.responses.status": [
284
+ "submitted"
285
+ ],
286
+ "lighting_rating.responses.users": [
287
+ "6951ac60-243a-468a-b9fc-926520320d48"
288
+ ],
289
+ "s3_uri": "s3://bucket-external/dataset/dataset_qft/qft_v5c_twitter-logfav_9.6_60k/image_6567598.webp",
290
+ "status": "completed",
291
+ "subject_background_rating.responses": [
292
+ "no"
293
+ ],
294
+ "subject_background_rating.responses.status": [
295
+ "submitted"
296
+ ],
297
+ "subject_background_rating.responses.users": [
298
+ "6951ac60-243a-468a-b9fc-926520320d48"
299
+ ]
300
+ }
301
+ ```
302
+
303
+
304
+ ### Data Splits
305
+
306
+ The dataset contains a single split, which is `train`.
307
+
308
+ ## Dataset Creation
309
+
310
+ ### Curation Rationale
311
+
312
+ [More Information Needed]
313
+
314
+ ### Source Data
315
+
316
+ #### Initial Data Collection and Normalization
317
+
318
+ [More Information Needed]
319
+
320
+ #### Who are the source language producers?
321
+
322
+ [More Information Needed]
323
+
324
+ ### Annotations
325
+
326
+ #### Annotation guidelines
327
+
328
+ Classify the Images according to the rules.
329
+
330
+ #### Annotation process
331
+
332
+ [More Information Needed]
333
+
334
+ #### Who are the annotators?
335
+
336
+ [More Information Needed]
337
+
338
+ ### Personal and Sensitive Information
339
+
340
+ [More Information Needed]
341
+
342
+ ## Considerations for Using the Data
343
+
344
+ ### Social Impact of Dataset
345
+
346
+ [More Information Needed]
347
+
348
+ ### Discussion of Biases
349
+
350
+ [More Information Needed]
351
+
352
+ ### Other Known Limitations
353
+
354
+ [More Information Needed]
355
+
356
+ ## Additional Information
357
+
358
+ ### Dataset Curators
359
+
360
+ [More Information Needed]
361
+
362
+ ### Licensing Information
363
+
364
+ [More Information Needed]
365
+
366
+ ### Citation Information
367
+
368
+ [More Information Needed]
369
+
370
+ ### Contributions
371
+
372
+ [More Information Needed]