Upload folder using huggingface_hub
Browse files- coleaf_dataset.py +88 -0
coleaf_dataset.py
ADDED
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from PIL import Image
|
3 |
+
from datasets import Dataset
|
4 |
+
from huggingface_hub import HfApi
|
# Set the path to the CoLeaf dataset directory
coleaf_dir = "/home/goya/CV_Plant_Disease/Datasets/CoLeaf_dataset"

# Name and description for the Hugging Face dataset repo
dataset_name = "bhugxer/CoLeafLabels"
dataset_description = "CoLeaf dataset for fine-tuning Stable Diffusion"

# Parallel lists: one image path and one label (its folder name) per example
image_paths = []
labels = []

# Hub token read from the environment; None when unset
api_token = os.environ.get("HUGGINGFACE_API_TOKEN")

# Walk the dataset directory: each subdirectory is one class label holding
# that class's image files. sorted() makes example order deterministic
# across runs (os.listdir order is arbitrary).
for label in sorted(os.listdir(coleaf_dir)):
    label_dir = os.path.join(coleaf_dir, label)
    if not os.path.isdir(label_dir):
        continue  # skip stray top-level files
    for image_file in sorted(os.listdir(label_dir)):
        image_path = os.path.join(label_dir, image_file)
        # Guard against nested directories / non-file entries, which would
        # crash load_image() later when it tries to open() them.
        if os.path.isfile(image_path):
            image_paths.append(image_path)
            labels.append(label)

# Build an in-memory Hugging Face dataset from the collected paths/labels
dataset = Dataset.from_dict({"image_path": image_paths, "label": labels})
# Define the image loading function
def load_image(example):
    """Read the raw bytes of the file at example["image_path"].

    Returns a dict with a single "image" key holding the file's bytes,
    which Dataset.map merges into each example.
    """
    path = example["image_path"]
    with open(path, "rb") as handle:
        raw_bytes = handle.read()
    return {"image": raw_bytes}
# Materialize the raw image bytes into every example (4 worker processes),
# then drop the now-redundant filesystem-path column.
dataset = dataset.map(load_image, batched=False, num_proc=4).remove_columns(
    "image_path"
)
# Push the working directory to the Hugging Face Hub as a dataset repo.
# NOTE(review): this uploads the *current folder* ("."), not the processed
# `dataset` object built above — presumably dataset.push_to_hub(dataset_name)
# was intended for the mapped data; confirm with the author.
api = HfApi(token=api_token)
api.upload_folder(
    folder_path=".",
    repo_id=dataset_name,
    repo_type="dataset",
    ignore_patterns=["**/.*", "**/__pycache__"],
    # Fix: HfApi.upload_folder has no `use_auth_token` parameter (passing it
    # raises TypeError); authentication comes from the token given to
    # HfApi(...) above.
)
# Create the dataset card (Markdown README body) for the Hub repo.
# NOTE(review): the card says `image` holds "image data" — per load_image
# above it is the raw file bytes, not a decoded image; confirm this wording
# is intended.
dataset_card = f"""
# {dataset_name}

{dataset_description}

## Dataset Structure

- `image`: The image data.
- `label`: The label or text description of the image.

## Dataset Info

- Number of examples: {len(dataset)}
- Image format: Various (PNG, JPEG, etc.)

## License

[Insert license information here]

## Citation

[Insert citation information here]
"""
|
# Write the dataset card locally, then push it to the Hub as the repo README.
# encoding="utf-8" makes the write platform-independent (the default text
# encoding varies by OS/locale).
with open("README.md", "w", encoding="utf-8") as f:
    f.write(dataset_card)

api.upload_file(
    path_or_fileobj="README.md",
    path_in_repo="README.md",
    repo_id=dataset_name,
    repo_type="dataset",
    # Fix: HfApi.upload_file has no `use_auth_token` parameter (passing it
    # raises TypeError); authentication comes from the token given to
    # HfApi(...) earlier.
)