ZOD-Mini-2D-Road-Scenes / extract_yolo_seg_lane_marking_dataset.py
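
"""Export a YOLO segmentation lane-marking dataset from the base dataset files.

For a chosen resolution this script extracts the base dataset, converts the
lane-marking annotations into YOLO segmentation label files, splits them into
train/val subsets, copies the matching images next to the labels, and writes a
dataset.yaml describing the result.
"""
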
import os
import json
import random
import argparse
import shutil
from tqdm import tqdm
import yaml
import utils
from safe_executor import SafeExecutor
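
# Map lane-marking annotation classes to YOLO class ids.
# Botts' dots and shaded markings are collapsed into the lm_solid class (id 0).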
class_mapping = {
    "lm_dashed": 1,
    "lm_solid": 0,
    "lm_botts_dot": 0,  # Treating as lm_solid
    "lm_shaded": 0      # Treating as lm_solid
}


def extract_base_dataset(from_res):
    os.system(f"python extract_base_dataset.py --from_res {from_res}")


def remove_cache_dir(cache_dir):
    if os.path.exists(cache_dir):
        shutil.rmtree(cache_dir)


def create_cache_dir(cache_dir):
    utils.check_and_create_dir(cache_dir)


def load_annotations(file):
    with open(file) as f:
        return json.load(f)


def convert_and_save_annotations(annotated_files, cache_dir, from_res):
    width, height = map(int, from_res.split('x'))
    for file in tqdm(annotated_files, desc="Converting and saving annotations"):
        base_name = os.path.basename(file)
        output_file_path = os.path.join(cache_dir, f'{base_name}.txt')
        lane_annotations_path = os.path.join(file, "annotations", "lane_markings.json")
        try:
            lane_annotations = load_annotations(lane_annotations_path)
        except FileNotFoundError:
            # No lane-marking annotation for this frame: write an empty label file
            with open(output_file_path, 'w') as f:
                f.write("")
            continue
        yolo_annotations = utils.convert_lane_annotations_to_yolo_seg_format(lane_annotations, class_mapping, width, height)
        with open(output_file_path, 'w') as f:
            if yolo_annotations:
                for line in yolo_annotations:
                    f.write(f"{line}\n")
            else:
                # Create empty file if no annotations
                f.write("")


def split_files(list_of_files, train_split=0.8):
    random.shuffle(list_of_files)
    split_index = int(len(list_of_files) * train_split)
    return list_of_files[:split_index], list_of_files[split_index:]


def prepare_yolo_dataset(train_files, val_files, from_res):
    dataset_dir = os.path.join(utils.ROOT_DIR, "dataset", f"yolo_seg_lane_{from_res}")
    train_dir = os.path.join(dataset_dir, "train")
    val_dir = os.path.join(dataset_dir, "val")

    if os.path.exists(dataset_dir):
        user_input = input(f"The dataset directory {dataset_dir} already exists. Do you want to remove it? (y/n): ")
        if user_input.lower() == 'y':
            shutil.rmtree(dataset_dir)
        else:
            print("Exiting without making changes.")
            return

    utils.check_and_create_dir(train_dir)
    utils.check_and_create_dir(val_dir)

    for file in tqdm(train_files, desc="Preparing YOLO train dataset"):
        base_name = os.path.splitext(os.path.basename(file))[0]
        image_file = os.path.join(utils.ROOT_DIR, "dataset", f'{from_res}_images', f'{base_name}.jpg')
        if os.path.exists(image_file):
            shutil.copy(os.path.join(utils.ROOT_DIR, '.cache', f'{from_res}_annotations', file), train_dir)
            shutil.copy(image_file, train_dir)

    for file in tqdm(val_files, desc="Preparing YOLO val dataset"):
        base_name = os.path.splitext(os.path.basename(file))[0]
        image_file = os.path.join(utils.ROOT_DIR, "dataset", f'{from_res}_images', f'{base_name}.jpg')
        if os.path.exists(image_file):
            shutil.copy(os.path.join(utils.ROOT_DIR, '.cache', f'{from_res}_annotations', file), val_dir)
            shutil.copy(image_file, val_dir)

    create_yaml_file(dataset_dir, train_dir, val_dir)


def create_yaml_file(dataset_dir, train_dir, val_dir):
    yaml_content = {
        'path': dataset_dir,
        'train': 'train',  # relative to 'path'
        'val': 'val',      # relative to 'path'
        'names': {
            0: 'lm_solid',
            1: 'lm_dashed',
        }
    }
    yaml_file_path = os.path.join(dataset_dir, 'dataset.yaml')
    with open(yaml_file_path, 'w') as yaml_file:
        yaml.dump(yaml_content, yaml_file, default_flow_style=False)
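
# The generated dataset.yaml has roughly this shape (yaml.dump sorts keys;
# <ROOT_DIR> and <from_res> stand in for the actual values):
#
#   names:
#     0: lm_solid
#     1: lm_dashed
#   path: <ROOT_DIR>/dataset/yolo_seg_lane_<from_res>
#   train: train
#   val: val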


def main():
    parser = argparse.ArgumentParser()
    supported_resolutions = utils.get_supported_resolutions()
    str_supported_resolutions = ', '.join(supported_resolutions)
    parser.add_argument('--from_res', type=str, help=f'Choose available dataset: {str_supported_resolutions}', required=True)
    # argparse's type=bool treats any non-empty string (including "False") as True,
    # so the caching switch is exposed as a store_true flag instead.
    parser.add_argument('--cache_enabled', action='store_true', default=False, help='Enable caching')
    args = parser.parse_args()

    if args.from_res not in supported_resolutions:
        print(f"Unsupported resolution. Supported resolutions are: {str_supported_resolutions}")
        exit(1)

    extract_base_dataset(args.from_res)

    annotated_files = utils.get_annotated_files_list()
    cache_dir = os.path.join(utils.ROOT_DIR, ".cache", f"{args.from_res}_annotations")

    if not args.cache_enabled:
        remove_cache_dir(cache_dir)
    create_cache_dir(cache_dir)

    paths_to_cleanup = [cache_dir, os.path.join(utils.ROOT_DIR, "dataset", f"yolo_seg_lane_{args.from_res}")]
    with SafeExecutor(paths_to_cleanup):
        convert_and_save_annotations(annotated_files, cache_dir, args.from_res)
        list_of_files = os.listdir(cache_dir)
        train_files, val_files = split_files(list_of_files)
        prepare_yolo_dataset(train_files, val_files, args.from_res)
        print("Annotations extracted and YOLO dataset prepared successfully")


if __name__ == "__main__":
    main()
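
# Example invocation (the resolution value is illustrative; use one reported by
# utils.get_supported_resolutions()):
#
#   python extract_yolo_seg_lane_marking_dataset.py --from_res 1920x1080
#   python extract_yolo_seg_lane_marking_dataset.py --from_res 1920x1080 --cache_enabled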