import argparse
import glob
import json
import os
import tempfile
from datetime import datetime
from multiprocessing import Pool, cpu_count
from urllib.parse import unquote

import numpy as np
import pandas as pd
from PIL import Image

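
# InputStream is a minimal bit-stream reader: it walks a string of '0'/'1'
# characters and returns consecutive fixed-width bit fields as integers.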
class InputStream:
    def __init__(self, data):
        self.data = data
        self.i = 0

    def read(self, size):
        out = self.data[self.i : self.i + size]
        self.i += size
        return int(out, 2)


def access_bit(data, num):
    """Return the bit at position `num` of a bytes array (MSB first)."""
    base = int(num // 8)
    shift = 7 - int(num % 8)
    return (data[base] & (1 << shift)) >> shift


def bytes2bit(data):
    """Get bit string from bytes data."""
    return ''.join([str(access_bit(data, i)) for i in range(len(data) * 8)])

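
# Label Studio's brush RLE starts with a small header: a 32-bit total value
# count, a 5-bit word size (bits per value, stored minus one), and four 4-bit
# run-length field widths (also stored minus one). Runs then follow as either
# a single repeated value or a literal sequence of values.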
def decode_rle(rle, print_params: bool = False):
    """Decode Label Studio RLE bytes into a flat numpy uint8 array.

    The caller is expected to reshape the result to
    [height, width, channel].

    Args:
        print_params (bool, optional): If True, the RLE header parameters
            are printed.
    """
    stream = InputStream(bytes2bit(rle))
    num = stream.read(32)
    word_size = stream.read(5) + 1
    rle_sizes = [stream.read(4) + 1 for _ in range(4)]

    if print_params:
        print(
            'RLE params:', num, 'values', word_size, 'word_size', rle_sizes, 'rle_sizes'
        )

    i = 0
    out = np.zeros(num, dtype=np.uint8)
    while i < num:
        x = stream.read(1)  # 1: run of one repeated value; 0: literal run
        j = i + 1 + stream.read(rle_sizes[stream.read(2)])
        if x:
            val = stream.read(word_size)
            out[i:j] = val
            i = j
        else:
            while i < j:
                val = stream.read(word_size)
                out[i] = val
                i += 1
    return out

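
# Timestamped print-based logger, shared by the worker processes.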
def log(message):
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    print(f"[{timestamp}] {message}")

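
# Write a PIL image to disk as PNG and log the destination path.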
def save_image(mask_image: Image.Image, save_path: str):
    mask_image.save(save_path, format='PNG')
    log(f'Saved mask: {save_path}')

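
# Fan the annotation files out over a worker pool; each worker returns a list
# of per-mask metadata dicts, which are flattened into one list.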
def process_files_in_parallel(files_to_process, masks_save_directory, source_files):
    # Use half of the available cores, but never fewer than one.
    with Pool(processes=max(1, cpu_count() // 2)) as pool:
        results = pool.starmap(process_file, [(file, masks_save_directory, source_files) for file in files_to_process])

    return [e for r in results for e in r]

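
# Parse one Label Studio annotation JSON: decode every brush RLE result into
# a binary mask, save one PNG per brush label, and collect its metadata.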
def process_file(file_path, masks_save_directory, source_files):
    log(f"Opening file: {file_path}")
    total_metadata = []
    try:
        with open(file_path, 'r') as file:
            data = json.load(file)
    except Exception as e:
        log(f'Error reading file {file_path}: {e}')
        return total_metadata

    image_name = data['task']['data']['image'].split('/')[-1]

    if image_name not in source_files:
        log(f"Requested file {image_name} does not exist in source data!")
        return total_metadata

    image_name_prefix = unquote(image_name.rsplit('.', 1)[0])
    log(f"Processing image: {image_name_prefix}")
    label_counts = {}

    for result in data['result']:
        if 'rle' not in result['value']:
            log(f"No 'rle' key found in result: {result.get('id', 'Unknown ID')}")
            continue

        # The RLE payload is stored as a list of byte values.
        rle_bytes = bytes(result['value']['rle'])
        mask = decode_rle(rle_bytes)

        original_height = result['original_height']
        original_width = result['original_width']
        mask = mask.reshape((original_height, original_width, 4))

        # The brush tool stores opacity in the alpha channel; binarize it
        # into a white-on-black mask.
        alpha_channel = mask[:, :, 3]
        mask_image = np.zeros((original_height, original_width, 3), dtype=np.uint8)
        mask_image[alpha_channel == 255] = [255, 255, 255]

        if 'brushlabels' in result['value']:
            for label in result['value']['brushlabels']:
                # Number the masks per label so that repeated labels on the
                # same image do not overwrite each other.
                label_counts[label] = label_counts.get(label, 0) + 1
                save_path = os.path.join(masks_save_directory, f"{image_name_prefix}-{label}-{label_counts[label]}.png")
                save_image(Image.fromarray(mask_image).convert('L'), save_path)
                metadata = {
                    "original_height": result['original_height'],
                    "original_width": result['original_width'],
                    "image": os.path.join('sourcedata/labeled/', os.path.basename(data['task']['data']['image'])),
                    "score": result.get('score', 0),
                    "mask": save_path,
                    "class": label,
                }
                total_metadata.append(metadata)

    return total_metadata

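
# Combine all single-label masks for one image into a single semantic mask
# whose pixel values are the class ids.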
def merge_file_masks(mask_info, target_mask_dir, label2id, img):
    final_mask = np.zeros(
        np.asarray(Image.open(mask_info['mask'].iloc[0])).shape, dtype=np.uint8)
    for i, r in mask_info.iterrows():
        # Wherever this mask is non-zero, stamp in its class id; later rows
        # overwrite earlier ones on overlap.
        mask = np.asarray(Image.open(r['mask']))
        final_mask = np.where(mask == 0, final_mask, label2id[r['class']])

    mask_path = os.path.join(target_mask_dir, f"{os.path.basename(img).split('.')[0]}_mask.png")
    Image.fromarray(final_mask).convert('L').save(mask_path, format='PNG')
    # `r` still holds the last row; all rows of one image share its dimensions.
    return {
        'mask': mask_path,
        'image': img,
        'original_height': r['original_height'],
        'original_width': r['original_width']
    }

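
# Group the mask metadata by source image and merge each group's masks in
# parallel.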
def merge_masks(mask_metadata, target_mask_dir, label2id):
    imgs = [
        (
            mask_metadata[mask_metadata['image'] == img],
            target_mask_dir,
            label2id,
            img
        ) for img in mask_metadata['image'].unique()]

    with Pool(processes=max(1, cpu_count() // 2)) as pool:
        new_metadata = pool.starmap(merge_file_masks, imgs)

    return new_metadata

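
# Entry point. Expects <dataset_root>/labels_raw/ to contain the Label Studio
# JSON exports; writes merged masks to <dataset_root>/semantic_masks/ and an
# index to <dataset_root>/semantic_metadata.csv.
# Usage: python <this script> /path/to/dataset_root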
def main():
    parser = argparse.ArgumentParser('maskconvert')
    parser.add_argument('dataset_root')

    arguments = parser.parse_args()
    annotations_folder_path = os.path.join(arguments.dataset_root, 'labels_raw')
    tmp_mask_path = tempfile.mkdtemp('masks')

    files_to_process = glob.glob(f"{annotations_folder_path}/*")

    # `**` only matches subdirectories when recursive=True.
    source_files = [os.path.basename(name) for name in glob.glob("sourcedata/**/*.jpg", recursive=True)]

    metadata = pd.DataFrame(process_files_in_parallel(files_to_process, tmp_mask_path, source_files))

    id2label = dict(enumerate(['void', 'Fruit', 'Leaf', 'Flower', 'Stem']))
    label2id = {v: k for k, v in id2label.items()}

    # merge_file_masks saves into this directory, so make sure it exists.
    semantic_masks_dir = os.path.join(arguments.dataset_root, 'semantic_masks')
    os.makedirs(semantic_masks_dir, exist_ok=True)

    result = merge_masks(metadata, semantic_masks_dir, label2id)
    result = pd.DataFrame(result).drop_duplicates()
    result.to_csv(
        os.path.join(arguments.dataset_root, 'semantic_metadata.csv'),
        index=False)


if __name__ == '__main__':
    main()