import albumentations as A
import cv2
from albumentations.pytorch import ToTensorV2
from .config import IMAGE_SIZE, scale
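
# Training-time augmentation pipeline (currently disabled, kept for reference):
# heavier geometric and color augmentations plus YOLO-format bbox handling.
# `scale` from config is only referenced inside this commented-out block.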
# train_transforms = A.Compose(
#     [
#         A.LongestMaxSize(max_size=int(IMAGE_SIZE * scale)),
#         A.PadIfNeeded(
#             min_height=int(IMAGE_SIZE * scale),
#             min_width=int(IMAGE_SIZE * scale),
#             border_mode=cv2.BORDER_CONSTANT,
#         ),
#         A.Rotate(limit=10, interpolation=1, border_mode=4),
#         A.RandomCrop(width=IMAGE_SIZE, height=IMAGE_SIZE),
#         A.ColorJitter(brightness=0.6, contrast=0.6, saturation=0.6, hue=0.6, p=0.4),
#         A.OneOf(
#             [
#                 A.ShiftScaleRotate(
#                     rotate_limit=20, p=0.5, border_mode=cv2.BORDER_CONSTANT
#                 ),
#                 # A.Affine(shear=15, p=0.5, mode="constant"),
#             ],
#             p=1.0,
#         ),
#         A.HorizontalFlip(p=0.5),
#         A.Blur(p=0.1),
#         A.CLAHE(p=0.1),
#         A.Posterize(p=0.1),
#         A.ToGray(p=0.1),
#         A.ChannelShuffle(p=0.05),
#         A.Normalize(
#             mean=[0, 0, 0],
#             std=[1, 1, 1],
#             max_pixel_value=255,
#         ),
#         ToTensorV2(),
#     ],
#     bbox_params=A.BboxParams(
#         format="yolo",
#         min_visibility=0.4,
#         label_fields=[],
#     ),
# )
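
# Evaluation / inference pipeline: letterbox the longest side to IMAGE_SIZE,
# pad to a square with constant borders, rescale pixel values to [0, 1]
# (mean 0, std 1, max_pixel_value 255), and convert the HWC image to a CHW tensor.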
test_transforms = A.Compose(
    [
        A.LongestMaxSize(max_size=IMAGE_SIZE),
        A.PadIfNeeded(
            min_height=IMAGE_SIZE, min_width=IMAGE_SIZE, border_mode=cv2.BORDER_CONSTANT
        ),
        A.Normalize(
            mean=[0, 0, 0],
            std=[1, 1, 1],
            max_pixel_value=255,
        ),
        ToTensorV2(),
    ]
    # bbox_params=A.BboxParams(format="yolo", min_visibility=0.4, label_fields=[]),
)
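
# Resize-only pipeline: letterbox and pad to IMAGE_SIZE x IMAGE_SIZE without
# normalization or tensor conversion, so the output stays a numpy image.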
resize_transforms = A.Compose(
    [
        A.LongestMaxSize(max_size=IMAGE_SIZE),
        A.PadIfNeeded(
            min_height=IMAGE_SIZE, min_width=IMAGE_SIZE, border_mode=cv2.BORDER_CONSTANT
        ),
    ]
)
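

# Minimal usage sketch of the pipelines above. It uses a synthetic image so no
# dataset files are needed; because of the relative import at the top, run it
# with `python -m` from the package root rather than as a standalone script.
if __name__ == "__main__":
    import numpy as np

    # Fake HWC uint8 RGB frame standing in for a real input image.
    img = np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8)

    tensor = test_transforms(image=img)["image"]     # float CHW tensor, values in [0, 1]
    resized = resize_transforms(image=img)["image"]  # uint8 HWC numpy array, IMAGE_SIZE x IMAGE_SIZE
    print(tensor.shape, tensor.dtype)
    print(resized.shape, resized.dtype)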