"""TartanAir RGB-D dataset loader (DROID-SLAM style)."""
import numpy as np
import torch
import glob
import cv2
import os
import os.path as osp
from ..lietorch import SE3
from .base import RGBDDataset
# cur_path = osp.dirname(osp.abspath(__file__))
# test_split = osp.join(cur_path, 'tartan_test.txt')
# test_split = open(test_split).read().split()
# Held-out TartanAir trajectories (one Easy and/or Hard trajectory per
# environment). Entries are path fragments, matched by substring against
# scene paths in `TartanAir.is_test_scene`, so they work for both absolute
# and relative scene directories. The `<env>/<env>` doubling mirrors the
# on-disk layout of the official TartanAir download.
test_split = [
    "abandonedfactory/abandonedfactory/Easy/P011",
    "abandonedfactory/abandonedfactory/Hard/P011",
    "abandonedfactory_night/abandonedfactory_night/Easy/P013",
    "abandonedfactory_night/abandonedfactory_night/Hard/P014",
    "amusement/amusement/Easy/P008",
    "amusement/amusement/Hard/P007",
    "carwelding/carwelding/Easy/P007",
    "endofworld/endofworld/Easy/P009",
    "gascola/gascola/Easy/P008",
    "gascola/gascola/Hard/P009",
    "hospital/hospital/Easy/P036",
    "hospital/hospital/Hard/P049",
    "japanesealley/japanesealley/Easy/P007",
    "japanesealley/japanesealley/Hard/P005",
    "neighborhood/neighborhood/Easy/P021",
    "neighborhood/neighborhood/Hard/P017",
    "ocean/ocean/Easy/P013",
    "ocean/ocean/Hard/P009",
    "office2/office2/Easy/P011",
    "office2/office2/Hard/P010",
    "office/office/Hard/P007",
    "oldtown/oldtown/Easy/P007",
    "oldtown/oldtown/Hard/P008",
    "seasidetown/seasidetown/Easy/P009",
    "seasonsforest/seasonsforest/Easy/P011",
    "seasonsforest/seasonsforest/Hard/P006",
    "seasonsforest_winter/seasonsforest_winter/Easy/P009",
    "seasonsforest_winter/seasonsforest_winter/Hard/P018",
    "soulcity/soulcity/Easy/P012",
    "soulcity/soulcity/Hard/P009",
    "westerndesert/westerndesert/Easy/P013",
    "westerndesert/westerndesert/Hard/P007",
]
class TartanAir(RGBDDataset):
    """TartanAir RGB-D dataset.

    Expects ``self.root`` (set by the ``RGBDDataset`` base class) to contain
    scenes laid out as ``<root>/<env>/<env>/<difficulty>/<trajectory>`` with
    ``image_left/*.png``, ``depth_left/*.npy`` and ``pose_left.txt`` inside
    each trajectory directory.
    """

    # scale depths to balance rot & trans
    DEPTH_SCALE = 5.0

    def __init__(self, mode='training', **kwargs):
        """Create the dataset.

        Args:
            mode: dataset split tag, stored on the instance.
            **kwargs: forwarded to ``RGBDDataset.__init__``.
        """
        self.mode = mode
        self.n_frames = 2
        super(TartanAir, self).__init__(name='TartanAir', **kwargs)

    @staticmethod
    def is_test_scene(scene):
        """Return True if `scene` belongs to the held-out test split.

        Substring match so absolute and relative scene paths both work.
        """
        return any(x in scene for x in test_split)

    def _build_dataset(self):
        """Scan ``self.root`` and build the scene index.

        Returns:
            dict mapping scene path -> {'images', 'depths', 'poses',
            'intrinsics', 'graph'}.
        """
        from tqdm import tqdm
        print("Building TartanAir dataset")

        scene_info = {}
        scenes = glob.glob(osp.join(self.root, '*/*/*/*'))
        for scene in tqdm(sorted(scenes)):
            images = sorted(glob.glob(osp.join(scene, 'image_left/*.png')))
            depths = sorted(glob.glob(osp.join(scene, 'depth_left/*.npy')))

            # Skip trajectories where images and depths don't pair up
            # (e.g. incomplete download).
            if len(images) != len(depths):
                continue

            poses = np.loadtxt(osp.join(scene, 'pose_left.txt'), delimiter=' ')
            # Reorder the 7-dof pose columns (translation xyz + quaternion)
            # from TartanAir's NED axis convention — presumably to match the
            # internal camera convention; verify against RGBDDataset users.
            poses = poses[:, [1, 2, 0, 4, 5, 3, 6]]
            # Scale translation consistently with DEPTH_SCALE on depths.
            poses[:, :3] /= TartanAir.DEPTH_SCALE
            intrinsics = [TartanAir.calib_read()] * len(images)

            # graph of co-visible frames based on flow
            graph = self.build_frame_graph(poses, depths, intrinsics)

            scene_info[scene] = {'images': images, 'depths': depths,
                'poses': poses, 'intrinsics': intrinsics, 'graph': graph}

        return scene_info

    @staticmethod
    def calib_read():
        # fx, fy, cx, cy of the TartanAir pinhole camera.
        return np.array([320.0, 320.0, 320.0, 240.0])

    @staticmethod
    def image_read(image_file):
        """Load an image as a BGR uint8 array (cv2 default)."""
        return cv2.imread(image_file)

    @staticmethod
    def depth_read(depth_file):
        """Load a depth map and replace invalid (NaN/inf) values with 1.0.

        BUG FIX: the original used ``depth[depth == np.nan]`` and
        ``depth[depth == np.inf]``; ``x == np.nan`` is elementwise False for
        every value (IEEE-754: NaN never compares equal), so NaNs were never
        replaced. ``np.isnan`` / ``np.isinf`` perform the intended test.
        """
        depth = np.load(depth_file) / TartanAir.DEPTH_SCALE
        depth[np.isnan(depth)] = 1.0
        depth[np.isinf(depth)] = 1.0
        return depth