-
-
Notifications
You must be signed in to change notification settings - Fork 907
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
common: add Python utility for reading DMAP files (#1059)
- Loading branch information
4CJ7T
authored
Sep 21, 2023
1 parent
91c9c2c
commit c1b1cc9
Showing
5 changed files
with
109 additions
and
0 deletions.
There are no files selected for viewing
File renamed without changes.
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,42 @@ | ||
''' | ||
Example usage of MvsUtils.py for exporting dmap file content. | ||
usage: MvsReadDMAP.py [-h] [--input INPUT] [--output OUTPUT] | ||
''' | ||
|
||
from argparse import ArgumentParser | ||
from concurrent.futures import ProcessPoolExecutor | ||
from glob import glob | ||
from MvsUtils import loadDMAP | ||
import numpy as np | ||
import os | ||
import pyvips | ||
|
||
def exportDMAPContent(dmap_path):
    '''
    Export the content of a single DMAP file as PNG images.

    Writes <basename>_depth_map.png into the current working directory, plus
    <basename>_normal_map.png and <basename>_confidence_map.png when the file
    contains those maps. Invalid files are skipped.

    dmap_path: path to a .dmap file produced by OpenMVS.
    '''
    dmap = loadDMAP(dmap_path)
    if dmap is None:
        # loadDMAP already printed an error for an unreadable/invalid file;
        # without this guard the subscripts below raise TypeError.
        return

    basename = os.path.splitext(os.path.basename(dmap['file_name']))[0]

    # Depth is scaled to [0, 255] by the stored maximum depth.
    pyvips.Image.new_from_array(np.uint8(dmap['depth_map'] * (1 / dmap['depth_max']) * 255)).write_to_file('%s_depth_map.png' % basename)
    if dmap['has_normal']:
        # Rotate camera-space normals into world space and map [-1, 1] -> [0, 255].
        pyvips.Image.new_from_array(np.uint8((dmap['normal_map'] @ -dmap['R'] + 1) * 0.5 * 255)).write_to_file('%s_normal_map.png' % basename)
    if dmap['has_conf']:
        # Confidence is normalized by its own maximum before export.
        pyvips.Image.new_from_array(np.uint8(dmap['confidence_map'] * (1 / dmap['confidence_map'].max()) * 255)).write_to_file('%s_confidence_map.png' % basename)
|
||
def main():
    '''
    Parse command-line arguments and export every *.dmap file found in the
    input directory to PNG images in the output directory, in parallel.
    '''
    parser = ArgumentParser()
    parser.add_argument('-i', '--input', type=str, required=True, help='path to the depth map directory')
    # Clamp to at least 1 worker: on machines with <= 3 cores the old default
    # int(cpu_count * 0.5) - 1 was <= 0 and ProcessPoolExecutor raises
    # ValueError. os.cpu_count() may also return None; fall back to 2 cores.
    parser.add_argument('-t', '--threads', type=int, default=max(1, int((os.cpu_count() or 2) * 0.5) - 1), help='number of parallel computations')
    parser.add_argument('-o', '--output', type=str, required=True, help='path to the output directory')
    args = parser.parse_args()

    dmap_paths = glob(os.path.join(args.input, '*.dmap'))

    os.makedirs(args.output, exist_ok = True)
    # Exported images are written relative to the output directory.
    os.chdir(args.output)

    with ProcessPoolExecutor(max_workers=args.threads) as executor:
        executor.map(exportDMAPContent, dmap_paths)
|
||
# Script entry point: run only when executed directly, not when imported.
if __name__ == '__main__':
    main()
File renamed without changes.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
''' | ||
OpenMVS python utilities. | ||
E.g., from MvsUtils import loadDMAP | ||
''' | ||
|
||
import numpy as np | ||
|
||
def loadDMAP(dmap_path):
    '''
    Load an OpenMVS depth-map (.dmap) file.

    Binary layout (little-endian): 2-byte magic 'DR', 1-byte content-type bit
    mask, 1 reserved byte, image and depth-map dimensions (4 x uint32), depth
    range (2 x float32), file name (uint16 length + bytes), view IDs (uint32
    count + uint32 IDs, first is the reference view), camera K/R (3x3 float64
    each) and C (3 x float64), then the per-pixel maps.

    dmap_path: path to the .dmap file.
    Returns a dict with the header fields, camera parameters and the
    depth/normal/confidence/views maps (empty arrays when absent), or None
    when the file is not a valid depth-map.
    '''
    with open(dmap_path, 'rb') as dmap:
        file_type = dmap.read(2).decode()
        # Extract the scalar byte; keeping the size-1 array would make the
        # has_* flags numpy arrays instead of plain bools.
        content_type = np.frombuffer(dmap.read(1), dtype=np.dtype('B'))[0]
        reserve = dmap.read(1)  # reserved byte; consumed to keep offsets aligned

        has_depth = content_type > 0
        # content_type is a bit mask (1=depth, 2=normal, 4=confidence,
        # 8=views); only combinations that include the depth bit are valid.
        has_normal = content_type in (3, 7, 11, 15)
        has_conf = content_type in (5, 7, 13, 15)
        has_views = content_type in (9, 11, 13, 15)

        image_width, image_height = np.frombuffer(dmap.read(8), dtype=np.dtype('I'))
        depth_width, depth_height = np.frombuffer(dmap.read(8), dtype=np.dtype('I'))

        # Reject files with a wrong magic, no depth data, or inconsistent
        # dimensions (the depth map can never exceed the source image size).
        if (file_type != 'DR' or not has_depth or depth_width <= 0 or depth_height <= 0 or image_width < depth_width or image_height < depth_height):
            print('error: opening file \'%s\' for reading depth-data' % dmap_path)
            return None

        depth_min, depth_max = np.frombuffer(dmap.read(8), dtype=np.dtype('f'))

        file_name_length = np.frombuffer(dmap.read(2), dtype=np.dtype('H'))[0]
        file_name = dmap.read(file_name_length).decode()

        view_ids_length = np.frombuffer(dmap.read(4), dtype=np.dtype('I'))[0]
        # First ID is the reference view; the rest are its neighbors.
        reference_view_id, *neighbor_view_ids = np.frombuffer(dmap.read(4 * view_ids_length), dtype=np.dtype('I'))

        # Camera intrinsics K, rotation R (both 3x3 float64) and center C.
        K = np.frombuffer(dmap.read(72), dtype=np.dtype('d')).reshape(3, 3)
        R = np.frombuffer(dmap.read(72), dtype=np.dtype('d')).reshape(3, 3)
        C = np.frombuffer(dmap.read(24), dtype=np.dtype('d'))

        depth_length = depth_width * depth_height
        depth_map = np.frombuffer(dmap.read(4 * depth_length), dtype=np.dtype('f')).reshape(depth_height, depth_width)
        # Optional maps: empty arrays keep the dict schema uniform for callers.
        normal_map = np.frombuffer(dmap.read(4 * depth_length * 3), dtype=np.dtype('f')).reshape(depth_height, depth_width, 3) if has_normal else np.asarray([])
        confidence_map = np.frombuffer(dmap.read(4 * depth_length), dtype=np.dtype('f')).reshape(depth_height, depth_width) if has_conf else np.asarray([])
        views_map = np.frombuffer(dmap.read(depth_length * 4), dtype=np.dtype('B')).reshape(depth_height, depth_width, 4) if has_views else np.asarray([])

        data = {
            'has_normal': has_normal,
            'has_conf': has_conf,
            'has_views': has_views,
            'image_width': image_width,
            'image_height': image_height,
            'depth_width': depth_width,
            'depth_height': depth_height,
            'depth_min': depth_min,
            'depth_max': depth_max,
            'file_name': file_name,
            'reference_view_id': reference_view_id,
            'neighbor_view_ids': neighbor_view_ids,
            'K': K,
            'R': R,
            'C': C,
            'depth_map': depth_map,
            'normal_map': normal_map,
            'confidence_map': confidence_map,
            'views_map': views_map
        }

        return data