|
import argparse
import glob
import os
import shutil
import subprocess
import tarfile

import face_alignment
import numpy as np
-
# Command-line interface: a single optional cap on how many images to process.
parser = argparse.ArgumentParser()
parser.add_argument(
    '-n', '--num',
    type=int,
    default=-1,
    help="Number of samples to use, `-1` means all",
)
args = parser.parse_args()
-
# Map the sandbox mount points (/tmp/...) onto the canonical paths the rest
# of this script expects (/code, /dataset, /model/task_out).
if os.path.exists('/tmp/code'):
    # The project is mounted as the single entry under /tmp/code/.
    # NOTE(review): os.listdir(...)[0] assumes exactly one entry — confirm.
    proj_name = os.listdir("/tmp/code/")[0]
    # `ln -sf` overwrites an existing link, which plain os.symlink cannot do,
    # so the subprocess call is kept for the symlinks.
    subprocess.run(["ln", "-sf", "/tmp/code/" + proj_name, "/code"])
if os.path.exists('/tmp/dataset'):
    subprocess.run(["ln", "-sf", "/tmp/dataset", "/dataset"])
if os.path.exists('/tmp/output'):
    subprocess.run(["ln", "-sf", "/tmp/output", "/model/task_out"])
# os.makedirs(exist_ok=True) has the same semantics as `mkdir -p` but avoids
# spawning child processes; one call creates both directory levels.
os.makedirs("/model/task_out/FFHQ_lmks", exist_ok=True)
-
-
# Stage the pretrained face-detector (s3fd) and landmark-model (2DFAN4)
# weights where the face_alignment package looks for them, so it does not
# attempt to download them at runtime.
_weights = [
    ('s3fd-619a316812.pth', '/code/face_alignment/detection/sfd/'),
    ('2DFAN4-cd938726ad.zip', '/code/face_alignment/'),
]
for fname, dst_dir in _weights:
    # glob(...)[0] raises IndexError if the weight file is missing from the
    # dataset mount — fail fast rather than let a network download kick in.
    src = glob.glob(f'/dataset/**/{fname}', recursive=True)[0]
    print(f"cp {src} {dst_dir}")
    # shutil.copy replaces `subprocess.run(cmd.split(" "))`: splitting the
    # command on single spaces breaks on paths that contain whitespace, and
    # copying in-process avoids a needless child process per file.
    shutil.copy(src, dst_dir)
-
# Every FFHQ 256x256 image, sorted so the processing order is deterministic.
ffhq_paths = sorted(glob.glob('/dataset/images256x256/**/*.png', recursive=True))
# `-1` is the CLI sentinel for "use the whole dataset".
num_requested = len(ffhq_paths) if args.num == -1 else args.num
args.num = num_requested  # keep args.num in sync, as the original did
ffhq_paths = ffhq_paths[:num_requested]
print('Datas size:', len(ffhq_paths))
-
# 2D landmark detector; flip_input=False skips test-time flip averaging.
fan = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False)

for i, path in enumerate(ffhq_paths):
    print(f'[{i+1}/{len(ffhq_paths)}] Extract landmarks from {path}')
    lmks = fan.get_landmarks_from_image(path)
    # face_alignment returns None when no face is found; the original code
    # then crashed on len(None). Skip such images instead of aborting the run.
    if not lmks:
        print(f'WARNING: no face detected in {path}, skipping')
        continue
    print(f'{len(lmks)} faces detected from {path}')
    # Landmark index 33 is the nose tip in the 68-point scheme; pick the face
    # whose nose is closest to the image centre (127.5 on both axes for the
    # 256x256 FFHQ crops).
    selected_face_idx = np.abs(np.stack(lmks, 0)[:, 33] - 127.5).mean(-1).argmin(-1)
    print(f'Select the {int(selected_face_idx)+1}th face as output landmarks face')
    lmk = lmks[int(selected_face_idx)]
    # os.path.splitext is robust to extra dots in the basename, unlike
    # basename.split('.')[0] which would truncate at the first dot.
    stem = os.path.splitext(os.path.basename(path))[0]
    save_path = os.path.join("/model/task_out/FFHQ_lmks", stem + '.npy')
    np.save(save_path, lmk)
-
# Bundle the saved landmark files into a gzipped tarball next to them.
# chdir first so archive members are stored relative to task_out
# ("FFHQ_lmks/..."), matching the original `tar cfz` invocation.
os.chdir("/model/task_out/")
# tarfile replaces the unchecked `os.system("tar ...")`: no shell needed and
# failures raise an exception instead of being silently ignored.
with tarfile.open("FFHQ_lmks.tar.gz", "w:gz") as archive:
    archive.add("FFHQ_lmks")
|