This repository was archived by the owner on Nov 13, 2024. It is now read-only.

Commit f3b4658

fixes

1 parent: 2300da4

3 files changed: +14 -14 lines changed

core/imagelib/warp.py

Lines changed: 2 additions & 2 deletions
@@ -25,8 +25,8 @@ def gen_warp_params (w, flip, rotation_range=[-10,10], scale_range=[-0.5, 0.5],

    half_cell_size = cell_size // 2

-   mapx = cv2.resize(mapx, (w+cell_size,)*2 )[half_cell_size:-half_cell_size-1,half_cell_size:-half_cell_size-1].astype(np.float32)
-   mapy = cv2.resize(mapy, (w+cell_size,)*2 )[half_cell_size:-half_cell_size-1,half_cell_size:-half_cell_size-1].astype(np.float32)
+   mapx = cv2.resize(mapx, (w+cell_size,)*2 )[half_cell_size:-half_cell_size,half_cell_size:-half_cell_size].astype(np.float32)
+   mapy = cv2.resize(mapy, (w+cell_size,)*2 )[half_cell_size:-half_cell_size,half_cell_size:-half_cell_size].astype(np.float32)

    #random transform
    random_transform_mat = cv2.getRotationMatrix2D((w // 2, w // 2), rotation, scale)
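
Note on the slice change: cropping half_cell_size from each side of a (w+cell_size)-sized map yields a w x w grid, while the old "-half_cell_size-1" upper bound dropped one extra row and column, giving (w-1) x (w-1). A quick shape check (a standalone sketch with made-up values for w and cell_size, not code from this repository):

import cv2
import numpy as np

# Illustrative values only; in gen_warp_params these come from the sample
# size and a randomly chosen grid cell size.
w, cell_size = 128, 32
half_cell_size = cell_size // 2

grid = np.random.uniform(size=(5, 5)).astype(np.float32)
resized = cv2.resize(grid, (w + cell_size,) * 2)           # shape (160, 160)

old_crop = resized[half_cell_size:-half_cell_size - 1,
                   half_cell_size:-half_cell_size - 1]     # (127, 127), one short
new_crop = resized[half_cell_size:-half_cell_size,
                   half_cell_size:-half_cell_size]         # (128, 128), matches w

print(old_crop.shape, new_crop.shape)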

samplelib/Sample.py

Lines changed: 1 addition & 1 deletion
@@ -64,7 +64,7 @@ def __init__(self, sample_type=None,

    def get_pitch_yaw_roll(self):
        if self.pitch_yaw_roll is None:
-           self.pitch_yaw_roll = LandmarksProcessor.estimate_pitch_yaw_roll(landmarks, size=self.shape[1])
+           self.pitch_yaw_roll = LandmarksProcessor.estimate_pitch_yaw_roll(self.landmarks, size=self.shape[1])
        return self.pitch_yaw_roll

    def set_filename_offset_size(self, filename, offset, size):
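
This is a NameError fix: inside the method, the bare name landmarks is a local/global lookup, so the old line failed on the first call; only the instance attribute self.landmarks is defined. A minimal stand-in showing the difference (simplified sketch, not the repository's Sample class):

class Sketch:
    def __init__(self, landmarks):
        self.landmarks = landmarks
        self.pitch_yaw_roll = None            # computed lazily, then cached

    def broken(self):
        return len(landmarks)                 # NameError: 'landmarks' is not defined

    def fixed(self):
        if self.pitch_yaw_roll is None:
            self.pitch_yaw_roll = len(self.landmarks)   # placeholder for the real estimate
        return self.pitch_yaw_roll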

samplelib/SampleProcessor.py

Lines changed: 11 additions & 11 deletions
@@ -1,4 +1,5 @@
import collections
+import math
from enum import IntEnum

import cv2
@@ -7,6 +8,7 @@
from core import imagelib
from facelib import FaceType, LandmarksProcessor

+
class SampleProcessor(object):
    class SampleType(IntEnum):
        NONE = 0
@@ -114,8 +116,8 @@ def get_eyes_mask():
if sample_type == SPST.FACE_IMAGE or sample_type == SPST.FACE_MASK:
    if not is_face_sample:
        raise ValueError("face_samples should be provided for sample_type FACE_*")
-
-if is_face_sample:
+
+if sample_type == SPST.FACE_IMAGE or sample_type == SPST.FACE_MASK:
    face_type = opts.get('face_type', None)
    face_mask_type = opts.get('face_mask_type', SPFMT.NONE)

@@ -125,7 +127,6 @@ def get_eyes_mask():
if face_type > sample.face_type:
    raise Exception ('sample %s type %s does not match model requirement %s. Consider extract necessary type of faces.' % (sample.filename, sample.face_type, face_type) )

-if sample_type == SPST.FACE_IMAGE or sample_type == SPST.FACE_MASK:

if sample_type == SPST.FACE_MASK:

@@ -156,7 +157,7 @@ def get_eyes_mask():
img = cv2.resize( img, (resolution, resolution), cv2.INTER_CUBIC )

img = imagelib.warp_by_params (params_per_resolution[resolution], img, warp, transform, can_flip=True, border_replicate=border_replicate, cv2_inter=cv2.INTER_LINEAR)
-
+
if len(img.shape) == 2:
    img = img[...,None]

@@ -175,11 +176,11 @@ def get_eyes_mask():
else:
    if w != resolution:
        img = cv2.resize( img, (resolution, resolution), cv2.INTER_CUBIC )
-       img = imagelib.warp_by_params (params_per_resolution[resolution], img, warp, transform, can_flip=True, border_replicate=border_replicate)

+   img = imagelib.warp_by_params (params_per_resolution[resolution], img, warp, transform, can_flip=True, border_replicate=border_replicate)
+
    img = np.clip(img.astype(np.float32), 0, 1)
-
-
+

    # Apply random color transfer
    if ct_mode is not None and ct_sample is not None:
@@ -273,17 +274,16 @@ def get_eyes_mask():
    l = np.clip(l, 0.0, 1.0)
    out_sample = l
elif sample_type == SPST.PITCH_YAW_ROLL or sample_type == SPST.PITCH_YAW_ROLL_SIGMOID:
-   pitch_yaw_roll = sample.get_pitch_yaw_roll()
-
-   if params['flip']:
+   pitch,yaw,roll = sample.get_pitch_yaw_roll()
+   if params_per_resolution[resolution]['flip']:
        yaw = -yaw

    if sample_type == SPST.PITCH_YAW_ROLL_SIGMOID:
        pitch = np.clip( (pitch / math.pi) / 2.0 + 0.5, 0, 1)
        yaw = np.clip( (yaw / math.pi) / 2.0 + 0.5, 0, 1)
        roll = np.clip( (roll / math.pi) / 2.0 + 0.5, 0, 1)

-   out_sample = (pitch, yaw, roll)
+   out_sample = (pitch, yaw)
else:
    raise ValueError ('expected sample_type')
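
In this last hunk, get_pitch_yaw_roll() is now unpacked directly into pitch, yaw and roll, the flip flag is read from params_per_resolution[resolution] instead of params, the output for these sample types is reduced to (pitch, yaw), and the PITCH_YAW_ROLL_SIGMOID branch maps each angle from radians into [0, 1]; the newly added import math supplies math.pi for that. A small sketch of the normalization with made-up angles (not code from the repository):

import math
import numpy as np

def to_unit_range(angle):
    # radians in [-pi, pi] -> [0, 1], clipped at the ends
    return float(np.clip((angle / math.pi) / 2.0 + 0.5, 0, 1))

pitch, yaw, roll = 0.30, -1.20, 0.05   # illustrative radians
flipped = True                         # stands in for params_per_resolution[resolution]['flip']
if flipped:
    yaw = -yaw                         # a horizontal flip negates yaw

print([to_unit_range(a) for a in (pitch, yaw, roll)])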

0 commit comments
