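# Facetorch FaceAnalyzer configuration (Hydra/OmegaConf style). Every
# `_target_` node is instantiated via hydra.utils.instantiate, and every
# `${...}` expression is an OmegaConf interpolation resolved at load time,
# which is how `device: cuda` and `optimize_transforms: true` propagate to
# all components below.
#
# A minimal usage sketch; the config file name and image paths are
# illustrative, not part of this config:
#
#   from omegaconf import OmegaConf
#   from facetorch import FaceAnalyzer
#
#   cfg = OmegaConf.load("gpu.config.yml")
#   analyzer = FaceAnalyzer(cfg.analyzer)
#   response = analyzer.run(
#       path_image="test.jpg",
#       batch_size=cfg.batch_size,
#       fix_img_size=cfg.fix_img_size,
#       return_img_data=cfg.return_img_data,
#       include_tensors=cfg.include_tensors,
#       path_output="output.jpg",
#   )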
analyzer:
  device: cuda
  optimize_transforms: true
  reader:
    _target_: facetorch.analyzer.reader.ImageReader
    device:
      _target_: torch.device
      type: ${analyzer.device}
    optimize_transform: ${analyzer.optimize_transforms}
    transform:
      _target_: torchvision.transforms.Compose
      transforms:
      - _target_: facetorch.transforms.SquarePad
      - _target_: torchvision.transforms.Resize
        size:
        - 1080
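  # Face detector: a TorchScript model fetched from Google Drive on first run.
  # reverse_colors: true feeds the network BGR input, and the Normalize means
  # (104, 117, 123) with std 1.0 are the classic RetinaFace-style per-channel
  # pixel means, applied without rescaling.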
  detector:
    _target_: facetorch.analyzer.detector.FaceDetector
    downloader:
      _target_: facetorch.downloader.DownloaderGDrive
      file_id: 154x2VjmTQVqmowB0yZw4Uck7uQs2vVBs
      path_local: /code/models/torchscript/detector/1/model.pt
    device:
      _target_: torch.device
      type: ${analyzer.device}
    reverse_colors: true
    preprocessor:
      _target_: facetorch.analyzer.detector.pre.DetectorPreProcessor
      transform:
        _target_: torchvision.transforms.Compose
        transforms:
        - _target_: torchvision.transforms.Normalize
          mean:
          - 104.0
          - 117.0
          - 123.0
          std:
          - 1.0
          - 1.0
          - 1.0
      device:
        _target_: torch.device
        type: ${analyzer.device}
      optimize_transform: ${analyzer.optimize_transforms}
      reverse_colors: ${analyzer.detector.reverse_colors}
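    # The postprocessor (PostRetFace) decodes raw detector outputs into face
    # boxes: candidates are generated from the PriorBox anchors (three scale
    # groups with strides 8/16/32), filtered by confidence_threshold, pruned
    # by non-maximum suppression (nms_threshold), and accepted only above
    # score_threshold. expand_box_ratio pads each accepted box by roughly 10%
    # before the face crop is taken.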
    postprocessor:
      _target_: facetorch.analyzer.detector.post.PostRetFace
      transform: None
      device:
        _target_: torch.device
        type: ${analyzer.device}
      optimize_transform: ${analyzer.optimize_transforms}
      confidence_threshold: 0.02
      top_k: 5000
      nms_threshold: 0.4
      keep_top_k: 750
      score_threshold: 0.6
      prior_box:
        _target_: facetorch.analyzer.detector.post.PriorBox
        min_sizes:
        - - 16
          - 32
        - - 64
          - 128
        - - 256
          - 512
        steps:
        - 8
        - 16
        - 32
        clip: false
        variance:
        - 0.1
        - 0.2
      reverse_colors: ${analyzer.detector.reverse_colors}
      expand_box_ratio: 0.1
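  # Unifier: brings all detected face crops to a common representation.
  # Normalize(mean=[-123, -117, -104], std=255) computes (x - mean) / std,
  # i.e. (x + 123) / 255 per channel, which undoes the detector's mean
  # subtraction and rescales pixels to [0, 1]; Resize then makes every crop
  # 380x380 so the predictors can batch them.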
  unifier:
    _target_: facetorch.analyzer.unifier.FaceUnifier
    transform:
      _target_: torchvision.transforms.Compose
      transforms:
      - _target_: torchvision.transforms.Normalize
        mean:
        - -123.0
        - -117.0
        - -104.0
        std:
        - 255.0
        - 255.0
        - 255.0
      - _target_: torchvision.transforms.Resize
        size:
        - 380
        - 380
    device:
      _target_: torch.device
      type: ${analyzer.device}
    optimize_transform: ${analyzer.optimize_transforms}
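  # Predictors: each entry is an independent FacePredictor that downloads a
  # TorchScript model and runs it on the unified crops; the key (embed,
  # verify, fer, deepfake, align) names the prediction stored on each face.
  # `embed` below outputs a raw face embedding; the single `abstract` label
  # marks output that is a vector rather than a class score.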
  predictor:
    embed:
      _target_: facetorch.analyzer.predictor.FacePredictor
      downloader:
        _target_: facetorch.downloader.DownloaderGDrive
        file_id: 19h3kqar1wlELAmM5hDyj9tlrUh8yjrCl
        path_local: /code/models/torchscript/predictor/embed/1/model.pt
      device:
        _target_: torch.device
        type: ${analyzer.device}
      preprocessor:
        _target_: facetorch.analyzer.predictor.pre.PredictorPreProcessor
        transform:
          _target_: torchvision.transforms.Compose
          transforms:
          - _target_: torchvision.transforms.Resize
            size:
            - 244
            - 244
          - _target_: torchvision.transforms.Normalize
            mean:
            - 0.485
            - 0.456
            - 0.406
            std:
            - 0.228
            - 0.224
            - 0.225
        device:
          _target_: torch.device
          type: ${analyzer.predictor.embed.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        reverse_colors: false
      postprocessor:
        _target_: facetorch.analyzer.predictor.post.PostEmbedder
        transform: None
        device:
          _target_: torch.device
          type: ${analyzer.predictor.embed.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        labels:
        - abstract
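    # Face verification: a recognition embedder (112x112 input, [-1, 1]
    # normalization via mean/std 0.5, BGR channel order). Crops of the same
    # identity should yield nearby embeddings. A comparison sketch, assuming
    # facetorch's Response/Prediction attribute names:
    #
    #   import torch.nn.functional as F
    #   e1 = response.faces[0].preds["verify"].logits
    #   e2 = response.faces[1].preds["verify"].logits
    #   similarity = F.cosine_similarity(e1, e2, dim=0)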
    verify:
      _target_: facetorch.analyzer.predictor.FacePredictor
      downloader:
        _target_: facetorch.downloader.DownloaderGDrive
        file_id: 1WI-mP_0mGW31OHfriPUsuFS_usYh_W8p
        path_local: /code/models/torchscript/predictor/verify/2/model.pt
      device:
        _target_: torch.device
        type: ${analyzer.device}
      preprocessor:
        _target_: facetorch.analyzer.predictor.pre.PredictorPreProcessor
        transform:
          _target_: torchvision.transforms.Compose
          transforms:
          - _target_: torchvision.transforms.Resize
            size:
            - 112
            - 112
          - _target_: torchvision.transforms.Normalize
            mean:
            - 0.5
            - 0.5
            - 0.5
            std:
            - 0.5
            - 0.5
            - 0.5
        device:
          _target_: torch.device
          type: ${analyzer.predictor.verify.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        reverse_colors: true
      postprocessor:
        _target_: facetorch.analyzer.predictor.post.PostEmbedder
        transform: None
        device:
          _target_: torch.device
          type: ${analyzer.predictor.verify.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        labels:
        - abstract
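    # Facial expression recognition: a classifier over 260x260 crops with
    # ImageNet normalization. PostArgMax takes the argmax of the logits along
    # dim 1 and maps it to one of the eight labels below, so each face ends
    # up with a single expression label.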
    fer:
      _target_: facetorch.analyzer.predictor.FacePredictor
      downloader:
        _target_: facetorch.downloader.DownloaderGDrive
        file_id: 1xoB5VYOd0XLjb-rQqqHWCkQvma4NytEd
        path_local: /code/models/torchscript/predictor/fer/2/model.pt
      device:
        _target_: torch.device
        type: ${analyzer.device}
      preprocessor:
        _target_: facetorch.analyzer.predictor.pre.PredictorPreProcessor
        transform:
          _target_: torchvision.transforms.Compose
          transforms:
          - _target_: torchvision.transforms.Resize
            size:
            - 260
            - 260
          - _target_: torchvision.transforms.Normalize
            mean:
            - 0.485
            - 0.456
            - 0.406
            std:
            - 0.229
            - 0.224
            - 0.225
        device:
          _target_: torch.device
          type: ${analyzer.predictor.fer.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        reverse_colors: false
      postprocessor:
        _target_: facetorch.analyzer.predictor.post.PostArgMax
        transform: None
        device:
          _target_: torch.device
          type: ${analyzer.predictor.fer.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        dim: 1
        labels:
        - Anger
        - Contempt
        - Disgust
        - Fear
        - Happiness
        - Neutral
        - Sadness
        - Surprise
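    # Deepfake detection: a binary head over 380x380 crops. PostSigmoidBinary
    # applies a sigmoid to the logit and, as the names suggest, labels the
    # face Fake when the score exceeds threshold: 0.7 and Real otherwise.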
    deepfake:
      _target_: facetorch.analyzer.predictor.FacePredictor
      downloader:
        _target_: facetorch.downloader.DownloaderGDrive
        file_id: 1GjDTwQpvrkCjXOdiBy1oMkzm7nt-bXFg
        path_local: /code/models/torchscript/predictor/deepfake/1/model.pt
      device:
        _target_: torch.device
        type: ${analyzer.device}
      preprocessor:
        _target_: facetorch.analyzer.predictor.pre.PredictorPreProcessor
        transform:
          _target_: torchvision.transforms.Compose
          transforms:
          - _target_: torchvision.transforms.Resize
            size:
            - 380
            - 380
          - _target_: torchvision.transforms.Normalize
            mean:
            - 0.485
            - 0.456
            - 0.406
            std:
            - 0.229
            - 0.224
            - 0.225
        device:
          _target_: torch.device
          type: ${analyzer.device}
        optimize_transform: ${analyzer.optimize_transforms}
        reverse_colors: false
      postprocessor:
        _target_: facetorch.analyzer.predictor.post.PostSigmoidBinary
        transform: None
        device:
          _target_: torch.device
          type: ${analyzer.device}
        optimize_transform: ${analyzer.optimize_transforms}
        labels:
        - Real
        - Fake
        threshold: 0.7
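    # 3D face alignment: a 3DDFA-style network over small 120x120 crops. Its
    # raw output is an abstract parameter vector (hence PostEmbedder), which
    # the utilizer.align section below converts into 3D landmarks, mesh and
    # pose.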
    align:
      _target_: facetorch.analyzer.predictor.FacePredictor
      downloader:
        _target_: facetorch.downloader.DownloaderGDrive
        file_id: 16gNFQdEH2nWvW3zTbdIAniKIbPAp6qBA
        path_local: /code/models/torchscript/predictor/align/1/model.pt
      device:
        _target_: torch.device
        type: ${analyzer.device}
      preprocessor:
        _target_: facetorch.analyzer.predictor.pre.PredictorPreProcessor
        transform:
          _target_: torchvision.transforms.Compose
          transforms:
          - _target_: torchvision.transforms.Resize
            size:
            - 120
            - 120
        device:
          _target_: torch.device
          type: ${analyzer.predictor.align.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        reverse_colors: false
      postprocessor:
        _target_: facetorch.analyzer.predictor.post.PostEmbedder
        transform: None
        device:
          _target_: torch.device
          type: ${analyzer.predictor.align.device.type}
        optimize_transform: ${analyzer.optimize_transforms}
        labels:
        - abstract
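  # Utilizers: helpers that run after prediction instead of running models.
  # align combines the align predictor's parameter vector with downloaded
  # 3DMM metadata to compute 3D landmarks, mesh and pose; draw_boxes and
  # draw_landmarks render the detections onto the output image.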
  utilizer:
    align:
      _target_: facetorch.analyzer.utilizer.align.Lmk3DMeshPose
      transform: None
      device:
        _target_: torch.device
        type: ${analyzer.device}
      optimize_transform: false
      downloader_meta:
        _target_: facetorch.downloader.DownloaderGDrive
        file_id: 11tdAcFuSXqCCf58g52WT1Rpa8KuQwe2o
        path_local: /code/data/3dmm/meta.pt
      image_size: 120
    draw_boxes:
      _target_: facetorch.analyzer.utilizer.draw.BoxDrawer
      transform: None
      device:
        _target_: torch.device
        type: ${analyzer.device}
      optimize_transform: false
      color: green
      line_width: 3
    draw_landmarks:
      _target_: facetorch.analyzer.utilizer.draw.LandmarkDrawerTorch
      transform: None
      device:
        _target_: torch.device
        type: ${analyzer.device}
      optimize_transform: false
      width: 2
      color: green
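  # Logger: writes JSON-formatted lines to the file below. level: 20 is the
  # numeric value of logging.INFO in Python's standard logging module.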
  logger:
    _target_: facetorch.logger.LoggerJsonFile
    name: facetorch
    level: 20
    path_file: /code/logs/facetorch/main.log
    json_format: '%(asctime)s %(levelname)s %(message)s'
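# Runtime options consumed by the caller rather than by the analyzer itself
# (see the usage sketch at the top of this file): batch_size sets how many
# face crops go through each predictor at once, and fix_img_size,
# return_img_data and include_tensors control input resizing and how much
# data (drawn image, raw tensors) the response carries. main.sleep is
# presumably a startup delay, in seconds, for the demo entrypoint.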
main:
  sleep: 3
debug: true
batch_size: 8
fix_img_size: true
return_img_data: true
include_tensors: true