Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 22 additions & 1 deletion app/processors/face_swappers.py
Original file line number Diff line number Diff line change
Expand Up @@ -288,4 +288,25 @@ def run_swapper_ghostface(self, image, embedding, output, swapper_model='GhostFa
torch.cuda.synchronize()
elif self.models_processor.device != "cpu":
self.models_processor.syncvec.cpu()
ghostfaceswap_model.run_with_iobinding(io_binding)
ghostfaceswap_model.run_with_iobinding(io_binding)

def calc_swapper_latent_hyperswap256(self, source_embedding, version="A"):
    """Return the identity latent for Hyperswap256 as a (1, D) row vector.

    The embedding is L2-normalized to unit length and reshaped to a single
    row so it can be bound directly as the model's 'source' input.

    NOTE(review): `version` is accepted only for signature parity with the
    other calc_swapper_latent_* helpers; every Hyperswap variant appears to
    share the same latent preparation, so it is unused here.
    """
    normalized = source_embedding / l2norm(source_embedding)
    return normalized.reshape((1, -1))

def run_hyperswap256(self, image, embedding, output, version="A"):
    """Run one Hyperswap256 inference pass via ONNX Runtime io-binding.

    Args:
        image: float32 tensor bound as the 'target' input with shape
            (1, 3, 256, 256); must already live on the inference device.
        embedding: float32 identity latent bound as 'source', shape (1, 512).
        output: pre-allocated float32 tensor of shape (1, 3, 256, 256) that
            receives the swapped face; written in place via its data pointer.
        version: model variant letter ('A', 'B' or 'C') selecting which
            Hyperswap256 model to use.
    """
    model_key = f'Hyperswap256 Version {version}'
    models = self.models_processor.models
    # Lazily load the requested variant the first time it is used.
    if not models[model_key]:
        models[model_key] = self.models_processor.load_model(model_key)
    model = models[model_key]

    device = self.models_processor.device
    binding = model.io_binding()
    binding.bind_input(name='target', device_type=device, device_id=0,
                       element_type=np.float32, shape=(1, 3, 256, 256),
                       buffer_ptr=image.data_ptr())
    binding.bind_input(name='source', device_type=device, device_id=0,
                       element_type=np.float32, shape=(1, 512),
                       buffer_ptr=embedding.data_ptr())
    binding.bind_output(name='output', device_type=device, device_id=0,
                        element_type=np.float32, shape=(1, 3, 256, 256),
                        buffer_ptr=output.data_ptr())

    # Ensure pending device work on the bound buffers is finished before
    # ONNX Runtime reads them through raw pointers.
    if device == "cuda":
        torch.cuda.synchronize()
    elif device != "cpu":
        self.models_processor.syncvec.cpu()
    model.run_with_iobinding(binding)
21 changes: 21 additions & 0 deletions app/processors/models_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,9 @@
'InStyleSwapper256 Version A': 'Inswapper128ArcFace',
'InStyleSwapper256 Version B': 'Inswapper128ArcFace',
'InStyleSwapper256 Version C': 'Inswapper128ArcFace',
'Hyperswap256 Version A': 'Inswapper128ArcFace',
'Hyperswap256 Version B': 'Inswapper128ArcFace',
'Hyperswap256 Version C': 'Inswapper128ArcFace',
'DeepFaceLive (DFM)': 'Inswapper128ArcFace',
'SimSwap512': 'SimSwapArcFace',
'GhostFace-v1': 'GhostArcFace',
Expand Down Expand Up @@ -467,5 +470,23 @@
"hash": "a6164debbf1e851c3dcefa622111c42a78afd9bb8f1540e7d01172ddf642c3b5",
"url": f"{assets_repo}/v0.1.0_lp/warping_spade-fix.onnx"

},
{
"model_name": "Hyperswap256 Version A",
"local_path": f"{models_dir}/hyperswap_1a_256.onnx",
"hash": "c0e98a8a03a238f461ed3d2570e426b49f46745ee400854a60dceeb70c246add",
"url": f"{assets_repo}/v0.1.0/hyperswap_1a_256.onnx"
},
{
"model_name": "Hyperswap256 Version B",
"local_path": f"{models_dir}/hyperswap_1b_256.onnx",
"hash": "5124031789c42f71b9558fb71954ef7aedb6da7ed9fac79293e23c61a792a73e",
"url": f"{assets_repo}/v0.1.0/hyperswap_1b_256.onnx"
},
{
"model_name": "Hyperswap256 Version C",
"local_path": f"{models_dir}/hyperswap_1c_256.onnx",
"hash": "5528c2d76fe9986c99d829278987ef9f3a630cb606db7628d02b57b330f406a5",
"url": f"{assets_repo}/v0.1.0/hyperswap_1c_256.onnx"
}
]
6 changes: 6 additions & 0 deletions app/processors/models_processor.py
Original file line number Diff line number Diff line change
Expand Up @@ -327,6 +327,12 @@ def calc_swapper_latent_cscs(self, source_embedding):
def run_swapper_cscs(self, image, embedding, output):
self.face_swappers.run_swapper_cscs(image, embedding, output)

def calc_swapper_latent_hyperswap256(self, source_embedding, version="A"):
    """Delegate Hyperswap256 latent preparation to the face-swappers component."""
    return self.face_swappers.calc_swapper_latent_hyperswap256(source_embedding, version=version)

def run_hyperswap256(self, image, embedding, output, version="A"):
    """Delegate a Hyperswap256 swap to the face-swappers component.

    The result is written in place into `output`; nothing is returned.
    """
    self.face_swappers.run_hyperswap256(image, embedding, output, version=version)

def run_enhance_frame_tile_process(self, img, enhancer_type, tile_size=256, scale=1):
return self.frame_enhancers.run_enhance_frame_tile_process(img, enhancer_type, tile_size, scale)

Expand Down
32 changes: 32 additions & 0 deletions app/processors/workers/frame_worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -442,6 +442,18 @@ def get_affined_face_dim_and_swapping_latents(self, original_faces: tuple, swapp
latent = []
input_face_affined = original_face_512
dim = 4

elif swapper_model in ( 'Hyperswap256 Version A', 'Hyperswap256 Version B', 'Hyperswap256 Version C'):
version = swapper_model[-1]
latent = torch.from_numpy(self.models_processor.calc_swapper_latent_hyperswap256(s_e, version)).float().to(self.models_processor.device)
if parameters['FaceLikenessEnableToggle']:
factor = parameters['FaceLikenessFactorDecimalSlider']
dst_latent = torch.from_numpy(self.models_processor.calc_swapper_latent_hyperswap256(t_e, version)).float().to(self.models_processor.device)
latent = latent - (factor * dst_latent)

dim = 2
input_face_affined = original_face_256

return input_face_affined, dfm_model, dim, latent

def get_swapped_and_prev_face(self, output, input_face_affined, original_face_512, latent, itex, dim, swapper_model, dfm_model, parameters, ):
Expand Down Expand Up @@ -487,6 +499,26 @@ def get_swapped_and_prev_face(self, output, input_face_affined, original_face_51
output = torch.mul(output, 255)
output = torch.clamp(output, 0, 255)

elif swapper_model in ('Hyperswap256 Version A', 'Hyperswap256 Version B', 'Hyperswap256 Version C'):
version = swapper_model[-1] #Version Name
with torch.no_grad():
for _ in range(itex):
input_face_disc = input_face_affined.permute(2, 0, 1)
input_face_disc = torch.unsqueeze(input_face_disc, 0).contiguous()

swapper_output = torch.empty((1,3,256,256), dtype=torch.float32, device=self.models_processor.device).contiguous()
self.models_processor.run_hyperswap256(input_face_disc, latent, swapper_output, version)

swapper_output = torch.squeeze(swapper_output)
swapper_output = swapper_output.permute(1, 2, 0)

output = swapper_output.clone()
prev_face = input_face_affined.clone()
input_face_affined = output.clone()

output = torch.mul(output, 255)
output = torch.clamp(output, 0, 255)

elif swapper_model == 'SimSwap512':
for k in range(itex):
input_face_disc = input_face_affined.permute(2, 0, 1)
Expand Down
15 changes: 14 additions & 1 deletion app/ui/widgets/swapper_layout_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,20 @@
'SwapModelSelection': {
'level': 1,
'label': 'Swapper Model',
'options': ['Inswapper128', 'InStyleSwapper256 Version A', 'InStyleSwapper256 Version B', 'InStyleSwapper256 Version C', 'DeepFaceLive (DFM)', 'SimSwap512', 'GhostFace-v1', 'GhostFace-v2', 'GhostFace-v3', 'CSCS'], 'default': 'Inswapper128',
'options': [
'Inswapper128',
'InStyleSwapper256 Version A',
'InStyleSwapper256 Version B',
'InStyleSwapper256 Version C',
'Hyperswap256 Version A',
'Hyperswap256 Version B',
'Hyperswap256 Version C',
'DeepFaceLive (DFM)',
'SimSwap512',
'GhostFace-v1',
'GhostFace-v2',
'GhostFace-v3',
'CSCS'], 'default': 'Inswapper128',
'help': 'Choose which swapper model to use for face swapping.'
},
'SwapperResSelection': {
Expand Down