Fix/remote inference pool lookups (#848)

* Fix edge case when offline and the inference session has no model; prevent inference session creation

* Fix edge case when offline and inference session has no model, Prevent inference session creation

* Fix edge case when offline and inference session has no model, Prevent inference session creation

* Fix edge case when offline and inference session has no model, Prevent inference session creation
This commit is contained in:
Henry Ruhs
2025-01-07 13:44:01 +01:00
committed by henryruhs
parent 87350eb45f
commit c5bc7c50a5
4 changed files with 39 additions and 11 deletions

View File

@@ -6,6 +6,7 @@ from onnxruntime import InferenceSession
from facefusion import process_manager, state_manager from facefusion import process_manager, state_manager
from facefusion.app_context import detect_app_context from facefusion.app_context import detect_app_context
from facefusion.execution import create_inference_execution_providers from facefusion.execution import create_inference_execution_providers
from facefusion.filesystem import is_file
from facefusion.thread_helper import thread_lock from facefusion.thread_helper import thread_lock
from facefusion.typing import DownloadSet, ExecutionProvider, InferencePool, InferencePoolSet from facefusion.typing import DownloadSet, ExecutionProvider, InferencePool, InferencePoolSet
@@ -16,6 +17,18 @@ INFERENCE_POOLS : InferencePoolSet =\
} }
def has_inference_model(model_context : str, model_name : str) -> bool:
	# Wait out any in-flight download check before inspecting the pools.
	while process_manager.is_checking():
		sleep(0.5)

	# Look up the pool for the current app context and inference context,
	# then test whether the requested model session is already present.
	inference_pool = INFERENCE_POOLS.get(detect_app_context()).get(get_inference_context(model_context))
	return bool(inference_pool) and model_name in inference_pool
def get_inference_pool(model_context : str, model_sources : DownloadSet) -> InferencePool: def get_inference_pool(model_context : str, model_sources : DownloadSet) -> InferencePool:
global INFERENCE_POOLS global INFERENCE_POOLS
@@ -39,7 +52,10 @@ def create_inference_pool(model_sources : DownloadSet, execution_device_id : str
inference_pool : InferencePool = {} inference_pool : InferencePool = {}
for model_name in model_sources.keys(): for model_name in model_sources.keys():
inference_pool[model_name] = create_inference_session(model_sources.get(model_name).get('path'), execution_device_id, execution_providers) model_path = model_sources.get(model_name).get('path')
if is_file(model_path):
inference_pool[model_name] = create_inference_session(model_path, execution_device_id, execution_providers)
return inference_pool return inference_pool

View File

@@ -238,6 +238,10 @@ def create_static_model_set(download_scope : DownloadScope) -> ModelSet:
return model_set return model_set
def has_inference_model(model_name : str) -> bool:
	# Ask the shared inference manager whether this module's pool already holds the model.
	model_context = __name__
	return inference_manager.has_inference_model(model_context, model_name)
def get_inference_pool() -> InferencePool:
	# Resolve this module's model sources and hand them to the shared manager.
	sources = get_model_options().get('sources')
	return inference_manager.get_inference_pool(__name__, sources)
@@ -357,11 +361,13 @@ def forward(crop_vision_frame : VisionFrame, deep_swapper_morph : DeepSwapperMor
def has_morph_input() -> bool:
	# The morph slider only applies when the loaded deep swapper model
	# exposes a 'morph_value:0' input; guard against an absent session.
	if not has_inference_model('deep_swapper'):
		return False
	session = get_inference_pool().get('deep_swapper')
	return any(session_input.name == 'morph_value:0' for session_input in session.get_inputs())

View File

@@ -221,6 +221,10 @@ def create_static_model_set(download_scope : DownloadScope) -> ModelSet:
} }
def has_inference_model(model_name : str) -> bool:
	# Ask the shared inference manager whether this module's pool already holds the model.
	model_context = __name__
	return inference_manager.has_inference_model(model_context, model_name)
def get_inference_pool() -> InferencePool:
	# Resolve this module's model sources and hand them to the shared manager.
	sources = get_model_options().get('sources')
	return inference_manager.get_inference_pool(__name__, sources)
@@ -324,11 +328,13 @@ def forward(crop_vision_frame : VisionFrame, face_enhancer_weight : FaceEnhancer
def has_weight_input() -> bool:
	# The blend weight only applies when the loaded face enhancer model
	# exposes a 'weight' input; guard against an absent session.
	if not has_inference_model('face_enhancer'):
		return False
	session = get_inference_pool().get('face_enhancer')
	return any(session_input.name == 'weight' for session_input in session.get_inputs())

View File

@@ -31,7 +31,7 @@ def render() -> None:
step = calc_int_step(processors_choices.deep_swapper_morph_range), step = calc_int_step(processors_choices.deep_swapper_morph_range),
minimum = processors_choices.deep_swapper_morph_range[0], minimum = processors_choices.deep_swapper_morph_range[0],
maximum = processors_choices.deep_swapper_morph_range[-1], maximum = processors_choices.deep_swapper_morph_range[-1],
visible = has_deep_swapper and has_morph_input() visible = has_morph_input()
) )
register_ui_component('deep_swapper_model_dropdown', DEEP_SWAPPER_MODEL_DROPDOWN) register_ui_component('deep_swapper_model_dropdown', DEEP_SWAPPER_MODEL_DROPDOWN)
register_ui_component('deep_swapper_morph_slider', DEEP_SWAPPER_MORPH_SLIDER) register_ui_component('deep_swapper_morph_slider', DEEP_SWAPPER_MORPH_SLIDER)
@@ -48,7 +48,7 @@ def listen() -> None:
def remote_update(processors : List[str]) -> Tuple[gradio.Dropdown, gradio.Slider]: def remote_update(processors : List[str]) -> Tuple[gradio.Dropdown, gradio.Slider]:
has_deep_swapper = 'deep_swapper' in processors has_deep_swapper = 'deep_swapper' in processors
return gradio.Dropdown(visible = has_deep_swapper), gradio.Slider(visible = has_deep_swapper and has_morph_input()) return gradio.Dropdown(visible = has_deep_swapper), gradio.Slider(visible = has_morph_input())
def update_deep_swapper_model(deep_swapper_model : DeepSwapperModel) -> Tuple[gradio.Dropdown, gradio.Slider]: def update_deep_swapper_model(deep_swapper_model : DeepSwapperModel) -> Tuple[gradio.Dropdown, gradio.Slider]: