diff --git a/facefusion/inference_manager.py b/facefusion/inference_manager.py
index cd0470f..703835e 100644
--- a/facefusion/inference_manager.py
+++ b/facefusion/inference_manager.py
@@ -5,7 +5,7 @@ from onnxruntime import InferenceSession
 
 from facefusion import process_manager, state_manager
 from facefusion.app_context import detect_app_context
-from facefusion.execution import create_inference_execution_providers
+from facefusion.execution import create_inference_execution_providers, has_execution_provider
 from facefusion.filesystem import is_file
 from facefusion.typing import DownloadSet, ExecutionProvider, InferencePool, InferencePoolSet
 
@@ -29,7 +29,7 @@ def get_inference_pool(model_context : str, model_sources : DownloadSet) -> Infe
 	if app_context == 'ui' and INFERENCE_POOLS.get('cli').get(inference_context):
 		INFERENCE_POOLS['ui'][inference_context] = INFERENCE_POOLS.get('cli').get(inference_context)
 	if not INFERENCE_POOLS.get(app_context).get(inference_context):
-		INFERENCE_POOLS[app_context][inference_context] = create_inference_pool(model_sources, state_manager.get_item('execution_device_id'), state_manager.get_item('execution_providers'))
+		INFERENCE_POOLS[app_context][inference_context] = create_inference_pool(model_sources, state_manager.get_item('execution_device_id'), resolve_execution_providers(model_context))
 	return INFERENCE_POOLS.get(app_context).get(inference_context)
 
 
@@ -63,3 +63,9 @@ def create_inference_session(model_path : str, execution_device_id : str, execut
 def get_inference_context(model_context : str) -> str:
 	inference_context = model_context + '.' + '_'.join(state_manager.get_item('execution_providers'))
 	return inference_context
+
+
+def resolve_execution_providers(model_context : str) -> List[ExecutionProvider]:
+	if has_execution_provider('coreml') and model_context == 'facefusion.processors.modules.frame_colorizer':
+		return [ 'cpu' ]
+	return state_manager.get_item('execution_providers')