Feat/simplify hashes sources download (#814)
* Extract download directory path from assets path
* Fix lint
* Fix force-download command
* Fix URLs in frame enhancer
parent 74c61108dd
commit 9d0c377aa0
@@ -54,11 +54,10 @@ def get_model_options() -> ModelOptions:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def analyse_stream(vision_frame : VisionFrame, video_fps : Fps) -> bool:
@@ -16,7 +16,7 @@ from facefusion.face_analyser import get_average_face, get_many_faces, get_one_f
 from facefusion.face_selector import sort_and_filter_faces
 from facefusion.face_store import append_reference_face, clear_reference_faces, get_reference_faces
 from facefusion.ffmpeg import copy_image, extract_frames, finalize_image, merge_video, replace_audio, restore_audio
-from facefusion.filesystem import filter_audio_paths, is_image, is_video, list_directory, resolve_file_pattern, resolve_relative_path
+from facefusion.filesystem import filter_audio_paths, is_image, is_video, list_directory, resolve_file_pattern
 from facefusion.jobs import job_helper, job_manager, job_runner
 from facefusion.jobs.job_list import compose_job_list
 from facefusion.memory import limit_system_memory
@@ -158,7 +158,6 @@ def conditional_append_reference_faces() -> None:


 def force_download() -> ErrorCode:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	available_processors = list_directory('facefusion/processors/modules')
 	common_modules =\
 	[
@@ -179,7 +178,7 @@ def force_download() -> ErrorCode:
 		model_sources = model.get('sources')

 		if model_hashes and model_sources:
-			if not conditional_download_hashes(download_directory_path, model_hashes) or not conditional_download_sources(download_directory_path, model_sources):
+			if not conditional_download_hashes(model_hashes) or not conditional_download_sources(model_sources):
 				return 1

 	return 0
@@ -48,7 +48,7 @@ def get_download_size(url : str) -> int:
 	return 0


-def conditional_download_hashes(download_directory_path : str, hashes : DownloadSet) -> bool:
+def conditional_download_hashes(hashes : DownloadSet) -> bool:
 	hash_paths = [ hashes.get(hash_key).get('path') for hash_key in hashes.keys() ]

 	process_manager.check()
@@ -58,6 +58,7 @@ def conditional_download_hashes(download_directory_path : str, hashes : Download
 		for index in hashes:
 			if hashes.get(index).get('path') in invalid_hash_paths:
 				invalid_hash_url = hashes.get(index).get('url')
+				download_directory_path = os.path.dirname(hashes.get(index).get('path'))
 				conditional_download(download_directory_path, [ invalid_hash_url ])

 	valid_hash_paths, invalid_hash_paths = validate_hash_paths(hash_paths)
@@ -73,7 +74,7 @@ def conditional_download_hashes(download_directory_path : str, hashes : Download
 	return not invalid_hash_paths


-def conditional_download_sources(download_directory_path : str, sources : DownloadSet) -> bool:
+def conditional_download_sources(sources : DownloadSet) -> bool:
 	source_paths = [ sources.get(source_key).get('path') for source_key in sources.keys() ]

 	process_manager.check()
@@ -83,6 +84,7 @@ def conditional_download_sources(download_directory_path : str, sources : Downlo
 		for index in sources:
 			if sources.get(index).get('path') in invalid_source_paths:
 				invalid_source_url = sources.get(index).get('url')
+				download_directory_path = os.path.dirname(sources.get(index).get('path'))
 				conditional_download(download_directory_path, [ invalid_source_url ])

 	valid_source_paths, invalid_source_paths = validate_source_paths(source_paths)
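Taken together, these four hunks move the directory decision into the download helpers: instead of receiving a shared download_directory_path, conditional_download_hashes and conditional_download_sources now derive the directory from the 'path' of each entry they are about to fetch. A minimal sketch of that derivation (not part of the diff; the key, URL and path below are illustrative, not taken from the repository):

import os

# Illustrative DownloadSet-shaped mapping: each entry carries the URL to fetch
# and the local path the downloaded file should end up at.
model_hashes =\
{
	'example_model':
	{
		'url': 'https://example.com/example_model.hash',
		'path': '.assets/models/iperov/example_model.hash'
	}
}

for index in model_hashes:
	# The download directory is derived per entry from its target path, so nested
	# layouts such as .assets/models/iperov/ need no extra wiring from callers.
	download_directory_path = os.path.dirname(model_hashes.get(index).get('path'))
	print(download_directory_path) # .assets/models/iperov

The deep_swapper hunks further down rely on exactly this: their hash and source paths move into an iperov/ subdirectory without any change to the calling code.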
@@ -51,11 +51,10 @@ def get_model_options() -> ModelOptions:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def classify_face(temp_vision_frame : VisionFrame, face_landmark_5 : FaceLandmark5) -> Tuple[Gender, Age, Race]:
@@ -101,10 +101,9 @@ def collect_model_downloads() -> Tuple[DownloadSet, DownloadSet]:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes, model_sources = collect_model_downloads()

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def detect_faces(vision_frame : VisionFrame) -> Tuple[List[BoundingBox], List[Score], List[FaceLandmark5]]:
@@ -105,10 +105,9 @@ def collect_model_downloads() -> Tuple[DownloadSet, DownloadSet]:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes, model_sources = collect_model_downloads()

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def detect_face_landmarks(vision_frame : VisionFrame, bounding_box : BoundingBox, face_angle : Angle) -> Tuple[FaceLandmark68, Score]:
@@ -93,10 +93,9 @@ def collect_model_downloads() -> Tuple[DownloadSet, DownloadSet]:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes, model_sources = collect_model_downloads()

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 @lru_cache(maxsize = None)
@@ -49,11 +49,10 @@ def get_model_options() -> ModelOptions:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def calc_embedding(temp_vision_frame : VisionFrame, face_landmark_5 : FaceLandmark5) -> Tuple[Embedding, Embedding]:
@@ -91,11 +91,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -33,7 +33,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'emma_watson_224.hash'),
-					'path': resolve_relative_path('../.assets/models/emma_watson_224.hash')
+					'path': resolve_relative_path('../.assets/models/iperov/emma_watson_224.hash')
 				}
 			},
 			'sources':
@@ -41,7 +41,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'emma_watson_224.dfm'),
-					'path': resolve_relative_path('../.assets/models/emma_watson_224.dfm')
+					'path': resolve_relative_path('../.assets/models/iperov/emma_watson_224.dfm')
 				}
 			},
 			'size': (224, 224),
@@ -55,7 +55,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'jackie_chan_224.hash'),
-					'path': resolve_relative_path('../.assets/models/jackie_chan_224.hash')
+					'path': resolve_relative_path('../.assets/models/iperov/jackie_chan_224.hash')
 				}
 			},
 			'sources':
@@ -63,7 +63,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'jackie_chan_224.dfm'),
-					'path': resolve_relative_path('../.assets/models/jackie_chan_224.dfm')
+					'path': resolve_relative_path('../.assets/models/iperov/jackie_chan_224.dfm')
 				}
 			},
 			'size': (224, 224),
@@ -77,7 +77,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'keanu_reeves_320.hash'),
-					'path': resolve_relative_path('../.assets/models/keanu_reeves_320.hash')
+					'path': resolve_relative_path('../.assets/models/iperov/keanu_reeves_320.hash')
 				}
 			},
 			'sources':
@@ -85,7 +85,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'keanu_reeves_320.dfm'),
-					'path': resolve_relative_path('../.assets/models/keanu_reeves_320.dfm')
+					'path': resolve_relative_path('../.assets/models/iperov/keanu_reeves_320.dfm')
 				}
 			},
 			'size': (320, 320),
@@ -99,7 +99,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'sylvester_stallone_224.hash'),
-					'path': resolve_relative_path('../.assets/models/sylvester_stallone_224.hash')
+					'path': resolve_relative_path('../.assets/models/iperov/sylvester_stallone_224.hash')
 				}
 			},
 			'sources':
@@ -107,7 +107,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'sylvester_stallone_224.dfm'),
-					'path': resolve_relative_path('../.assets/models/sylvester_stallone_224.dfm')
+					'path': resolve_relative_path('../.assets/models/iperov/sylvester_stallone_224.dfm')
 				}
 			},
 			'size': (224, 224),
@@ -121,7 +121,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'taylor_swift_224.hash'),
-					'path': resolve_relative_path('../.assets/models/taylor_swift_224.hash')
+					'path': resolve_relative_path('../.assets/models/iperov/taylor_swift_224.hash')
 				}
 			},
 			'sources':
@@ -129,7 +129,7 @@ def create_model_set() -> ModelSet:
 				'deep_swapper':
 				{
 					'url': resolve_download_url_by_provider('huggingface', 'deepfacelive-models-iperov', 'taylor_swift_224.dfm'),
-					'path': resolve_relative_path('../.assets/models/taylor_swift_224.dfm')
+					'path': resolve_relative_path('../.assets/models/iperov/taylor_swift_224.dfm')
 				}
 			},
 			'size': (224, 224),
@@ -167,11 +167,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -102,11 +102,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -158,11 +158,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -249,11 +249,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -365,11 +365,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -157,11 +157,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -238,7 +238,7 @@ def create_model_set() -> ModelSet:
 			{
 				'frame_enhancer':
 				{
-					'url': resolve_download_url('models-3.0.0', 'real_web_photo_x4.hash'),
+					'url': resolve_download_url('models-3.1.0', 'real_web_photo_x4.hash'),
 					'path': resolve_relative_path('../.assets/models/real_web_photo_x4.hash')
 				}
 			},
@@ -246,7 +246,7 @@ def create_model_set() -> ModelSet:
 			{
 				'frame_enhancer':
 				{
-					'url': resolve_download_url('models-3.0.0', 'real_web_photo_x4.onnx'),
+					'url': resolve_download_url('models-3.1.0', 'real_web_photo_x4.onnx'),
 					'path': resolve_relative_path('../.assets/models/real_web_photo_x4.onnx')
 				}
 			},
@@ -412,11 +412,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -99,11 +99,10 @@ def apply_args(args : Args, apply_state_item : ApplyStateItem) -> None:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def pre_process(mode : ProcessMode) -> bool:
@@ -199,12 +199,19 @@ def create_execution_program() -> ArgumentParser:
 	return program


-def create_download_program() -> ArgumentParser:
+def create_download_providers_program() -> ArgumentParser:
 	program = ArgumentParser(add_help = False)
 	group_download = program.add_argument_group('download')
 	group_download.add_argument('--download-providers', help = wording.get('help.download_providers').format(choices = ', '.join(list(facefusion.choices.download_provider_set.keys()))), default = config.get_str_list('download.download_providers', 'github'), choices = list(facefusion.choices.download_provider_set.keys()), nargs = '+', metavar = 'DOWNLOAD_PROVIDERS')
+	job_store.register_job_keys([ 'download_providers' ])
+	return program
+
+
+def create_skip_download_program() -> ArgumentParser:
+	program = ArgumentParser(add_help = False)
+	group_download = program.add_argument_group('download')
 	group_download.add_argument('--skip-download', help = wording.get('help.skip_download'), action = 'store_true', default = config.get_bool_value('misc.skip_download'))
-	job_store.register_job_keys([ 'download_providers', 'skip_download' ])
+	job_store.register_job_keys([ 'skip_download' ])
 	return program


@@ -249,7 +256,7 @@ def collect_step_program() -> ArgumentParser:


 def collect_job_program() -> ArgumentParser:
-	return ArgumentParser(parents= [ create_execution_program(), create_download_program(), create_memory_program(), create_log_level_program() ], add_help = False)
+	return ArgumentParser(parents= [ create_execution_program(), create_download_providers_program(), create_skip_download_program(), create_memory_program(), create_log_level_program() ], add_help = False)


 def create_program() -> ArgumentParser:
@@ -261,7 +268,7 @@ def create_program() -> ArgumentParser:
 	sub_program.add_parser('run', help = wording.get('help.run'), parents = [ create_config_path_program(), create_temp_path_program(), create_jobs_path_program(), create_source_paths_program(), create_target_path_program(), create_output_path_program(), collect_step_program(), create_uis_program(), collect_job_program() ], formatter_class = create_help_formatter_large)
 	sub_program.add_parser('headless-run', help = wording.get('help.headless_run'), parents = [ create_config_path_program(), create_temp_path_program(), create_jobs_path_program(), create_source_paths_program(), create_target_path_program(), create_output_path_program(), collect_step_program(), collect_job_program() ], formatter_class = create_help_formatter_large)
 	sub_program.add_parser('batch-run', help = wording.get('help.batch_run'), parents = [ create_config_path_program(), create_temp_path_program(), create_jobs_path_program(), create_source_pattern_program(), create_target_pattern_program(), create_output_pattern_program(), collect_step_program(), collect_job_program() ], formatter_class = create_help_formatter_large)
-	sub_program.add_parser('force-download', help = wording.get('help.force_download'), parents = [ create_log_level_program() ], formatter_class = create_help_formatter_large)
+	sub_program.add_parser('force-download', help = wording.get('help.force_download'), parents = [ create_download_providers_program(), create_log_level_program() ], formatter_class = create_help_formatter_large)
 	# job manager
 	sub_program.add_parser('job-list', help = wording.get('help.job_list'), parents = [ create_job_status_program(), create_jobs_path_program(), create_log_level_program() ], formatter_class = create_help_formatter_large)
 	sub_program.add_parser('job-create', help = wording.get('help.job_create'), parents = [ create_job_id_program(), create_jobs_path_program(), create_log_level_program() ], formatter_class = create_help_formatter_large)
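Splitting the former create_download_program into create_download_providers_program and create_skip_download_program lets force-download reuse only the provider selection, so an invocation along the lines of "python facefusion.py force-download --download-providers huggingface" should now work (the subcommand and flag appear in this diff; the exact entry point does not). A minimal, self-contained sketch of the parent-parser split used here, with illustrative defaults instead of the wording/config/job_store plumbing from the real module:

from argparse import ArgumentParser

# Each concern gets its own parent parser (add_help = False so parents compose cleanly).
def create_download_providers_program() -> ArgumentParser:
	program = ArgumentParser(add_help = False)
	program.add_argument('--download-providers', nargs = '+', default = [ 'github' ])
	return program


def create_skip_download_program() -> ArgumentParser:
	program = ArgumentParser(add_help = False)
	program.add_argument('--skip-download', action = 'store_true')
	return program


# force-download composes only the providers parent, while job-level commands
# compose both, mirroring collect_job_program() in the hunk above.
force_download_program = ArgumentParser(parents = [ create_download_providers_program() ])
job_program = ArgumentParser(parents = [ create_download_providers_program(), create_skip_download_program() ], add_help = False)

print(force_download_program.parse_args([ '--download-providers', 'huggingface' ]))
print(job_program.parse_args([ '--skip-download' ]))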
@@ -47,11 +47,10 @@ def get_model_options() -> ModelOptions:


 def pre_check() -> bool:
-	download_directory_path = resolve_relative_path('../.assets/models')
 	model_hashes = get_model_options().get('hashes')
 	model_sources = get_model_options().get('sources')

-	return conditional_download_hashes(download_directory_path, model_hashes) and conditional_download_sources(download_directory_path, model_sources)
+	return conditional_download_hashes(model_hashes) and conditional_download_sources(model_sources)


 def batch_extract_voice(audio : Audio, chunk_size : int, step_size : int) -> Audio: