mpt-7B-inference/download_model.py

22 lines
687 B
Python
Raw Normal View History

2023-06-26 05:36:27 +00:00
import os
from huggingface_hub import hf_hub_download
2023-06-26 07:10:40 +00:00
def download_mpt_quant(destination_folder: str, repo_id: str, model_filename: str):
    """Fetch a quantized MPT model file from the Hugging Face Hub.

    Downloads ``model_filename`` from the repo ``repo_id`` into
    ``destination_folder`` (resolved to an absolute path first) and
    returns the local path reported by ``hf_hub_download``.
    """
    target_dir = os.path.abspath(destination_folder)
    return hf_hub_download(
        filename=model_filename,
        repo_id=repo_id,
        local_dir=target_dir,
        local_dir_use_symlinks=True,
    )
if __name__ == "__main__":
    """full url: https://huggingface.co/TheBloke/mpt-30B-chat-GGML/blob/main/mpt-30b-chat.ggmlv0.q4_1.bin"""
    # Pull the 4-bit quantized MPT-30B chat model into the local "models" dir.
    hub_repo = "TheBloke/mpt-30B-chat-GGML"
    quant_file = "mpt-30b-chat.ggmlv0.q4_1.bin"
    target_folder = "models"
    download_mpt_quant(target_folder, hub_repo, quant_file)