mpt-7B-inference/download_model.py

21 lines
651 B
Python
Raw Normal View History

2023-06-26 05:36:27 +00:00
import os
from huggingface_hub import hf_hub_download
2023-06-26 07:10:40 +00:00
def download_mpt_quant(destination_folder: str, repo_id: str, model_filename: str) -> str:
    """Download a quantized MPT model file from the Hugging Face Hub.

    Args:
        destination_folder: Directory used as the Hub cache directory;
            created by ``hf_hub_download`` if it does not exist.
        repo_id: Hub repository id, e.g. ``"TheBloke/mpt-30B-chat-GGML"``.
        model_filename: Name of the model file inside the repository.

    Returns:
        Local filesystem path of the downloaded file (inside the Hub
        cache layout under ``destination_folder``).
    """
    # Pass the folder through unchanged: hf_hub_download accepts both
    # relative and absolute paths. The previous os.path.relpath() call
    # added nothing and could raise ValueError on Windows when the cwd
    # and the destination are on different drives.
    return hf_hub_download(
        repo_id=repo_id,
        filename=model_filename,
        cache_dir=destination_folder,
    )
if __name__ == "__main__":
    # Source file on the Hub:
    # https://huggingface.co/TheBloke/mpt-30B-chat-GGML/blob/main/mpt-30b-chat.ggmlv0.q4_1.bin
    destination_folder = "modelz"
    repo_id = "TheBloke/mpt-30B-chat-GGML"
    model_filename = "mpt-30b-chat.ggmlv0.q4_1.bin"
    # Fetch the quantized chat model into the local cache folder.
    download_mpt_quant(destination_folder, repo_id, model_filename)