# mpt-7B-inference/download_model.py
# NOTE(review): the following page metadata was left behind by a web scrape
# (19 lines, 528 B, Python, "Raw Normal View History", 2023-06-26 05:36:27 +0000);
# commented out so the file parses as Python.
import os
from huggingface_hub import hf_hub_download
def download_mpt_quant(destination_folder):
    """Download the quantized MPT-30B-chat GGML weights from the Hugging Face Hub.

    Args:
        destination_folder: Directory to use as the Hub download cache
            (converted to a path relative to the current working directory).

    Returns:
        The local filesystem path of the downloaded model file, as returned
        by ``hf_hub_download``.
    """
    cache_dir = os.path.relpath(destination_folder)
    return hf_hub_download(
        repo_id="TheBloke/mpt-30B-chat-GGML",
        filename="mpt-30b-chat.ggmlv0.q4_1.bin",
        cache_dir=cache_dir,
    )
if __name__ == "__main__":
    # Full URL of the file being fetched:
    # https://huggingface.co/TheBloke/mpt-30B-chat-GGML/blob/main/mpt-30b-chat.ggmlv0.q4_1.bin
    models_dir = "models"
    download_mpt_quant(models_dir)