当前位置:   article > 正文

wget hugging face 模型 cmd 命令生成

wget hugging face

wget hugging face 模型 cmd 命令生成
文档:wget hugging face 模型 cmd 命令生成
链接:http://note.youdao.com/noteshare?id=294a8d1fab3afc774f5171a55659bcb1&sub=FB29F039A23B4055944D325DFDDCA19D


# Diffusers-layout files of the Linaqruf/anything-v3.0 repo.
# (The large standalone *.safetensors checkpoints were deliberately left out.)
links_anything_v3 = [
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/" + path
    for path in [
        "unet/config.json",
        "unet/diffusion_pytorch_model.bin",
        "vae/config.json",
        "vae/diffusion_pytorch_model.bin",
        "tokenizer/merges.txt",
        "tokenizer/special_tokens_map.json",
        "tokenizer/tokenizer_config.json",
        "tokenizer/vocab.json",
        "text_encoder/config.json",
        "text_encoder/pytorch_model.bin",
        "scheduler/scheduler_config.json",
        "safety_checker/config.json",
        "safety_checker/pytorch_model.bin",
        "feature_extractor/preprocessor_config.json",
        ".gitattributes",
        "README.md",
        "model_index.json",
    ]
]

# Full file listing of the prompthero/openjourney repo (diffusers layout
# plus the packaged mdjrny-v4 checkpoint files).
links_openjourney = [
    "https://huggingface.co/prompthero/openjourney/resolve/main/" + path
    for path in [
        ".gitattributes",
        "README.md",
        "mdjrny-v4.ckpt",
        "mdjrny-v4.safetensors",
        "model.safetensors",
        "model_index.json",
        "vae/config.json",
        "vae/diffusion_pytorch_model.bin",
        "vae/diffusion_pytorch_model.safetensors",
        "unet/config.json",
        "unet/diffusion_pytorch_model.bin",
        "unet/diffusion_pytorch_model.safetensors",
        "tokenizer/merges.txt",
        "tokenizer/special_tokens_map.json",
        "tokenizer/tokenizer_config.json",
        "tokenizer/vocab.json",
        "text_encoder/config.json",
        "text_encoder/model.safetensors",
        "text_encoder/pytorch_model.bin",
        "scheduler/scheduler_config.json",
        "safety_checker/config.json",
        "safety_checker/model.safetensors",
        "safety_checker/pytorch_model.bin",
        "feature_extractor/preprocessor_config.json",
    ]
]


# Full file listing of the daryl149/llama-2-7b-chat-hf repo (two-shard
# pytorch checkpoint plus tokenizer/config files).
links_llama = [
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/" + path
    for path in [
        ".gitattributes",
        "README.md",
        "config.json",
        "generation_config.json",
        "pytorch_model-00001-of-00002.bin",
        "pytorch_model-00002-of-00002.bin",
        "pytorch_model.bin.index.json",
        "special_tokens_map.json",
        "tokenizer.json",
        "tokenizer.model",
        "tokenizer_config.json",
    ]
]

# openai/whisper-large-v2 file list. In the original script this was stored
# straight into `links` and then immediately overwritten by the next line,
# so the dead store is fixed by keeping it under its own name.
links_whisper = [
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/" + path
    for path in [
        ".gitattributes",
        "README.md",
        "added_tokens.json",
        "config.json",
        "flax_model.msgpack",
        "generation_config.json",
        "merges.txt",
        "normalizer.json",
        "preprocessor_config.json",
        "pytorch_model.bin",
        "special_tokens_map.json",
        "tf_model.h5",
        "tokenizer.json",
        "tokenizer_config.json",
        "vocab.json",
    ]
]
# Select the llama list (itself superseded by later `links = [...]`
# assignments further down the script).
links = links_llama

# mlc-chat Llama-2-7b-chat q4f16_1 files. The shard sequence reproduces a
# lexicographically sorted directory listing that was truncated after
# params_shard_36.bin: shards 0-3, 10-36 and 100-114 are present; 4-9 and
# 37-99 are absent, exactly as in the original hand-pasted list.
# NOTE: this assignment is superseded by the later `links = [...]`
# assignments, so it is never downloaded as-is.
_MLC_BASE = "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main"
links = [
    f"{_MLC_BASE}/.gitattributes",
    f"{_MLC_BASE}/added_tokens.json",
    f"{_MLC_BASE}/mlc-chat-config.json",
    f"{_MLC_BASE}/ndarray-cache.json",
] + sorted(
    # sorted() on the full URLs yields the lexicographic shard order of the
    # original listing (0, 1, 10, 100, ..., 109, 11, 110, ..., 36).
    f"{_MLC_BASE}/params_shard_{i}.bin"
    for i in [*range(0, 4), *range(10, 37), *range(100, 115)]
)



# Partial (re-)download selection for mlc-chat Llama-2-7b-chat q4f16_1:
# in the original script the full listing was pasted here with everything
# commented out except shards 25-31 and shard 3 (in that lexicographic
# order). Only the active entries are kept, in the same order.
# NOTE: superseded by the `links = [...]` assignment below.
links = [
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_%d.bin" % n
    for n in (25, 26, 27, 28, 29, 3, 30, 31)
]

# Effective download list (last assignment wins): every file of the
# TheBloke/Llama-2-13B-chat-GGML repo, i.e. all ggmlv3 quantizations plus
# the license/config metadata.
links = [
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/" + name
    for name in [
        ".gitattributes",
        "LICENSE",
        "Notice",
        "README.md",
        "USE_POLICY.md",
        "config.json",
        "llama-2-13b-chat.ggmlv3.q2_K.bin",
        "llama-2-13b-chat.ggmlv3.q3_K_L.bin",
        "llama-2-13b-chat.ggmlv3.q3_K_M.bin",
        "llama-2-13b-chat.ggmlv3.q3_K_S.bin",
        "llama-2-13b-chat.ggmlv3.q4_0.bin",
        "llama-2-13b-chat.ggmlv3.q4_1.bin",
        "llama-2-13b-chat.ggmlv3.q4_K_M.bin",
        "llama-2-13b-chat.ggmlv3.q4_K_S.bin",
        "llama-2-13b-chat.ggmlv3.q5_0.bin",
        "llama-2-13b-chat.ggmlv3.q5_1.bin",
        "llama-2-13b-chat.ggmlv3.q5_K_M.bin",
        "llama-2-13b-chat.ggmlv3.q5_K_S.bin",
        "llama-2-13b-chat.ggmlv3.q6_K.bin",
        "llama-2-13b-chat.ggmlv3.q8_0.bin",
    ]
]
# ---- download-root configuration ----------------------------------------
# Earlier roots kept for reference:
#   "/huggingface.co", "", "/kaggle/working/", r"G:\file", "/j05025/model"
# Renamed from the original "preffix_dir" (typo); the name was only used on
# the next line, so the rename is self-contained.
prefix_dir = rf"E:\addd\model"

# Every downloaded file lands under
#   <prefix_dir>/j05025/model/<user>/<repo>/<file path>
repo_base_dir = f"{prefix_dir}/j05025/model"


def path_get(lst: list):
    """Join path components into a single '/'-separated string."""
    separator = "/"
    return separator.join(lst)

def path_get_parent_dir(lst: list):
    """Return the '/'-joined parent path: all components except the last."""
    parent_parts = lst[:-1]
    return "/".join(parent_parts)

from top.starp.util import file_util
# file_util.check_and_create_file_parent_dir()

def obj_name():
    """wget flag (with surrounding spaces) for naming the output file."""
    return " -O "

# obj_name=' -O  '

def file_path():
    """wget flag (with surrounding spaces) for the target directory prefix."""
    return " -P "

import os 
do_background=True  # when True, "&" is appended so each wget is launched in the background
do_download=False  # when True, actually execute the wget command; when False, dry-run (print only)
# do_download=False
def parse_link(link):
    """Mirror one Hugging Face `resolve` URL below ``repo_base_dir``.

    Expects links of the form
    ``https://huggingface.co/<user>/<repo>/resolve/<rev>/<file path...>``.
    Builds ``<repo_base_dir>/<user>/<repo>/<file path>``, ensures the parent
    directory exists, prints the wget command, and executes it only when the
    module-level ``do_download`` flag is set.
    """
    parts = link.split("/")
    username = parts[3]
    repoName = parts[4]
    fileName = parts[7:]  # everything after .../resolve/<rev>/
    fileNamePath = path_get(fileName)
    abs_path = path_get([repo_base_dir, username, repoName, fileNamePath])
    file_util.check_and_create_file_parent_dir(abs_path)
    background_mark = "&" if do_background else ""
    # Quote the path and URL so spaces (common in Windows paths) do not
    # split the command. NOTE(review): "&" backgrounds the job in POSIX
    # shells; on Windows cmd it chains commands instead — confirm intent.
    wget_cmd = f'wget -O "{abs_path}" "{link}" {background_mark}'
    print(wget_cmd)
    if do_download:
        os.system(wget_cmd)
# https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_25.bin

def getLinks(fromIdx=101, toIdxButNot=112, maxCnt=22):
    """Build shard URLs for indices [fromIdx, toIdxButNot), capped at maxCnt.

    Equivalent to the original counting loop: the range stops either at
    toIdxButNot (exclusive) or after maxCnt links, whichever comes first.
    """
    base = "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main"
    stop = min(toIdxButNot, fromIdx + maxCnt)
    return [f"{base}/params_shard_{i}.bin" for i in range(fromIdx, stop)]


# links=getLinks(fromIdx=101,toIdxButNot=112,maxCnt=22)

# Drive the script: print (and optionally download) every selected link.
for link in links:
    parse_link(link=link)


# pp=path_get_parent_dir(["dada","hfuiahia"])
# D:\proj\python\st-util\wget_hug_midj.py
# print(pp)
  • 1
  • 2
  • 3
  • 4
  • 5
  • 6
  • 7
  • 8
  • 9
  • 10
  • 11
  • 12
  • 13
  • 14
  • 15
  • 16
  • 17
  • 18
  • 19
  • 20
  • 21
  • 22
  • 23
  • 24
  • 25
  • 26
  • 27
  • 28
  • 29
  • 30
  • 31
  • 32
  • 33
  • 34
  • 35
  • 36
  • 37
  • 38
  • 39
  • 40
  • 41
  • 42
  • 43
  • 44
  • 45
  • 46
  • 47
  • 48
  • 49
  • 50
  • 51
  • 52
  • 53
  • 54
  • 55
  • 56
  • 57
  • 58
  • 59
  • 60
  • 61
  • 62
  • 63
  • 64
  • 65
  • 66
  • 67
  • 68
  • 69
  • 70
  • 71
  • 72
  • 73
  • 74
  • 75
  • 76
  • 77
  • 78
  • 79
  • 80
  • 81
  • 82
  • 83
  • 84
  • 85
  • 86
  • 87
  • 88
  • 89
  • 90
  • 91
  • 92
  • 93
  • 94
  • 95
  • 96
  • 97
  • 98
  • 99
  • 100
  • 101
  • 102
  • 103
  • 104
  • 105
  • 106
  • 107
  • 108
  • 109
  • 110
  • 111
  • 112
  • 113
  • 114
  • 115
  • 116
  • 117
  • 118
  • 119
  • 120
  • 121
  • 122
  • 123
  • 124
  • 125
  • 126
  • 127
  • 128
  • 129
  • 130
  • 131
  • 132
  • 133
  • 134
  • 135
  • 136
  • 137
  • 138
  • 139
  • 140
  • 141
  • 142
  • 143
  • 144
  • 145
  • 146
  • 147
  • 148
  • 149
  • 150
  • 151
  • 152
  • 153
  • 154
  • 155
  • 156
  • 157
  • 158
  • 159
  • 160
  • 161
  • 162
  • 163
  • 164
  • 165
  • 166
  • 167
  • 168
  • 169
  • 170
  • 171
  • 172
  • 173
  • 174
  • 175
  • 176
  • 177
  • 178
  • 179
  • 180
  • 181
  • 182
  • 183
  • 184
  • 185
  • 186
  • 187
  • 188
  • 189
  • 190
  • 191
  • 192
  • 193
  • 194
  • 195
  • 196
  • 197
  • 198
  • 199
  • 200
  • 201
  • 202
  • 203
  • 204
  • 205
  • 206
  • 207
  • 208
  • 209
  • 210
  • 211
  • 212
  • 213
  • 214
  • 215
  • 216
  • 217
  • 218
  • 219
  • 220
  • 221
  • 222
  • 223
  • 224
  • 225
  • 226
  • 227
  • 228
  • 229
  • 230
  • 231
  • 232
  • 233
  • 234
  • 235
  • 236
  • 237
  • 238
  • 239
  • 240
  • 241
  • 242
  • 243
  • 244
  • 245
  • 246
  • 247
  • 248
  • 249
  • 250
  • 251
  • 252
  • 253
  • 254
  • 255
  • 256
  • 257
  • 258
  • 259
  • 260
  • 261
  • 262
  • 263
  • 264
  • 265
  • 266
  • 267
  • 268
  • 269
  • 270
  • 271
  • 272
  • 273
  • 274
  • 275
  • 276
  • 277
  • 278
  • 279
  • 280
  • 281
  • 282
  • 283
  • 284
  • 285
  • 286
  • 287
  • 288
  • 289
  • 290
  • 291
  • 292
  • 293
  • 294
  • 295
  • 296
  • 297
  • 298
  • 299
  • 300
  • 301
  • 302
声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/知新_RL/article/detail/723864
推荐阅读
相关标签
  

闽ICP备14008679号