Generating wget commands (cmd) to download Hugging Face models
Document: generating wget commands (cmd) to download Hugging Face models…
Link: http://note.youdao.com/noteshare?id=294a8d1fab3afc774f5171a55659bcb1&sub=FB29F039A23B4055944D325DFDDCA19D

The Python script below builds one wget command per file of a Hugging Face repository, using the files' /resolve/main/ download URLs, and mirrors each repository's directory layout under a local base directory. It can either just print the commands or run them directly.
import os

# The author's own utility package (st-util); check_and_create_file_parent_dir(path) creates the
# parent directory of `path` if it does not already exist (os.makedirs(..., exist_ok=True) is the
# standard-library equivalent).
from top.starp.util import file_util

links_anything_v3 = [
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/unet/config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/unet/diffusion_pytorch_model.bin",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/vae/config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/vae/diffusion_pytorch_model.bin",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/tokenizer/merges.txt",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/tokenizer/special_tokens_map.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/tokenizer/tokenizer_config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/tokenizer/vocab.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/text_encoder/config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/text_encoder/pytorch_model.bin",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/scheduler/scheduler_config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/safety_checker/config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/safety_checker/pytorch_model.bin",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/feature_extractor/preprocessor_config.json",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/.gitattributes",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/README.md",
    # Full single-file checkpoints (large), left commented out:
    # "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/anything-v3-fp16-pruned.safetensors",
    # "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/anything-v3-fp32-pruned.safetensors",
    # "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/anything-v3-full.safetensors",
    "https://huggingface.co/Linaqruf/anything-v3.0/resolve/main/model_index.json",
]

links_openjourney = [
    "https://huggingface.co/prompthero/openjourney/resolve/main/.gitattributes",
    "https://huggingface.co/prompthero/openjourney/resolve/main/README.md",
    "https://huggingface.co/prompthero/openjourney/resolve/main/mdjrny-v4.ckpt",
    "https://huggingface.co/prompthero/openjourney/resolve/main/mdjrny-v4.safetensors",
    "https://huggingface.co/prompthero/openjourney/resolve/main/model.safetensors",
    "https://huggingface.co/prompthero/openjourney/resolve/main/model_index.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/vae/config.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/vae/diffusion_pytorch_model.bin",
    "https://huggingface.co/prompthero/openjourney/resolve/main/vae/diffusion_pytorch_model.safetensors",
    "https://huggingface.co/prompthero/openjourney/resolve/main/unet/config.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/unet/diffusion_pytorch_model.bin",
    "https://huggingface.co/prompthero/openjourney/resolve/main/unet/diffusion_pytorch_model.safetensors",
    "https://huggingface.co/prompthero/openjourney/resolve/main/tokenizer/merges.txt",
    "https://huggingface.co/prompthero/openjourney/resolve/main/tokenizer/special_tokens_map.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/tokenizer/tokenizer_config.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/tokenizer/vocab.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/text_encoder/config.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/text_encoder/model.safetensors",
    "https://huggingface.co/prompthero/openjourney/resolve/main/text_encoder/pytorch_model.bin",
    "https://huggingface.co/prompthero/openjourney/resolve/main/scheduler/scheduler_config.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/safety_checker/config.json",
    "https://huggingface.co/prompthero/openjourney/resolve/main/safety_checker/model.safetensors",
    "https://huggingface.co/prompthero/openjourney/resolve/main/safety_checker/pytorch_model.bin",
    "https://huggingface.co/prompthero/openjourney/resolve/main/feature_extractor/preprocessor_config.json",
]

links_llama = [
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/.gitattributes",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/README.md",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/config.json",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/generation_config.json",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/pytorch_model-00001-of-00002.bin",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/pytorch_model-00002-of-00002.bin",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/pytorch_model.bin.index.json",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/special_tokens_map.json",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/tokenizer.json",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/tokenizer.model",
    "https://huggingface.co/daryl149/llama-2-7b-chat-hf/resolve/main/tokenizer_config.json",
]

links_whisper = [
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/.gitattributes",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/README.md",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/added_tokens.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/config.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/flax_model.msgpack",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/generation_config.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/merges.txt",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/normalizer.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/preprocessor_config.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/pytorch_model.bin",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/special_tokens_map.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/tf_model.h5",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/tokenizer.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/tokenizer_config.json",
    "https://huggingface.co/openai/whisper-large-v2/resolve/main/vocab.json",
]

links_mlc_chat = [
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/.gitattributes",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/added_tokens.json",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/mlc-chat-config.json",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/ndarray-cache.json",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_0.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_1.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_10.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_100.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_101.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_102.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_103.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_104.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_105.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_106.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_107.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_108.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_109.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_11.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_110.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_111.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_112.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_113.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_114.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_12.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_13.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_14.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_15.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_16.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_17.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_18.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_19.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_2.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_20.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_21.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_22.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_23.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_24.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_25.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_26.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_27.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_28.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_29.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_3.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_30.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_31.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_32.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_33.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_34.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_35.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_36.bin",
]

# A smaller selection of the mlc-chat parameter shards; getLinks() below generates such ranges
# programmatically.
links_mlc_chat_subset = [
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_25.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_26.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_27.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_28.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_29.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_3.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_30.bin",
    "https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_31.bin",
]

links_llama2_13b_ggml = [
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/.gitattributes",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/LICENSE",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/Notice",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/README.md",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/USE_POLICY.md",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/config.json",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q2_K.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q3_K_L.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q3_K_M.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q3_K_S.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q4_0.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q4_1.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q4_K_M.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q4_K_S.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q5_0.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q5_1.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q5_K_M.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q5_K_S.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q6_K.bin",
    "https://huggingface.co/TheBloke/Llama-2-13B-chat-GGML/resolve/main/llama-2-13b-chat.ggmlv3.q8_0.bin",
]

# Pick the file list to download; the last uncommented assignment wins.
# links = links_anything_v3
# links = links_openjourney
# links = links_whisper
# links = links_llama
# links = links_mlc_chat
# links = links_mlc_chat_subset
links = links_llama2_13b_ggml

# Local target directory; each repo is mirrored under repo_base_dir/<user>/<repo>/...
# prefix_dir = rf"G:\file"
# repo_base_dir = "/huggingface.co"
# repo_base_dir = "/kaggle/working/"
# repo_base_dir = "/j05025/model"
prefix_dir = rf"E:\addd\model"
repo_base_dir = f"{prefix_dir}/j05025/model"

do_background = True   # append "&" so a POSIX shell runs each wget in the background
do_download = False    # False: only print the wget commands; True: also run them via os.system


def path_get(lst: list):
    """Join path components with '/'."""
    return "/".join(lst)


def path_get_parent_dir(lst: list):
    """Join all components except the last one, i.e. the parent directory."""
    return "/".join(lst[:-1])


# Unused helpers kept for reference: the wget flags -O (write to a given file) and -P (download
# into a directory).
def obj_name():
    return " -O "


def file_path():
    return " -P "


def parse_link(link):
    """Turn one .../resolve/main/... URL into a wget command that mirrors the repo layout locally."""
    parts = link.split("/")
    username = parts[3]    # e.g. "Linaqruf"
    repoName = parts[4]    # e.g. "anything-v3.0"
    fileName = parts[7:]   # path inside the repo, i.e. everything after ".../resolve/main/"
    fileNamePath = path_get(fileName)
    abs_path = path_get([repo_base_dir, username, repoName, fileNamePath])
    file_util.check_and_create_file_parent_dir(abs_path)
    # wget_cmd = f"wget -P {abs_path} {link}"
    background_mark = "&" if do_background else ""
    wget_cmd = f"wget -O {abs_path} {link} {background_mark}"
    print(wget_cmd)
    if do_download:
        os.system(wget_cmd)


# e.g. https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_25.bin
def getLinks(fromIdx=101, toIdxButNot=112, maxCnt=22):
    """Generate params_shard_<i>.bin links for i in [fromIdx, toIdxButNot), at most maxCnt of them."""
    links = []
    idx = fromIdx
    for i in range(maxCnt):
        if idx >= toIdxButNot:
            break
        link = f"https://huggingface.co/mlc-ai/mlc-chat-Llama-2-7b-chat-hf-q4f16_1/resolve/main/params_shard_{idx}.bin"
        idx += 1
        links.append(link)
    return links


# links = getLinks(fromIdx=101, toIdxButNot=112, maxCnt=22)

for link in links:
    parse_link(link=link)

# D:\proj\python\st-util\wget_hug_midj.py