-
Notifications
You must be signed in to change notification settings - Fork 5
Cria comando para realizar download de modelo IA através do huggingface #26
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from all commits
4c98845
7fcb03e
050297e
b0f1a31
a32aadf
a4161d8
5de0daf
71c0a4b
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
This file was deleted.
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,48 @@ | ||
import os
from pathlib import Path

from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from huggingface_hub import hf_hub_download, login
class Command(BaseCommand):
    """Management command that downloads a GGUF model file from HuggingFace Hub.

    Requires the ``HF_TOKEN`` environment variable to be set (used to
    authenticate against the Hub before downloading).
    """

    help = "Download the model from HuggingFace"

    def add_arguments(self, parser):
        """Register the command-line options for this command."""
        parser.add_argument(
            "--dir",
            type=str,
            # Prefer the project-wide LLAMA_MODEL_DIR setting (see
            # settings/base.py, per review note); fall back to the previous
            # hardcoded path so existing deployments keep working.
            default=getattr(settings, "LLAMA_MODEL_DIR", "llama3/llama-3.2"),
            help="Directory to download the model",
        )
        parser.add_argument(
            "--repo",
            type=str,
            default="hugging-quants/Llama-3.2-3B-Instruct-Q4_K_M-GGUF",
            help="HuggingFace repository id to download from",
        )
        parser.add_argument(
            "--filename",
            type=str,
            default="llama-3.2-3b-instruct-q4_k_m.gguf",
            help="Model name",
        )
        parser.add_argument("--force", action="store_true", help="Force download")

    def handle(self, *args, **options):
        """Authenticate with the Hub and download the requested model file.

        Raises:
            CommandError: if the ``HF_TOKEN`` environment variable is unset.
        """
        token = os.getenv("HF_TOKEN")
        if not token:
            raise CommandError("You need to set the HF_TOKEN environment variable")
        login(token=token, add_to_git_credential=False)

        target_dir = Path(options["dir"])
        target_dir.mkdir(parents=True, exist_ok=True)

        # NOTE(review): the original call passed local_dir_use_symlinks=False
        # and resume_download=True; both are deprecated no-ops in current
        # huggingface_hub (downloads to local_dir are real files and resume by
        # default), so they are dropped to avoid FutureWarnings.
        downloaded_file = hf_hub_download(
            repo_id=options["repo"],
            filename=options["filename"],
            local_dir=str(target_dir),
            force_download=options["force"],
        )
        self.stdout.write(self.style.SUCCESS(f"Downloaded {downloaded_file}"))
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The hardcoded default path 'llama3/llama-3.2' should reference the Django setting LLAMA_MODEL_DIR to maintain consistency with the configuration defined in settings/base.py.