Skip to content

Commit

Permalink
Ask users to provide a Hugging Face token if no token is cached or passed …
Browse files Browse the repository at this point in the history
…to the program. (#81)

Ask users to provide a Hugging Face token if no token is cached or passed to the program.
  • Loading branch information
guoqingbao authored Aug 14, 2024
1 parent 4d1001a commit 2876f34
Showing 1 changed file with 24 additions and 1 deletion.
25 changes: 24 additions & 1 deletion src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,30 @@ async fn main() -> Result<(), APIError> {
safetensors_files
},
}),
_ => loader.download_model(model_id, None, args.hf_token, args.hf_token_path)?,
_ => {
if args.hf_token.is_none() && args.hf_token_path.is_none() {
//no token provided
let token_path = format!(
"{}/.cache/huggingface/token",
dirs::home_dir()
.ok_or(APIError::new_str("No home directory"))?
.display()
);
if !Path::new(&token_path).exists() {
//also no token cache
use std::io::Write;
let mut input_token = String::new();
println!("Please provide your huggingface token to download model:\n");
std::io::stdin()
.read_line(&mut input_token)
.expect("Failed to read token!");
std::fs::create_dir_all(Path::new(&token_path).parent().unwrap()).unwrap();
let mut output = std::fs::File::create(token_path).unwrap();
write!(output, "{}", input_token.trim()).expect("Failed to save token!");
}
}
loader.download_model(model_id, None, args.hf_token, args.hf_token_path)?
}
};

let dtype = match args.dtype.as_deref() {
Expand Down

0 comments on commit 2876f34

Please sign in to comment.