Kaichengalex committed on
Commit
275b732
·
verified ·
1 Parent(s): 9b94de3

Upload hfd.sh

Browse files
Files changed (1) hide show
  1. hfd.sh +329 -0
hfd.sh ADDED
@@ -0,0 +1,329 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Copy From MemAgent: https://github.com/BytedTsinghua-SIA/MemAgent/blob/main/hfd.sh
# Color definitions
RED='\033[0;31m'; GREEN='\033[0;32m'; YELLOW='\033[1;33m'; NC='\033[0m' # No Color

# Default to the hf-mirror endpoint, but respect a caller-provided HF_ENDPOINT.
# (Previously this export unconditionally clobbered the environment, which made
# the later `HF_ENDPOINT=${HF_ENDPOINT:-...}` fallback dead code.)
export HF_ENDPOINT=${HF_ENDPOINT:-https://hf-mirror.com}

# Allow a clean Ctrl-C: downloads are resumable, so just say so and exit.
trap 'printf "%b\nDownload interrupted. You can resume by re-running the command.\n%b" "$YELLOW" "$NC"; exit 1' INT
7
+
8
# Print usage information and exit with status 1.
# The heredoc delimiter is quoted so no accidental $-expansion can occur.
display_help() {
    cat << 'EOF'
Usage:
hfd <REPO_ID> [--include include_pattern1 include_pattern2 ...] [--exclude exclude_pattern1 exclude_pattern2 ...] [--hf_username username] [--hf_token token] [--tool aria2c|wget] [-x threads] [-j jobs] [--dataset] [--local-dir path] [--revision rev]

Description:
Downloads a model or dataset from Hugging Face using the provided repo ID.

Arguments:
REPO_ID The Hugging Face repo ID (Required)
Format: 'org_name/repo_name' or legacy format (e.g., gpt2)
Options:
include/exclude_pattern The patterns to match against file path, supports wildcard characters.
e.g., '--exclude *.safetensor *.md', '--include vae/*'.
--include (Optional) Patterns to include files for downloading (supports multiple patterns).
--exclude (Optional) Patterns to exclude files from downloading (supports multiple patterns).
--hf_username (Optional) Hugging Face username for authentication (not email).
--hf_token (Optional) Hugging Face token for authentication.
--tool (Optional) Download tool to use: aria2c (default) or wget.
-x (Optional) Number of download threads for aria2c (default: 4).
-j (Optional) Number of concurrent downloads for aria2c (default: 5).
--dataset (Optional) Flag to indicate downloading a dataset.
--local-dir (Optional) Directory path to store the downloaded data.
Defaults to the current directory with a subdirectory named 'repo_name'
if REPO_ID is composed of 'org_name/repo_name'.
--revision (Optional) Model/Dataset revision to download (default: main).

Example:
hfd gpt2
hfd bigscience/bloom-560m --exclude *.safetensors
hfd meta-llama/Llama-2-7b --hf_username myuser --hf_token mytoken -x 4
hfd lavita/medical-qa-shared-task-v1-toy --dataset
hfd bartowski/Phi-3.5-mini-instruct-exl2 --revision 5_0
EOF
    exit 1
}
44
+
45
# No repo ID, or an explicit help request, means: show usage and stop.
case "${1:-}" in
    ""|-h*|--help*) display_help ;;
esac

REPO_ID=$1
shift
49
+
50
# Tunable settings and their fallbacks; the option parser below may override them.
TOOL="aria2c"        # download backend: aria2c or wget
THREADS=4            # aria2c -x: connections per file
CONCURRENT=5         # aria2c -j: simultaneous downloads
REVISION="main"      # repo revision to fetch
HF_ENDPOINT=${HF_ENDPOINT:-"https://huggingface.co"}  # honor a pre-set endpoint
INCLUDE_PATTERNS=()  # file patterns to keep
EXCLUDE_PATTERNS=()  # file patterns to skip
58
+
59
# Ensure a numeric option value is a positive integer within bounds.
# $1 - human-readable option name (used in the error message)
# $2 - value to validate
# $3 - inclusive upper bound
# Exits 1 with a diagnostic on stderr when validation fails.
validate_number() {
    # Dynamic values are passed as printf arguments, never in the format string.
    [[ "$2" =~ ^[1-9][0-9]*$ && "$2" -le "$3" ]] || { printf "%b[Error] %s must be 1-%s%b\n" "$RED" "$1" "$3" "$NC" >&2; exit 1; }
}
62
+
63
# Argument parsing
while [[ $# -gt 0 ]]; do
    case $1 in
        # --include/--exclude greedily consume every following word until the
        # next option-looking token (leading "--", or a short flag like "-x").
        --include) shift; while [[ $# -gt 0 && ! ($1 =~ ^--) && ! ($1 =~ ^-[^-]) ]]; do INCLUDE_PATTERNS+=("$1"); shift; done ;;
        --exclude) shift; while [[ $# -gt 0 && ! ($1 =~ ^--) && ! ($1 =~ ^-[^-]) ]]; do EXCLUDE_PATTERNS+=("$1"); shift; done ;;
        --hf_username) HF_USERNAME="$2"; shift 2 ;;
        --hf_token) HF_TOKEN="$2"; shift 2 ;;
        --tool)
            # Only the two supported backends are accepted.
            case $2 in
                aria2c|wget)
                    TOOL="$2"
                    ;;
                *)
                    printf "%b[Error] Invalid tool. Use 'aria2c' or 'wget'.%b\n" "$RED" "$NC"
                    exit 1
                    ;;
            esac
            shift 2
            ;;
        # Both counts are capped at 10 by this script (see validate_number).
        -x) validate_number "threads (-x)" "$2" 10; THREADS="$2"; shift 2 ;;
        -j) validate_number "concurrent downloads (-j)" "$2" 10; CONCURRENT="$2"; shift 2 ;;
        --dataset) DATASET=1; shift ;;
        --local-dir) LOCAL_DIR="$2"; shift 2 ;;
        --revision) REVISION="$2"; shift 2 ;;
        # Any unrecognized token prints usage and exits.
        *) display_help ;;
    esac
done
90
+
91
# Generate current command string
# Build a canonical one-line record of every setting that affects which URLs
# get generated; it is compared against the previous run's record to decide
# whether a cached file list can be reused.
generate_command_string() {
    local cmd_string="REPO_ID=$REPO_ID"
    cmd_string+=" TOOL=$TOOL"
    cmd_string+=" INCLUDE_PATTERNS=${INCLUDE_PATTERNS[*]}"
    cmd_string+=" EXCLUDE_PATTERNS=${EXCLUDE_PATTERNS[*]}"
    cmd_string+=" DATASET=${DATASET:-0}"
    cmd_string+=" HF_USERNAME=${HF_USERNAME:-}"
    cmd_string+=" HF_TOKEN=${HF_TOKEN:-}"
    # Bug fix: this line previously emitted a second "HF_TOKEN=" entry that
    # actually held the endpoint, so endpoint changes were recorded under the
    # wrong key.
    cmd_string+=" HF_ENDPOINT=${HF_ENDPOINT:-}"
    cmd_string+=" REVISION=$REVISION"
    echo "$cmd_string"
}
104
+
105
# Check if aria2, wget, curl are installed
# Abort with an error message when the named tool is not on PATH.
# $1 - command name to look up
check_command() {
    # Quote "$1" so an empty or space-containing argument cannot mis-parse.
    if ! command -v "$1" &>/dev/null; then
        printf "%b%s is not installed. Please install it first.%b\n" "$RED" "$1" "$NC"
        exit 1
    fi
}
112
+
113
# curl fetches metadata; $TOOL performs the actual downloads.
check_command curl; check_command "$TOOL"

# Default download directory: the repo_name part of 'org_name/repo_name'.
LOCAL_DIR="${LOCAL_DIR:-${REPO_ID#*/}}"
# .hfd caches the metadata JSON, the generated URL list and the last-run record.
mkdir -p "$LOCAL_DIR/.hfd"

if [[ "$DATASET" == 1 ]]; then
    METADATA_API_PATH="datasets/$REPO_ID"
    DOWNLOAD_API_PATH="datasets/$REPO_ID"
    # CUT_DIRS feeds wget --cut-dirs; dataset URLs carry one extra leading
    # path component ("datasets/") compared to model URLs.
    CUT_DIRS=5
else
    METADATA_API_PATH="models/$REPO_ID"
    DOWNLOAD_API_PATH="$REPO_ID"
    CUT_DIRS=4
fi

# Modify API URL, construct based on revision
if [[ "$REVISION" != "main" ]]; then
    METADATA_API_PATH="$METADATA_API_PATH/revision/$REVISION"
fi
API_URL="$HF_ENDPOINT/api/$METADATA_API_PATH"

METADATA_FILE="$LOCAL_DIR/.hfd/repo_metadata.json"
135
+
136
# Fetch and save metadata
# Download the repo metadata from $API_URL into $METADATA_FILE and echo it on
# success. On failure, report the HTTP status and server response on stderr,
# remove the partial cache file, and exit 1.
fetch_and_save_metadata() {
    status_code=$(curl -L -s -w "%{http_code}" -o "$METADATA_FILE" ${HF_TOKEN:+-H "Authorization: Bearer $HF_TOKEN"} "$API_URL")
    RESPONSE=$(cat "$METADATA_FILE")
    if [ "$status_code" -eq 200 ]; then
        printf "%s\n" "$RESPONSE"
    else
        # Pass dynamic values as printf arguments, not in the format string:
        # the server response may contain '%' or backslash sequences.
        printf "%b[Error] Failed to fetch metadata from %s. HTTP status code: %s.%b\n%s\n" "${RED}" "$API_URL" "$status_code" "${NC}" "$RESPONSE" >&2
        rm "$METADATA_FILE"
        exit 1
    fi
}
148
+
149
# Exit when the repo metadata marks it as gated and no credentials were given.
# $1 - raw metadata JSON response
check_authentication() {
    local response="$1"
    local gated="false"
    if command -v jq &>/dev/null; then
        gated=$(echo "$response" | jq -r '.gated // false')
    elif echo "$response" | grep -q '"gated":[^f]'; then
        # grep fallback without jq: any "gated" value other than false
        # (true, "auto", ...) counts as gated.
        gated="true"
    fi
    # Fix for duplicated logic: the original repeated the same error message in
    # both branches; a single gated-check now covers them.
    if [[ "$gated" != "false" && ( -z "$HF_TOKEN" || -z "$HF_USERNAME" ) ]]; then
        printf "${RED}The repository requires authentication, but --hf_username and --hf_token is not passed. Please get token from https://huggingface.co/settings/tokens.\nExiting.\n${NC}"
        exit 1
    fi
}
165
+
166
# Reuse cached repo metadata when present; otherwise fetch it from the API.
# Either way, the gated-repo authentication check runs on the result.
if [[ -f "$METADATA_FILE" ]]; then
    printf "%bUsing cached metadata: $METADATA_FILE%b\n" "$GREEN" "$NC"
    RESPONSE=$(cat "$METADATA_FILE")
else
    printf "%bFetching repo metadata...%b\n" "$YELLOW" "$NC"
    RESPONSE=$(fetch_and_save_metadata) || exit 1
fi
check_authentication "$RESPONSE"
175
+
176
# Decide whether the URL file list must be rebuilt.
# Returns 0 (regenerate) when the list or the recorded command is missing, or
# when the effective download settings differ from the previous run's record;
# the current settings are written to the record whenever regeneration is due.
# Returns 1 when the existing file list can be reused.
should_regenerate_filelist() {
    local command_file="$LOCAL_DIR/.hfd/last_download_command"
    # Declaration split from assignment so the command's status isn't masked.
    local current_command
    current_command=$(generate_command_string)

    # Missing file list or missing record both force a regeneration.
    if [[ ! -f "$LOCAL_DIR/$fileslist_file" || ! -f "$command_file" ]]; then
        echo "$current_command" > "$command_file"
        return 0
    fi

    # Compare current command with saved command
    local saved_command
    saved_command=$(cat "$command_file")
    if [[ "$current_command" != "$saved_command" ]]; then
        echo "$current_command" > "$command_file"
        return 0
    fi

    return 1
}
201
+
202
# Per-tool URL list; kept separate because the two formats are incompatible.
fileslist_file=".hfd/${TOOL}_urls.txt"

if should_regenerate_filelist; then
    # Remove existing file list if it exists
    [[ -f "$LOCAL_DIR/$fileslist_file" ]] && rm "$LOCAL_DIR/$fileslist_file"

    printf "%bGenerating file list...%b\n" "$YELLOW" "$NC"

    # Convert include and exclude patterns to regex
    # (only '.' and '*' are translated; patterns are OR-joined with '|').
    INCLUDE_REGEX=""
    EXCLUDE_REGEX=""
    if ((${#INCLUDE_PATTERNS[@]})); then
        INCLUDE_REGEX=$(printf '%s\n' "${INCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
    fi
    if ((${#EXCLUDE_PATTERNS[@]})); then
        EXCLUDE_REGEX=$(printf '%s\n' "${EXCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
    fi

    # Check if jq is available
    if command -v jq &>/dev/null; then
        # Emit one entry per repo file. For aria2c: a URL line followed by
        # indented per-download options (dir=, out=, optional auth header) and
        # a blank separator line. For wget: one bare URL per line.
        process_with_jq() {
            if [[ "$TOOL" == "aria2c" ]]; then
                printf "%s" "$RESPONSE" | jq -r \
                    --arg endpoint "$HF_ENDPOINT" \
                    --arg repo_id "$DOWNLOAD_API_PATH" \
                    --arg token "$HF_TOKEN" \
                    --arg include_regex "$INCLUDE_REGEX" \
                    --arg exclude_regex "$EXCLUDE_REGEX" \
                    --arg revision "$REVISION" \
                    '
                    .siblings[]
                    | select(
                        .rfilename != null
                        and ($include_regex == "" or (.rfilename | test($include_regex)))
                        and ($exclude_regex == "" or (.rfilename | test($exclude_regex) | not))
                    )
                    | [
                        ($endpoint + "/" + $repo_id + "/resolve/" + $revision + "/" + .rfilename),
                        " dir=" + (.rfilename | split("/")[:-1] | join("/")),
                        " out=" + (.rfilename | split("/")[-1]),
                        if $token != "" then " header=Authorization: Bearer " + $token else empty end,
                        ""
                    ]
                    | join("\n")
                    '
            else
                printf "%s" "$RESPONSE" | jq -r \
                    --arg endpoint "$HF_ENDPOINT" \
                    --arg repo_id "$DOWNLOAD_API_PATH" \
                    --arg include_regex "$INCLUDE_REGEX" \
                    --arg exclude_regex "$EXCLUDE_REGEX" \
                    --arg revision "$REVISION" \
                    '
                    .siblings[]
                    | select(
                        .rfilename != null
                        and ($include_regex == "" or (.rfilename | test($include_regex)))
                        and ($exclude_regex == "" or (.rfilename | test($exclude_regex) | not))
                    )
                    | ($endpoint + "/" + $repo_id + "/resolve/" + $revision + "/" + .rfilename)
                    '
            fi
        }
        result=$(process_with_jq)
        printf "%s\n" "$result" > "$LOCAL_DIR/$fileslist_file"
    else
        printf "%b[Warning] jq not installed, using grep/awk for metadata json parsing (slower). Consider installing jq for better parsing performance.%b\n" "$YELLOW" "$NC"
        # Fallback that builds the same list format from raw text extraction.
        # NOTE(review): for top-level files the jq path yields "dir=" (empty)
        # while dirname here yields "dir=." — confirm both are equivalent for
        # aria2c before relying on list files being interchangeable.
        process_with_grep_awk() {
            local include_pattern=""
            local exclude_pattern=""
            local output=""

            if ((${#INCLUDE_PATTERNS[@]})); then
                include_pattern=$(printf '%s\n' "${INCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
            fi
            if ((${#EXCLUDE_PATTERNS[@]})); then
                exclude_pattern=$(printf '%s\n' "${EXCLUDE_PATTERNS[@]}" | sed 's/\./\\./g; s/\*/.*/g' | paste -sd '|' -)
            fi

            # Pull every "rfilename" value out of the metadata JSON.
            local files=$(printf '%s' "$RESPONSE" | grep -o '"rfilename":"[^"]*"' | awk -F'"' '{print $4}')

            if [[ -n "$include_pattern" ]]; then
                files=$(printf '%s\n' "$files" | grep -E "$include_pattern")
            fi
            if [[ -n "$exclude_pattern" ]]; then
                files=$(printf '%s\n' "$files" | grep -vE "$exclude_pattern")
            fi

            while IFS= read -r file; do
                if [[ -n "$file" ]]; then
                    if [[ "$TOOL" == "aria2c" ]]; then
                        output+="$HF_ENDPOINT/$DOWNLOAD_API_PATH/resolve/$REVISION/$file"$'\n'
                        output+=" dir=$(dirname "$file")"$'\n'
                        output+=" out=$(basename "$file")"$'\n'
                        [[ -n "$HF_TOKEN" ]] && output+=" header=Authorization: Bearer $HF_TOKEN"$'\n'
                        output+=$'\n'
                    else
                        output+="$HF_ENDPOINT/$DOWNLOAD_API_PATH/resolve/$REVISION/$file"$'\n'
                    fi
                fi
            done <<< "$files"

            printf '%s' "$output"
        }

        result=$(process_with_grep_awk)
        printf "%s\n" "$result" > "$LOCAL_DIR/$fileslist_file"
    fi
else
    printf "%bResume from file list: $LOCAL_DIR/$fileslist_file%b\n" "$GREEN" "$NC"
fi
313
+
314
# Perform download
printf "${YELLOW}Starting download with $TOOL to $LOCAL_DIR...\n${NC}"

# Fail fast when the target directory cannot be entered; previously a failed
# cd was ignored and the download silently ran in the current directory.
cd "$LOCAL_DIR" || { printf "%b[Error] Cannot enter directory: %s%b\n" "$RED" "$LOCAL_DIR" "$NC" >&2; exit 1; }
if [[ "$TOOL" == "aria2c" ]]; then
    # -c resumes partial files; --save-session rewrites the list with what is
    # still pending, so a re-run continues where this one stopped.
    aria2c --console-log-level=error --file-allocation=none -x "$THREADS" -j "$CONCURRENT" -s "$THREADS" -k 1M -c -i "$fileslist_file" --save-session="$fileslist_file"
elif [[ "$TOOL" == "wget" ]]; then
    # --cut-dirs strips the leading URL path components so files land at the
    # same relative paths as in the repo; --continue resumes partial files.
    wget -x -nH --cut-dirs="$CUT_DIRS" ${HF_TOKEN:+--header="Authorization: Bearer $HF_TOKEN"} --input-file="$fileslist_file" --continue
fi

if [[ $? -eq 0 ]]; then
    printf "${GREEN}Download completed successfully. Repo directory: $PWD\n${NC}"
else
    printf "${RED}Download encountered errors.\n${NC}"
    exit 1
fi