comparison bin/moefetch @ 400:657f787fbae8

Merge remote-tracking branch 'origin/master'
author Edho Arief <edho@myconan.net>
date Tue, 04 Sep 2012 10:23:37 +0700
parents 38c7615caf9e
children
comparison
equal deleted inserted replaced
395:c01d272af466 400:657f787fbae8
40 set -u 40 set -u
41 useragent="Mozilla/5.0 (Windows NT 6.1; WOW64; rv:10.0) Gecko/20100101 Firefox/10.0" 41 useragent="Mozilla/5.0 (Windows NT 6.1; WOW64; rv:10.0) Gecko/20100101 Firefox/10.0"
42 42
# useless welcome message. Also version
msg_welcome() {
	printf 'moefetch %s\nCopyright (c) 2009-2012 edogawaconan <edho@myconan.net>\n\n' "${_version}"
}
49 49
# Sanitize path. Totally safe. Usage: cmd "$(safe_path "$(unknown)")"
safe_path()
{
	# Paths already anchored at "." or "/" pass through untouched; anything
	# else is prefixed with "./" so it can never be mistaken for an option.
	case "$*" in
		.*|/*) printf "%s" "$*";;
		*)     printf "%s" "./$*";;
	esac
}
62 62
# Checks md5. OpenSSL should be available on anything usable.
# Prints the 32-hex-digit md5 of the file named by $1.
# Reads the file via input redirection instead of a useless `cat |` pipe.
get_md5() { openssl dgst -md5 < "$(safe_path "${1}")" | tail -n 1 | sed -e 's/.*\([[:xdigit:]]\{32\}\).*/\1/'; }
65 65
# Prints whatever remains of the filename after deleting any 32-hex-digit run;
# empty output therefore means the name looks like a bare md5.
# NOTE(review): relies on get_filename, defined elsewhere in this file.
is_not_md5() { get_filename "$1" | sed -e 's/[0-9a-f]\{32\}//g'; }
77 77
78 78
# fatal error handler: print the message (after a blank line) and quit with status 1
Err_Fatal() {
	printf '\nFatal error: %s\n' "${1}"
	exit 1
}
85 85
# "should never happen" handler: printed when internal invariants are violated,
# e.g. the working directories were modified while the script was running.
Err_Impossible() {
	printf '\n%s\n%s\n' \
		"Impossible error. Or you modified content of the working directories when the script is running." \
		"Please report to moefetch.googlecode.com if you see this message (complete with entire run log)"
	exit 1
}
92 92
# help message: usage text, then exit 2 (conventional "bad usage" status)
Err_Help() {
	cat <<EOF
moefetch.sh COMMAND [-n] [-p PASSWORD] [-s SITE_URL] [-u USERNAME] TAGS

COMMAND:
(quick)fetch:
	Do a complete update. Add prefix quick to skip file checking
check:
	Get list of new files, clean up local folder and print total new files

OPTIONS:
-n:
	Skip checking repository directory.
-p PASSWORD:
	Specifies password for login.
-s SITE_URL:
	Specify URL of the Danbooru powered site you want to leech from. Default is ${DEFAULT_SITE}.
-u USERNAME:
	Specifies username for login.
TAGS:
	Tags you want to download. Separated by spaces. Tag name follows standard Danbooru tagging scheme.
EOF
	exit 2
}
116 116
# generate link by transforming xml
# Pages through ${SITE}/post/index.xml and collects direct file URLs into
# "${TEMP_PREFIX}-list". Stops when a page returns fewer than ${pagelimit}
# entries. Uses globals: SITE, TAGS, TEMP_PREFIX, _use_login, LOGIN_USER,
# LOGIN_PASS, useragent. Dies via Err_Fatal on download failure or empty list.
Generate_Link() {
	echo "
Fetching XML file"
	pagelimit=100
	tempnum="${pagelimit}"
	iternum=1
	> "${TEMP_PREFIX}-list"
	# a full page implies there may be more results on the next page
	while [ "${tempnum}" -ge "${pagelimit}" ]; do
		# NOTE(review): get_cleantags is defined elsewhere in this file —
		# presumably it URL-encodes/joins the tag list; verify there.
		url="${SITE}/post/index.xml?tags=$(get_cleantags "${TAGS}")&offset=0&limit=${pagelimit}&page=${iternum}"
		[ ${_use_login} -eq 1 ] && url="${url}&login=${LOGIN_USER}&password_hash=${LOGIN_PASS}"
		wget --no-check-certificate --quiet "${url}" -O "${TEMP_PREFIX}-xml" --referer="${SITE}/post" --user-agent="${useragent}" -e continue=off || Err_Fatal "Failed download catalog file"
		printf "Processing XML file... "
		# xslt evilry: extract every post's @file_url, then normalize each to
		# "<scheme://host...>/<md5>.<ext>" and keep only http(s) lines
		xsltproc - "${TEMP_PREFIX}-xml" <<EOF | sed 's/.*\(https*.*\)\(\/[a-f0-9]\{32\}\).*\.\([^\.]*\)/\1\2.\3/g' | grep ^http > "${TEMP_PREFIX}-templist"
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<xsl:output method="xml" indent="yes"/>
<xsl:template match="post">
<xsl:value-of select="@file_url" />
</xsl:template>
</xsl:stylesheet>
EOF
		tempnum=$(grep -c . "${TEMP_PREFIX}-templist")
		iternum=$((iternum + 1))
		cat "${TEMP_PREFIX}-templist" >> "${TEMP_PREFIX}-list"
		echo "${tempnum} file(s) available"
	done
	numfiles=$(grep -c . "${TEMP_PREFIX}-list")
	echo "${numfiles} file(s) available on server"
	[ "${numfiles}" -gt 0 ] || Err_Fatal "Error in processing list or no files can be found with specified tag(s) or site."
}
147 148
148 149
# Start the spinner: remember and print the first frame.
progress_init() {
	_last="-"
	# print via %s — never use a variable as the printf format string
	printf "%s" "${_last}"
}
153 154
# Advance the spinner one frame in the cycle - \ | / and redraw in place.
progress_anim() {
	case "${_last}" in
		/) _last="-";;
		-) _last=\\;;
		\\) _last=\|;;
		\|) _last="/";;
	esac
	# \b erases the previous frame; %s keeps the glyph out of the format string
	printf "\b%s" "${_last}"
}
163 164
# Erase the last spinner frame and print the final "done" marker.
progress_done() { printf '\bdone\n'; }
165 166
# getting rid of ls (as per suggestion)
# Counts entries (including dotfiles) in the directory named by "$*",
# excluding "." and "..". Prints the count on stdout.
Count_Files() {
	numfiles=0
	for entry in "${*}/"* "${*}/".*; do
		case "${entry}" in
			"${*}/."|"${*}/..") continue;;
		esac
		# -e also filters out the literal pattern left by an unmatched glob
		[ -e "${entry}" ] && numfiles=$((numfiles + 1))
	done
	echo "${numfiles}"
}
176 177
# check tools availability
# Verify every external program this script shells out to exists in PATH;
# die via Err_Fatal on the first one missing.
Check_Tools() {
	commands="cut sed wc wget xsltproc xargs rm mkdir chown comm grep date openssl"
	for cmd in ${commands}; do
		command -v "${cmd}" >/dev/null || Err_Fatal "${cmd} doesn't exist in ${PATH}"
	done
}
186 187
# verify required folders exist and writeable
# Creates temp/trash/deleted and the per-site target directory under
# ${BASE_DIR}, widens permissions on any we don't own, flags an empty target
# directory as a fresh checkout (ISNEW=1), and pre-creates the temp files.
Check_Folders(){
	[ -O "${BASE_DIR}" ] || Err_Fatal "You don't own ${BASE_DIR}. Please fix ${BASE_DIR} or run this script in your own directory."
	for directory in temp trash deleted "${SITE_DIR}/${TARGET_DIR}"; do
		if [ ! -d "${BASE_DIR}/${directory}" ]; then
			mkdir -p "${BASE_DIR}/${directory}" || Err_Impossible
		fi
		if [ ! -O "${BASE_DIR}/${directory}" ]; then
			echo "You don't own the ${BASE_DIR}/${directory}, applying globally writeable permission on it"
			chmod -R u=rwX,g=rwX,o=rwX "${BASE_DIR}/${directory}" || Err_Impossible
		fi
	done
	# empty target directory => treat the repository as brand new
	[ "$(Count_Files "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}")" -eq 0 ] && ISNEW=1
	for i in error ok list newlist templist; do
		# BUGFIX: was "Fatal_Err", an undefined name — the handler is Err_Fatal
		touch "${TEMP_PREFIX}-${i}" || Err_Fatal "Error creating ${TEMP_PREFIX}-${i}. This shouldn't happen"
	done
}
205 206
# Do some cleanup
# Moves anything in the target directory that is a directory, has a non-md5
# name, or no longer appears in the server list into a timestamped trash
# folder, then reports what was moved.
Cleanup_Repository() {
	# current dir: ${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}
	printf "Cleaning up repository folder... "
	progress_init
	# NOTE(review): ${trash_dir} is interpolated into its own assignment —
	# presumably empty or preset upstream; confirm it is defined under set -u.
	trash_dir="${BASE_DIR}/trash/${trash_dir}/$(date -u "+${SITE_DIR}-${TARGET_DIR}-%Y%m%d-%H.%M")"
	trashes="These files have been moved to ${trash_dir}:"
	has_trash=
	if [ ! -d "${trash_dir}" ]; then
		mkdir -p "${trash_dir}" || Err_Impossible
	else
		if [ ! -O "${trash_dir}" ]; then
			# BUGFIX: was chmod'ing "${BASE_DIR}/${directory}" — a stale
			# variable left over from Check_Folders — instead of the trash dir
			chmod -R u=rwX,g=rwX,o=rwX "${trash_dir}" || Err_Impossible
		fi
	fi
	for trash in "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}/"*
	do
		if [ -e "${trash}" ]; then
			is_trash=
			if [ -d "${trash}" ] || [ -n "$(is_not_md5 "${trash}")" ] || [ -z "$(grep "$(get_basename "${trash}")" "${TEMP_PREFIX}-list")" ]; then
				is_trash=1
				has_trash=1
				mv -f -- "${trash}" "${trash_dir}" || Err_Impossible
				trashes="${trashes}
$(get_basename "${trash}")"
			fi
		fi
		progress_anim
	done
	# remove the trash dir again if nothing was actually moved into it
	rmdir "${trash_dir}" 2>/dev/null
	progress_done
	[ -n "${has_trash}" ] && echo "${trashes}"
}
239 240
# check files correctness
# Unless the repository is new (ISNEW), optionally cleans it, verifies each
# local file's md5 against its filename (deleting mismatches), then writes the
# list of URLs still to be downloaded to "${TEMP_PREFIX}-newlist".
Check_Files() {
	if [ ! -n "${ISNEW}" ]; then
		[ -z "${NOCLEAN}" ] && Cleanup_Repository
		printf "Checking for errors... "
		progress_init
		files_error="These files do not match its md5:"
		files_notdanbooru="These files are not checked:"
		has_err_filename=
		has_err_md5=
		> "${TEMP_PREFIX}-error"
		> "${TEMP_PREFIX}-ok"
		for file in "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}/"*
		do
			if [ -e "${file}" ]; then
				# directories and non-md5 names are skipped, not verified
				if [ -n "$(is_not_md5 "${file}")" ] || [ -d "${file}" ]; then
					files_notdanbooru="${files_notdanbooru}
$(get_basename "${file}")"
					has_err_filename=1
				else
					# filename is the expected md5 of the content
					if [ "$(get_md5 "${file}")" = "$(get_filename "${file}")" ]; then
						echo "$(get_basename "${file}")" >> "${TEMP_PREFIX}-ok"
					else
						rm "${file}" || Err_Fatal "Error removing ${file}"
						echo "$(get_basename "${file}")" >> "${TEMP_PREFIX}-error"
						files_error="${files_error}
$(get_basename "${file}")"
						has_err_md5=1
					fi
				fi
			fi
			progress_anim
		done
		progress_done
		if [ ! -n "${has_err_md5}" ] && [ ! -n "${has_err_filename}" ]; then
			echo "All files OK"
		else
			if [ -n "${has_err_md5}" ]; then
				echo "${files_error}"
				echo "$(grep -c . "${TEMP_PREFIX}-error") file(s) removed"
			fi
			[ -n "${has_err_filename}" ] && echo "${files_notdanbooru}"
		fi
		echo "$(grep -c . "${TEMP_PREFIX}-ok") file(s) available locally"

		printf "Generating list of new files... "
		progress_init
		# One-pass filter: drop every URL containing a verified local basename.
		# BUGFIX: replaces a per-file grep -v loop that was O(n^2), treated
		# filenames as regexes, and left a stale newlist when the ok-list was
		# empty (nothing would be downloaded despite files existing upstream).
		if [ -s "${TEMP_PREFIX}-ok" ]; then
			grep -F -v -f "${TEMP_PREFIX}-ok" "${TEMP_PREFIX}-list" > "${TEMP_PREFIX}-newlist"
		else
			# no verified local files: everything on the server is new
			cp -f "${TEMP_PREFIX}-list" "${TEMP_PREFIX}-newlist"
		fi
		progress_anim
		progress_done
		echo "$(grep -c . "${TEMP_PREFIX}-newlist") file(s) to be downloaded"
	else
		if [ -n "${ISQUICK}" ]; then
			echo "Quick mode selected. Skipping check"
		else
			echo "Empty local repository"
		fi
		cat "${TEMP_PREFIX}-list" > "${TEMP_PREFIX}-newlist"
	fi
}
304 305
# start downloading the images
# Feeds "${TEMP_PREFIX}-newlist" to wget with resume enabled; output lands in
# the target directory and the transfer log in "${TEMP_PREFIX}.log".
Fetch_Images() {
	if [ "$(grep -c . "${TEMP_PREFIX}-newlist")" -eq 0 ]; then
		echo "No new file"
	else
		printf "Downloading files... "
		# BUGFIX: abort if cd fails — otherwise wget would silently download
		# everything into whatever the current directory happens to be
		cd "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}" || Err_Fatal "Cannot change directory to ${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}"
		wget --no-check-certificate -e continue=on -i "${TEMP_PREFIX}-newlist" -o "${TEMP_PREFIX}.log" --referer="${SITE}/post" --user-agent="${useragent}"
	fi
}
315 316
# initialize base variables and initial command check
# Parses COMMAND, options (-n, -p, -s, -u) and TAGS; derives SITE, BASE_DIR,
# TARGET_DIR, SITE_DIR and TEMP_PREFIX. Exits via Err_Help on bad usage and
# Err_Fatal when no tag is given.
init()
{
	# path initialization
	# check if additional path is specified
	if [ -n "${ADDITIONAL_PATH}" ]
	then
		# insert the additional path
		PATH="${ADDITIONAL_PATH}:${PATH}"
		export PATH
	fi

	# misc variables
	ISQUICK=
	ISNEW=

	# minimum number of arguments: 2 (command and tag). If less than two, exit and print help message
	[ $# -lt 2 ] && Err_Help
	case "$1" in
		check|fetch|quickfetch)
			echo "Starting..."
			JOB="$1"
			;;
		*)
			Err_Help
			;;
	esac
	shift
	SITE=
	TAGS=
	has_pass=0
	has_user=0
	x=1
	while getopts "s:nu:p:" opt
	do
		case "$opt" in
			s) SITE="$OPTARG";;
			n) NOCLEAN=1;;
			p)
				# hash the password the way Danbooru expects (sha1 of the raw password)
				LOGIN_PASS=$(printf "%s" "$OPTARG" | openssl dgst -sha1 | sed -e 's/.*\([[:xdigit:]]\{40\}\).*/\1/')
				has_pass=1
				;;
			u)
				LOGIN_USER="$OPTARG"
				has_user=1
				;;
		esac
		x=$OPTIND
	done
	shift $((x - 1))
	# BUGFIX: "${1:-}" avoids an unbound-variable abort under `set -u`
	# when no operands remain after option parsing
	[ "${1:-}" = "--" ] && shift
	# BUGFIX: "$*" (not "$@") — assigning "$@" to a scalar is unspecified
	TAGS="$*"
	[ -n "${SITE}" ] || SITE=${DEFAULT_SITE}
	[ -n "${TAGS}" ] || Err_Fatal "No tag specified"
	# Get base folder - default, current folder or fallback to ${HOME}
	[ -n "${BASE_DIR}" ] || BASE_DIR=${PWD}
	[ -n "${BASE_DIR}" ] || BASE_DIR=${HOME}
	[ -n "$(echo "${BASE_DIR}" | cut -c1 | grep \/)" ] || BASE_DIR="/${BASE_DIR}"
	# see if both pass and user are set. If they're set, switch _use_login variable content to 1.
	# (chained tests replace the deprecated/ambiguous `-a` operator)
	[ "${has_pass}" -eq 1 ] && [ "${has_user}" -eq 1 ] && _use_login=1

	echo "Tags: ${TAGS}"
	# slash is not wanted for folder name
	TARGET_DIR=$(echo "${TAGS}" | sed -e 's/\//_/g')
	SITE_DIR=$(echo "${SITE}" | sed -e 's/\/$//g;s/\//_/g')
	TEMP_PREFIX="${BASE_DIR}/temp/${SITE_DIR}-${TARGET_DIR}"
}
383 384
# global variables goes here
init_globals()
{
	_version="1.0-rc3" # version of this script, shown in the welcome banner
	_use_login=0 # variable to check whether a login is used or not (1 when both -u and -p given)
}
390 391
# Entry point: set up globals, parse arguments, verify environment, then
# dispatch on the requested JOB (check / fetch / quickfetch).
main()
{
	# removing GNU-ism as much as possible
	POSIXLY_CORRECT=1
	#initialize global variables
	init_globals
	#print welcome message
	msg_welcome
	# initialization
	init "$@"
	Check_Tools
	Check_Folders


	# let's do the job!
	case "${JOB}" in
		check)
			Generate_Link
			Check_Files
			;;
		fetch)
			Generate_Link
			Check_Files
			Fetch_Images
			;;
		quickfetch)
			# quick mode: pretend the repository is new so Check_Files skips
			# md5 verification and downloads everything in the fetched list
			ISNEW=1
			ISQUICK=1
			Generate_Link
			Check_Files
			Fetch_Images
			;;
	esac
}
425 426
426 # call the main routine! 427 # call the main routine!
427 main "$@" 428 main "$@"
428 429