comparison bin/moefetch @ 370:fc644e52df7c

[bin/ren] Replaced tabs with spaces
author Edho Arief <edho@myconan.net>
date Thu, 28 Jun 2012 09:08:27 +0700
parents 391f2b64900e
children 38c7615caf9e
comparing 369:0adc967bafda with 370:fc644e52df7c
set -u
useragent="Mozilla/5.0 (Windows NT 6.1; WOW64; rv:10.0) Gecko/20100101 Firefox/10.0"

# useless welcome message. Also version
msg_welcome() {
    echo "moefetch ${_version}
Copyright (c) 2009-2012 edogawaconan <edho@myconan.net>
"
}

# Sanitize path. Totally safe. Usage: cmd "$(safe_path "${filename}")"
safe_path()
{
    # It all depends on the first character.
    start=$(printf "%s" "$*" | cut -c 1)
    path=
    case "${start}" in
        .|/) path="$*";; # . and / is safe. No change.
        *) path="./$*";; # Anything else must be prefixed with ./
    esac
    printf "%s" "${path}" # Return.
}
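# Illustration (added, not part of the original script): safe_path guards against
# filenames that could be parsed as options. For a hypothetical file named "-rf notes":
#   safe_path "-rf notes"   -> ./-rf notes   (prefixed, so rm/mv cannot read it as flags)
#   safe_path "/tmp/a.jpg"  -> /tmp/a.jpg    (absolute and relative paths pass through unchanged)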

# Checks md5. OpenSSL should be available on anything usable.
get_md5() { cat "$(safe_path "${1}")" | openssl dgst -md5 | tail -n 1 | sed -e 's/.*\([[:xdigit:]]\{32\}\).*/\1/'; }
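# Example (added, illustrative only): openssl prints something like
#   (stdin)= 9e107d9d372bb6826bd81d3542a419d6
# (the exact prefix varies by OpenSSL version); the sed keeps only the 32 hex digits,
# so get_md5 somefile.jpg yields the bare digest, ready to compare against the filename.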

[... lines 66-75 of the file are not shown here ...]

is_not_md5() { get_filename "$1" | sed -e 's/\([0-9a-f]\{32\}\)//g'; }
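# Note (added): get_filename comes from the omitted lines above and appears to yield the
# name without its extension; is_not_md5 deletes any 32-digit hex run from it, so empty
# output means the name is a bare md5, and leftover characters mark a file this script
# did not download.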


# fatal error handler
Err_Fatal() {
    echo "
Fatal error: ${1}"
    exit 1
}

Err_Impossible() {
    echo "
Impossible error. Or you modified content of the working directories when the script is running.
Please report to moefetch.googlecode.com if you see this message (complete with entire run log)"
    exit 1
}

# help message
Err_Help() {
    echo "moefetch.sh COMMAND [-n] [-p PASSWORD] [-s SITE_URL] [-u USERNAME] TAGS

COMMAND:
    (quick)fetch:
        Do a complete update. Add prefix quick to skip file checking
    check:
        Get list of new files, clean up local folder and print total new files

OPTIONS:
    -n:
        Skip checking repository directory.
    -p PASSWORD:
        Specifies password for login.
    -s SITE_URL:
        Specify URL of the Danbooru powered site you want to leech from. Default is ${DEFAULT_SITE}.
    -u USERNAME:
        Specifies username for login.
    TAGS:
        Tags you want to download. Separated by spaces. Tag name follows standard Danbooru tagging scheme."
    exit 2
}
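# Illustration (added, hypothetical values): a typical run might look like
#   moefetch.sh fetch -s https://konachan.com -u someuser -p secret "landscape scenic"
# which logs in, pulls the post list for both tags, and mirrors the matching images locally.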

# generate link by transforming xml
Generate_Link() {
    echo "
Fetching XML file"
    tempnum=1000
    iternum=1
    > "${TEMP_PREFIX}-list"
    while [ "${tempnum}" -ge 1000 ]; do
        url="${SITE}/post/index.xml?tags=$(get_cleantags "${TAGS}")&offset=0&limit=1000&page=${iternum}"
        [ ${_use_login} -eq 1 ] && url="${url}&login=${LOGIN_USER}&password_hash=${LOGIN_PASS}"
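        # Illustration (added, hypothetical site and tag): on the second pass the request is roughly
        #   https://konachan.com/post/index.xml?tags=landscape&offset=0&limit=1000&page=2
        # plus &login=...&password_hash=... when both -u and -p were given; the loop stops once a
        # page returns fewer than 1000 posts.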
        wget --no-check-certificate --quiet "${url}" -O "${TEMP_PREFIX}-xml" --referer="${SITE}/post" --user-agent="${useragent}" -e continue=off || Err_Fatal "Failed download catalog file"
        printf "Processing XML file... "
        # xslt evilry
        xsltproc - "${TEMP_PREFIX}-xml" <<EOF | sed 's/.*\(https*.*\)\(\/[a-f0-9]\{32\}\).*\.\([^\.]*\)/\1\2.\3/g' | grep ^http > "${TEMP_PREFIX}-templist"
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
<xsl:output method="xml" indent="yes"/>
<xsl:template match="post">
<xsl:value-of select="@file_url" />
</xsl:template>
</xsl:stylesheet>
EOF
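        # Note (added): the stylesheet prints each post's file_url attribute; the sed pass then
        # normalises every URL down to scheme://host/.../<md5>.<extension>, so the saved filename
        # is just the md5 plus extension, which is what Check_Files later verifies.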
        tempnum=$(grep -c . "${TEMP_PREFIX}-templist")
        iternum=$((iternum + 1))
        cat "${TEMP_PREFIX}-templist" >> "${TEMP_PREFIX}-list"
        echo "${tempnum} file(s) available"
    done
    numfiles=$(grep -c . "${TEMP_PREFIX}-list")
    echo "${numfiles} file(s) available on server"
    [ "${numfiles}" -gt 0 ] || Err_Fatal "Error in processing list or no files can be found with specified tag(s) or site."
}


progress_init() {
    _last="-"
    printf "${_last}"
}

progress_anim() {
    case "${_last}" in
        /) _last="-";;
        -) _last=\\;;
        \\) _last=\|;;
        \|) _last="/";;
    esac
    printf "\b${_last}"
}

progress_done() { printf "\bdone\n"; }
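# Note (added): the three functions above drive the -\|/ spinner used by the loops below:
#   progress_init; while ...; do <work>; progress_anim; done; progress_done
# one character is printed, overwritten in place on every step, then replaced by "done".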

# getting rid of ls (as per suggestion)
Count_Files() {
    numfiles=0
    for dircontent in "${*}/"* "${*}/".*; do
        if [ -e "${dircontent}" ] && [ x"${dircontent}" != x"${*}/." ] && [ x"${dircontent}" != x"${*}/.." ]; then
            numfiles=$((numfiles + 1))
        fi
    done
    echo $((numfiles))
}
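# Note (added): counting glob matches instead of parsing ls output keeps filenames with
# spaces or other odd characters intact; the [ -e ... ] test also makes an empty directory
# count as 0, because an unmatched glob stays literal and then fails the existence check.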

# check tools availability
Check_Tools() {
    # verify all programs required do indeed exist
    commands="cut sed wc wget xsltproc xargs rm mkdir chown comm grep date openssl"
    for cmd in ${commands}
    do
        [ "$(command -v "${cmd}")" ] || Err_Fatal "${cmd} doesn't exist in ${PATH}"
    done
}
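# Note (added): command -v is the POSIX-specified way to probe for a program, so this works
# even where which(1) is absent or nonstandard; a missing tool aborts the run before any
# network work starts.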

# verify required folders exist and are writeable
Check_Folders(){
    [ -O "${BASE_DIR}" ] || Err_Fatal "You don't own ${BASE_DIR}. Please fix ${BASE_DIR} or run this script in your own directory."
    for directory in temp trash deleted "${SITE_DIR}/${TARGET_DIR}"; do
        if [ ! -d "${BASE_DIR}/${directory}" ]; then
            mkdir -p "${BASE_DIR}/${directory}" || Err_Impossible
        fi
        if [ ! -O "${BASE_DIR}/${directory}" ]; then
            echo "You don't own the ${BASE_DIR}/${directory}, applying globally writeable permission on it"
            chmod -R u=rwX,g=rwX,o=rwX "${BASE_DIR}/${directory}" || Err_Impossible
        fi
    done
    [ "$(Count_Files "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}")" -eq 0 ] && ISNEW=1
    for i in error ok list newlist templist; do
        touch "${TEMP_PREFIX}-${i}" || Err_Fatal "Error creating ${TEMP_PREFIX}-${i}. This shouldn't happen"
    done
    #
}

# Do some cleanup
Cleanup_Repository() {
    # current dir: ${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}
    printf "Cleaning up repository folder... "
    progress_init
    trash_dir="${BASE_DIR}/trash/${trash_dir}/$(date -u "+${SITE_DIR}-${TARGET_DIR}-%Y%m%d-%H.%M")"
    trashes="These files have been moved to ${trash_dir}:"
    has_trash=
    if [ ! -d "${trash_dir}" ]; then
        mkdir -p "${trash_dir}" || Err_Impossible
    else
        if [ ! -O "${trash_dir}" ]; then
            chmod -R u=rwX,g=rwX,o=rwX "${trash_dir}" || Err_Impossible
        fi
    fi
    for trash in "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}/"*
    do
        if [ -e "${trash}" ]; then
            is_trash=
            if [ -d "${trash}" ] || [ -n "$(is_not_md5 "${trash}")" ] || [ -z "$(grep "$(get_basename "${trash}")" "${TEMP_PREFIX}-list")" ]; then
                is_trash=1
                has_trash=1
                mv -f -- "${trash}" "${trash_dir}" || Err_Impossible
                trashes="${trashes}
$(get_basename "${trash}")"
            fi
        fi
        progress_anim
    done
    rmdir "${trash_dir}" 2>/dev/null
    progress_done
    [ -n "${has_trash}" ] && echo "${trashes}"
}

# check files correctness
Check_Files() {
    if [ ! -n "${ISNEW}" ]; then
        [ -z "${NOCLEAN}" ] && Cleanup_Repository
        printf "Checking for errors... "
        progress_init
        files_error="These files do not match its md5:"
        files_notdanbooru="These files are not checked:"
        has_err_filename=
        has_err_md5=
        > "${TEMP_PREFIX}-error"
        > "${TEMP_PREFIX}-ok"
        for file in "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}/"*
        do
            if [ -e "${file}" ]; then
                if [ -n "$(is_not_md5 "${file}")" ] || [ -d "${file}" ]; then
                    files_notdanbooru="${files_notdanbooru}
$(get_basename "${file}")"
                    has_err_filename=1
                else
                    if [ "$(get_md5 "${file}")" = "$(get_filename "${file}")" ]; then
                        echo "$(get_basename "${file}")" >> "${TEMP_PREFIX}-ok"
                    else
                        rm "${file}" || Err_Fatal "Error removing ${file}"
                        echo "$(get_basename "${file}")" >> "${TEMP_PREFIX}-error"
                        files_error="${files_error}
$(get_basename "${file}")"
                        has_err_md5=1
                    fi
                fi
            fi
            progress_anim
        done
        progress_done
        if [ ! -n "${has_err_md5}" ] && [ ! -n "${has_err_filename}" ]; then
            echo "All files OK"
        else
            if [ -n "${has_err_md5}" ]; then
                echo "${files_error}"
                echo "$(grep -c . "${TEMP_PREFIX}-error") file(s) removed"
            fi
            [ -n "${has_err_filename}" ] && echo "${files_notdanbooru}"
        fi
        echo "$(grep -c . "${TEMP_PREFIX}-ok") file(s) available locally"

        printf "Generating list of new files... "
        progress_init
        cp -f "${TEMP_PREFIX}-list" "${TEMP_PREFIX}-templist"
        while read -r is_ok; do
            grep -v "${is_ok}" "${TEMP_PREFIX}-templist" > "${TEMP_PREFIX}-newlist"
            cp -f "${TEMP_PREFIX}-newlist" "${TEMP_PREFIX}-templist" || Err_Impossible
            progress_anim
        done < "${TEMP_PREFIX}-ok"
        progress_done
        echo "$(grep -c . "${TEMP_PREFIX}-newlist") file(s) to be downloaded"
    else
        if [ -n "${ISQUICK}" ]; then
            echo "Quick mode selected. Skipping check"
        else
            echo "Empty local repository"
        fi
        cat "${TEMP_PREFIX}-list" > "${TEMP_PREFIX}-newlist"
    fi
}

# start downloading the images
Fetch_Images() {
    if [ "$(grep -c . "${TEMP_PREFIX}-newlist")" -eq 0 ]; then
        echo "No new file"
    else
        printf "Downloading files... "
        cd "${BASE_DIR}/${SITE_DIR}/${TARGET_DIR}"
        wget --no-check-certificate -e continue=on -i "${TEMP_PREFIX}-newlist" -o "${TEMP_PREFIX}.log" --referer="${SITE}/post" --user-agent="${useragent}"
    fi
}
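# Note (added): wget reads every URL from the -i list in a single session, resumes partial
# files (-e continue=on) and writes its full transcript to ${TEMP_PREFIX}.log, so an
# interrupted run can usually be repeated without re-fetching completed files.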

# initialize base variables and initial command check
init()
{
    # path initialization
    # check if additional path is specified
    if [ -n "${ADDITIONAL_PATH}" ]
    then
        # insert the additional path
        PATH="${ADDITIONAL_PATH}:${PATH}"
        export PATH
    fi

    # misc variables
    ISQUICK=
    ISNEW=

    # minimum number of arguments: 2 (command and tag). If less than two, exit and print help message
    [ $# -lt 2 ] && Err_Help
    case "$1" in
        check|fetch|quickfetch)
            echo "Starting..."
            JOB="$1"
            ;;
        *)
            Err_Help
            ;;
    esac
    shift
    SITE=
    TAGS=
    has_pass=0
    has_user=0
    x=1
    while getopts "s:nu:p:" opt
    do
        case "$opt" in
            s) SITE="$OPTARG";;
            n) NOCLEAN=1;;
            p)
                LOGIN_PASS=$(printf "%s" "$OPTARG" | openssl dgst -sha1 | sed -e 's/.*\([[:xdigit:]]\{40\}\).*/\1/')
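                # Note (added): the password itself is never sent; only this SHA-1 digest is
                # appended later as password_hash= in Generate_Link, alongside login=USERNAME.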
                has_pass=1
                ;;
            u)
                LOGIN_USER="$OPTARG"
                has_user=1
                ;;
        esac
        x=$OPTIND
    done
    shift $(($x-1))
    if [ "${1:-}" = -- ]; then shift; fi
    TAGS="$*"
    [ -n "${SITE}" ] || SITE=${DEFAULT_SITE}
    [ -n "${TAGS}" ] || Err_Fatal "No tag specified"
    # Get base folder - default, current folder or fallback to ${HOME}
    [ -n "${BASE_DIR}" ] || BASE_DIR=${PWD}
    [ -n "${BASE_DIR}" ] || BASE_DIR=${HOME}
    [ -n "$(echo "${BASE_DIR}" | cut -c1 | grep \/)" ] || BASE_DIR="/${BASE_DIR}"
    # see if both pass and user are set. If they're set, switch _use_login variable content to 1.
    [ ${has_pass} -eq 1 -a ${has_user} -eq 1 ] && _use_login=1

    echo "Tags: ${TAGS}"
    # slash is not wanted for folder name
    TARGET_DIR=$(echo "${TAGS}" | sed -e 's/\//_/g')
    SITE_DIR=$(echo "${SITE}" | sed -e 's/\/$//g;s/\//_/g')
    TEMP_PREFIX="${BASE_DIR}/temp/${SITE_DIR}-${TARGET_DIR}"
}

# global variables go here
init_globals()
{
    _version="1.0-rc3" # version of this script
    _use_login=0 # variable to check whether a login is used or not
}

main()
{
    # removing GNU-ism as much as possible
    POSIXLY_CORRECT=1
    # initialize global variables
    init_globals
    # print welcome message
    msg_welcome
    # initialization
    init "$@"
    Check_Tools
    Check_Folders


    # let's do the job!
    case "${JOB}" in
        check)
            Generate_Link
            Check_Files
            ;;
        fetch)
            Generate_Link
            Check_Files
            Fetch_Images
            ;;
        quickfetch)
            ISNEW=1
            ISQUICK=1
            Generate_Link
            Check_Files
            Fetch_Images
            ;;
    esac
}

# call the main routine!
main "$@"