#!/usr/bin/env bash
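# Recursively crawl the Apache mirror index at http://mirrors.cnnic.cn/apache/,
# collect every link ending in .zip or .gz, and download those files under
# /tmp, mirroring the remote directory layout. Existing files are skipped,
# so an interrupted run can simply be restarted.
# Requires: bash, curl, awk, and a grep with -E support.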
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
clear
url="http://mirrors.cnnic.cn/
apache/"
downlistfile="/tmp/downlist.txt"      # file URLs collected for download
downlisttmpfile="/tmp/tmplist.txt"    # queue of pages still to be crawled
downfiletype="zip$|gz$"               # extended regex of suffixes to fetch
downlist=""
urlback="$url"
: > "$downlistfile"       # create the download list, or truncate it
: > "$downlisttmpfile"    # create the crawl queue, or truncate it
curl_urls(){
    # Pull every href value out of the Apache index page, then drop empty
    # lines, column-sorting links ("?..."), absolute URLs, and "#" anchors.
    urls=$(curl -s "$urlback" |
        awk -F 'a href="' '{printf "%s\n",$2}' |
        awk -F '"' '{printf "%s\n",$1}' |
        grep -vE '^$|^\?|^http://|^#')
}
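# Example: an index line such as '<a href="zookeeper/">zookeeper/</a>'
# (hypothetical entry) is cut down to "zookeeper/" by the two awk passes,
# while Apache's column-sorting links like "?C=N;O=D" fall to the grep filter.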
url_list(){
    # Append every link found on the current page to the crawl queue.
    curl_urls
    for i in $urls ;do
        echo "$urlback$i" >> "$downlisttmpfile"
    done
}
recursive_search_url(){
    # Work through the queue: a URL with a basename is a file and goes to
    # the download list; a URL ending in "/" is a subdirectory to crawl.
    urlbacktmps=$(cat "$downlisttmpfile")
    # An empty queue means the crawl is complete: fetch everything we found.
    [[ "$urlbacktmps" == "" ]] && { echo "no more pages to search"; download_file; exit 0; }
    for j in $urlbacktmps ;do
        if [[ "${j##*/}" != "" ]] ;then
            echo "$j" >> "$downlistfile"
        else
            urlback="$j"
            url_list
        fi
        # Drop the URL we just handled from the queue, then recurse.
        urltmps=$(grep -ve "$j$" "$downlisttmpfile")
        echo "$urltmps" > "$downlisttmpfile"
        recursive_search_url
    done
}
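# The queue file is re-read on every call, so the self-recursion acts as a
# loop that runs until no unvisited URL remains; the final call triggers
# download_file and then exits the whole script.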
download_file(){
    # Download every collected URL whose name matches $downfiletype,
    # mirroring the remote path under /tmp and skipping existing files.
    downlist=$(grep -E "$downfiletype" "$downlistfile")
    for k in $downlist ;do
        filepath="/tmp/${k#*//}"        # strip the "http://" scheme prefix
        dir=$(dirname "$filepath")
        [ ! -d "$dir" ] && mkdir -p "$dir"
        [ ! -f "$filepath" ] && curl -o "$filepath" "$k"
    done
}
# Seed the queue from the root listing, then crawl; download_file is
# invoked by recursive_search_url once the queue is empty.
url_list
recursive_search_url