Tested on Ubuntu. Suitable for downloading files from sites that support multi-connection (multi-threaded) downloads.
It can also be used together with FlashGot in Firefox.
Usage: ./mycurl url [referedUrl]
The first argument, url, is the address of the file to download.
The second argument, referedUrl, is the referer URL; it is usually not needed, but some sites (for example, Huajun) require it.
Examples:
./mycurl ftp://xx.xxx.xxx/xxx.rar
or, with a referer URL:
./mycurl url referedUrl
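Before running it, save the script and make it executable (assuming it is saved as mycurl in the current directory):
chmod +x mycurl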
The code follows:
#!/bin/bash
####################################################################
#
# Script for curl to support resumable multi-part download.
#
# Tested on Ubuntu
#
url=$1
# How many "parts" will the target file be divided into?
declare -i parts=5
read -ep "Please input the target directory: " targetdir
read -ep "Please input the outfile name: " outfile
[ -z "$targetdir" ] && targetdir="./"
cd "$targetdir" || exit 2
[ -z "$outfile" ] && outfile=`basename "$1"`
#Set the referer url
if [ -n "$2" ]; then
refurl="-L -e $2"
else refurl=""
fi
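# Above: -L makes curl follow redirects and -e (--referer) sends the given URL
# in the Referer header, which some sites require before serving the file.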
length=`curl $refurl -s -I "$url"|grep -i Content-Length|tail -n 1|sed 's/[^0-9]//g'`
if [ -z "$length" ]; then
echo "Cannot get the length of the target file"
exit 1
fi
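# A quick manual check (the URL below is only a placeholder) of whether the server
# reports a length and accepts byte ranges, which this script relies on:
#   curl -sI "http://example.com/file.zip" | grep -i -E 'Content-Length|Accept-Ranges'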
let "length = $length"
#lsession is used to record how many bytes of each subpart should be downloaded
declare -i lsession=$(($length/$parts))
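# Worked example: for a 1000003-byte file with parts=5, lsession=200000, so
# parts 1-4 cover 200000 bytes each and the last part covers the remaining 200003.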
finished="false"
#Assume the available maximum connections on server can reach "parts" at first
maxconn=$parts
while true;
do
for (( i=1; i<=parts ; i=i+1 ))
do
#Array offsetold is used to record how many bytes have been downloaded of each subpart
if [ -e $outfile$i ]; then
offsetold[$i]=`ls -l $outfile$i|awk '{print $5}'`
else offsetold[$i]=0
fi
let "offsetold[$i] = ${offsetold[$i]}"
done
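# Resume example: if an earlier run left 120000 bytes in ${outfile}1, then
# offsetold[1]=120000 and only the rest of that part is requested below.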
curr=0
for (( i=1; i<=parts && maxconn>0; i=i+1 ))
do
if [ $i -lt $parts ]; then
if [ ${offsetold[$i]} -lt $lsession ]; then
curl $refurl -r $(($curr+${offsetold[$i]}))-$(($curr+$lsession-1)) $url >> $outfile$i &
maxconn=$(($maxconn-1))
fi
else
if [ ${offsetold[$i]} -lt $(($length-$(($lsession*$(($parts-1)))))) ]; then
curl $refurl -r $(($curr+${offsetold[$i]}))- $url >> $outfile$i &
maxconn=$(($maxconn-1))
fi
fi
curr=$(($curr+$lsession))
done
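# With the example above (length=1000003, parts=5) and nothing downloaded yet,
# the first pass requests the ranges 0-199999, 200000-399999, 400000-599999,
# 600000-799999 and 800000- (open-ended, i.e. to the end of the file).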
#To wait for all curl processes to terminate.
wait
finished="true"
maxconn=0
for (( i=1; i<=parts; i=i+1 ))
do
#Array offsetnew is used to record how many bytes have been downloaded of each subpart
if [ -e $outfile$i ]; then
offsetnew[$i]=`ls -l $outfile$i|awk '{print $5}'`
else offsetnew[$i]=0
fi
let "offsetnew[$i] = ${offsetnew[$i]}"
if [ $i -lt $parts ]; then
if [ ${offsetnew[$i]} -lt $lsession ]; then
finished="false"
fi
else
if [ ${offsetnew[$i]} -lt $(($length-$(($lsession*$(($parts-1)))))) ]; then
finished="false"
fi
fi
#Calculate the "real" available maximum connections supported by server
if [ ${offsetnew[$i]} -gt ${offsetold[$i]} ]; then
maxconn=$(($maxconn+1))
fi
done
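# If no subpart grew during this pass, maxconn stays at 0 and the retry branch
# below waits 10 seconds before trying again with the full number of connections.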
if [ "$finished" == "true" ]; then
break
elif [ $maxconn -eq 0 ]; then
echo "Some errors may occur. retry 10 sec later..."
sleep 10
maxconn=parts
fi
done
echo "All parts have been downloaded. Merging..."
mv --backup=t $outfile"1" $outfile
for (( i=2; i<=parts; i=i+1))
do
cat $outfile$i >> $outfile
rm $outfile$i
done
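# Optional sanity check (a sketch): compare the merged size with the expected length:
#   [ `ls -l "$outfile"|awk '{print $5}'` -eq "$length" ] || echo "Warning: size mismatch"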
echo "Done."