Commit 1ebc0f96 authored by Boris Yumankulov's avatar Boris Yumankulov Committed by Vitaly Lipatov

added --trust-server-names to wget and curl (eterbug #17842)

parent 3f5c92d4
...@@ -315,11 +315,12 @@ WGETQ='' #-q ...@@ -315,11 +315,12 @@ WGETQ='' #-q
# Per-backend "quiet" flags (empty = verbose; the commented value is what
# gets set when quiet mode is enabled).
CURLQ='' #-s
AXELQ='' #-q
# TODO: aria2c
# TODO:
# Options that make each backend save files under the server-provided name.
WGETNAMEOPTIONS='--content-disposition'
CURLNAMEOPTIONS='--remote-name --remote-time --remote-header-name'
AXELNAMEOPTIONS=''
# Filled in by the --trust-server-names option; empty by default.
WGETRUSTSERVERNAMES=''
CURLTRUSTSERVERNAMES=''
WGETNODIRECTORIES=''
WGETCONTINUE=''
...@@ -385,6 +386,7 @@ Options: ...@@ -385,6 +386,7 @@ Options:
  --latest - print only latest version of a file
  --second-latest - print only second to latest version of a file
  --allow-mirrors - check mirrors if url is not accessible
  --trust-server-names - use the name specified by the redirection
  --list|--list-only - print only URLs
  --check-url URL - check if the URL exists (returns HTTP 200 OK)
...@@ -535,6 +537,10 @@ while [ -n "$1" ] ; do ...@@ -535,6 +537,10 @@ while [ -n "$1" ] ; do
WGETRETRYCONNREFUSED="$1" WGETRETRYCONNREFUSED="$1"
CURLRETRYCONNREFUSED="$1" CURLRETRYCONNREFUSED="$1"
;; ;;
--trust-server-names)
    # wget supports this natively; curl has no equivalent, so make it
    # report the post-redirect URL instead (-w %{url_effective}) so the
    # caller can derive the server-chosen filename.
    WGETRUSTSERVERNAMES="--trust-server-names"
    # No embedded single quotes here: the variable is expanded unquoted
    # in __curl, so quotes would be passed to curl as literal characters
    # of the --write-out format string.
    CURLTRUSTSERVERNAMES="-w %{url_effective}"
    ;;
-t|--tries) -t|--tries)
if [ -z "$argvalue" ];then if [ -z "$argvalue" ];then
shift shift
...@@ -1032,9 +1038,9 @@ elif [ "$EGET_BACKEND" = "wget" ] ; then ...@@ -1032,9 +1038,9 @@ elif [ "$EGET_BACKEND" = "wget" ] ; then
# Run wget through docmd with all accumulated option variables.
# The user-agent variant is separate because "$WGETUSERAGENT" must stay
# a single quoted word (the agent string may contain spaces), while the
# other option variables rely on word-splitting and must stay unquoted.
__wget()
{
    if [ -n "$WGETUSERAGENT" ] ; then
        docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK $WGETNODIRECTORIES $WGETCONTINUE $WGETTIMEOUT $WGETREADTIMEOUT $WGETRETRYCONNREFUSED $WGETTRIES $WGETLOADCOOKIES $WGETRUSTSERVERNAMES "$WGETUSERAGENT" "$@"
    else
        docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK $WGETNODIRECTORIES $WGETCONTINUE $WGETTIMEOUT $WGETREADTIMEOUT $WGETRETRYCONNREFUSED $WGETTRIES $WGETLOADCOOKIES $WGETRUSTSERVERNAMES "$@"
    fi
}
...@@ -1082,9 +1088,9 @@ elif [ "$EGET_BACKEND" = "curl" ] ; then ...@@ -1082,9 +1088,9 @@ elif [ "$EGET_BACKEND" = "curl" ] ; then
# Run curl through docmd with all accumulated option variables.
# --fail makes HTTP errors return non-zero; -L follows redirects.
# "$CURLUSERAGENT" is the only quoted option (agent strings may contain
# spaces); the remaining variables rely on word-splitting when empty.
__curl()
{
    if [ -n "$CURLUSERAGENT" ] ; then
        docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK $CURLCONTINUE $CURLMAXTIME $CURLRETRYCONNREFUSED $CURLRETRY $CURLCOOKIE $CURLTRUSTSERVERNAMES "$CURLUSERAGENT" "$@"
    else
        docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK $CURLCONTINUE $CURLMAXTIME $CURLRETRYCONNREFUSED $CURLRETRY $CURLCOOKIE $CURLTRUSTSERVERNAMES "$@"
    fi
}
# put remote content to stdout # put remote content to stdout
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment