#!/bin/sh
# eget - simple wrapper over wget/curl for downloading files over http with wildcard support in the file name part of the URL (wget itself does not support wildcards for http)
# Usage:
# eget http://ftp.altlinux.ru/pub/security/ssl/*
#
# Copyright (C) 2014-2014, 2016, 2020, 2022  Etersoft
# Copyright (C) 2014 Daniil Mikhailov <danil@etersoft.ru>
# Copyright (C) 2016-2017, 2020, 2022 Vitaly Lipatov <lav@etersoft.ru>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

init_eget()
{
PROGDIR=$(dirname "$0")
PROGNAME=$(basename "$0")
CMDSHELL="/bin/sh"
[ "$PROGDIR" = "." ] && PROGDIR="$(pwd)"
if [ "$0" = "/dev/stdin" ] || [ "$0" = "sh" ] ; then
    PROGDIR=""
    PROGNAME=""
fi
}
init_eget


fatal()
{
    echo "FATAL: $*" >&2
    exit 1
}

info()
{
    [ -n "$quiet" ] && return
    echo "$*" >&2
}

eget()
{
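	# runs this script again (or the embedded epm variant) with all IPFS related variables cleared,
	# so helper calls like 'eget --check-url' made from inside this script never recurse into IPFS handling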
	if [ -n "$EPMMODE" ] ; then
		# if embedded in epm
		(unset EGET_IPFS_GATEWAY; unset EGET_IPFS_API ; unset EGET_IPFS_DB ; EGET_BACKEND=$ORIG_EGET_BACKEND internal_tools_eget "$@" )
		return
	fi

	[ -n "$PROGNAME" ] || fatal "pipe mode is not supported"

	local bashopt=''
	#[ -n "$verbose" ] && bashopt='-x'

	(unset EGET_IPFS_GATEWAY; unset EGET_IPFS_API ; unset EGET_IPFS_DB ; EGET_BACKEND=$ORIG_EGET_BACKEND $CMDSHELL $bashopt $PROGDIR/$PROGNAME "$@" )
}

# TODO:
arch="$(uname -m)"

# copied from eepm project

# copied from /etc/init.d/outformat (ALT Linux)
isatty()
{
	# Set a sane TERM required for tput
	[ -n "$TERM" ] || TERM=dumb
	export TERM
	test -t 1
}

isatty2()
{
	# check stderr
	test -t 2
}


check_tty()
{
	isatty || return
	is_command tput >/dev/null 2>/dev/null || return
	# FreeBSD does not support tput -S
	echo | a= tput -S >/dev/null 2>/dev/null || return
	export USETTY="tput -S"
}

: ${BLACK:=0} ${RED:=1} ${GREEN:=2} ${YELLOW:=3} ${BLUE:=4} ${MAGENTA:=5} ${CYAN:=6} ${WHITE:=7}

set_boldcolor()
{
	[ -n "$USETTY" ] || return
	{
		echo bold
		echo setaf $1
	} | $USETTY
}

set_color()
{
	[ -n "$USETTY" ] || return
	{
		echo setaf $1
	} | $USETTY
}

restore_color()
{
	[ -n "$USETTY" ] || return
	{
		echo op; # set Original color Pair.
		echo sgr0; # turn off all special graphics mode (bold in our case).
	} | $USETTY
}


echover()
{
    [ -n "$verbose" ] || return
    echo "$*" >&2
}

# Print the command line (if not quiet)
showcmd()
{
	if [ -z "$quiet" ] ; then
		set_boldcolor $GREEN
		local PROMPTSIG="\$"
		[ "$UID" = 0 ] && PROMPTSIG="#"
		echo " $PROMPTSIG $@"
		restore_color
	fi >&2
}

# Print command line and run command line
docmd()
{
	showcmd "$@"
	"$@"
}

verdocmd()
{
	[ -n "$verbose" ] && showcmd "$@"
	"$@"
}


# copied from epm
# print the path to a command if it exists in $PATH
if a= which which 2>/dev/null >/dev/null ; then
    # the best case is when we have the which command (other ways need checking)
    # TODO: don't use which at all, it is a binary, not a builtin shell command
print_command_path()
{
    a= which -- "$1" 2>/dev/null
}
elif a= type -a type 2>/dev/null >/dev/null ; then
print_command_path()
{
    a= type -fpP -- "$1" 2>/dev/null
}
else
print_command_path()
{
    a= type "$1" 2>/dev/null | sed -e 's|.* /|/|'
}
fi

# check if <arg> is a real command
is_command()
{
    print_command_path "$1" >/dev/null
}

# add realpath if missing
if ! is_command realpath ; then
realpath()
{
    [ -n "$*" ] || return
    readlink -f "$@"
}
fi


# check man glob
filter_glob()
{
	[ -z "$1" ] && cat && return
	# translate glob to regexp
	grep "$(echo "$1" | sed -e 's|\.|\\.|g' -e 's|\*|.*|g' -e 's|\?|.|g' )$"
}
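# e.g. filter_glob 'tsetup.*.tar.xz' keeps only lines matching the regexp 'tsetup\..*\.tar\.xz$'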

filter_order()
{
    if [ -n "$SECONDLATEST" ] ; then
        sort -V | tail -n2 | head -n1
        return
    fi
    [ -z "$LATEST" ] && cat && return
    sort -V | tail -n1
}

have_end_slash()
{
    echo "$1" | grep -q '/$'
}

is_abs_path()
{
    echo "$1" | grep -q '^/'
}

is_fileurl()
{
    is_abs_path "$1" && return
    echo "$1" | grep -q "^file:/"
}

path_from_url()
{
    echo "$1" | sed -e 's|^file://*|/|'
}

is_url()
{
    echo "$1" | grep -q "^[filehtps]*:/"
}

is_strange_url()
{
    local URL="$1"
    is_url "$URL" || return
    echo "$URL" | grep -q "[?&]"
}

is_ipfs_hash()
{
    # If a CID is 46 characters starting with "Qm", it's a CIDv0
    echo "$1" | grep -q -E "^Qm[[:alnum:]]{44}$" && return
    # TODO: CIDv1 support, see https://github.com/multiformats/cid
    return 1
}

is_ipfsurl()
{
    is_ipfs_hash "$1" && return
    echo "$1" | grep -q "^ipfs://"
}

is_httpurl()
{
    # TODO: improve
    echo "$1" | grep -q "^https://" & return
    echo "$1" | grep -q "^http://" & return
}

cid_from_url()
{
    echo "$1" | sed -e 's|^ipfs://*||' -e 's|\?.*||'
}


# args: cmd <URL> <options>
# will run cmd <options> <URL>
download_with_mirroring()
{
    local CMD="$1"
    shift
    local URL="$1"
    shift

    local res
    $CMD "$@" "$URL" && return
    res=$?
    [ -n "$CHECKMIRRORS" ] || return $res

    MIRROR="https://mirror.eterfund.ru"
    SECONDURL="$(echo "$URL" | sed -e "s|^.*://|$MIRROR/|")"
    $CMD "$@" "$SECONDURL" && URL="$SECONDURL" && return

    MIRROR="https://mirror.eterfund.org"
    SECONDURL="$(echo "$URL" | sed -e "s|^.*://|$MIRROR/|")"
    $CMD "$@" "$SECONDURL" && URL="$SECONDURL" && return
}
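# e.g. download_with_mirroring __wget "$URL" -O-   (this is how the wget backend below uses it)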



check_tty

quiet=''
verbose=''
WGETNOSSLCHECK=''
CURLNOSSLCHECK=''
AXELNOSSLCHECK=''
WGETUSERAGENT=''
CURLUSERAGENT=''
AXELUSERAGENT=''
WGETHEADER=''
CURLHEADER=''
AXELHEADER=''
WGETCOMPRESSED=''
CURLCOMPRESSED=''
AXELCOMPRESSED=''
WGETQ='' #-q
CURLQ='' #-s
AXELQ='' #-q
# TODO: aria2c
# TODO: wget --trust-server-names
WGETNAMEOPTIONS='--content-disposition'
CURLNAMEOPTIONS='--remote-name --remote-time --remote-header-name'
AXELNAMEOPTIONS=''

LISTONLY=''
CHECKURL=''
CHECKSITE=''
GETRESPONSE=''
GETFILENAME=''
GETREALURL=''
GETIPFSCID=''
LATEST=''
SECONDLATEST=''
CHECKMIRRORS=''
TARGETFILE=''
FORCEIPV=''

set_quiet()
{
    WGETQ='-q'
    CURLQ='-s'
    AXELQ='-q'
    quiet=1
}


eget_help()
{
cat <<EOF

eget - wget-like download wrapper with wildcard support in the filename part of the URL
Usage: eget [options] http://somesite.ru/dir/na*.log

Options:
    -q|--quiet                - quiet mode
    --verbose                 - verbose mode
    -k|--no-check-certificate - skip SSL certificate verification
    -H|--header               - use <header> (X-Cache:1 for example)
    -U|-A|--user-agent        - send a browser-like User-Agent
    --compressed              - request a compressed response and automatically decompress the content
    -4|--ipv4|--inet4-only    - use only IPv4
    -6|--ipv6|--inet6-only    - use only IPv6
    -O-|-O -                  - output downloaded file to stdout
    -O file                   - download to this file
    --latest                  - print only the latest version of a file
    --second-latest           - print only the second-to-latest version of a file
    --check-mirrors           - check mirrors if the url is not accessible

    --list|--list-only        - print only URLs
    --check-url URL           - check if the URL exists (returns HTTP 200 OK)
    --check-site URL          - check if the site is accessible (returns HTTP 200 OK or 404 Not Found)
    --get-response URL        - get the response with all headers (even if HEAD is not allowed)
    --get-filename URL        - print the filename for the URL (via Content-Disposition if applicable)
    --get-real-url URL        - print the URL after all redirects
    --get-ipfs-cid URL        - print the CID for the URL (after all redirects)

Supported URLs:
  ftp:// http:// https:// file:/ ipfs://

Supported backends (set like EGET_BACKEND=curl)
  wget curl (todo: aria2c)

Examples:
  $ eget http://ftp.somesite.ru/package-*.x64.tar
  $ eget http://ftp.somesite.ru/package *.tar
  $ eget https://github.com/owner/project package*.ext
  $ eget -O myname ipfs://QmVRUjnsnxHWkjq91KreCpUk4D9oZEbMwNQ3rzdjwND5dR
  $ eget --list http://ftp.somesite.ru/package-*.tar
  $ eget --check-url http://ftp.somesite.ru/test
  $ eget --list http://download.somesite.ru 'package-*.tar.xz'
  $ eget --list --latest https://github.com/telegramdesktop/tdesktop/releases 'tsetup.*.tar.xz'

EOF
}


if [ -z "$1" ] ; then
    echo "eget - wget like downloader wrapper with wildcard support, uses wget or curl as backend" >&2
    echo "Run $0 --help to get help" >&2
    exit 1
fi


while [ -n "$1" ] ; do

    case "$1" in
        -h|--help)
            eget_help
            exit
            ;;
        -q|--quiet)
            set_quiet
            ;;
        --verbose)
            verbose="$1"
            ;;
        -k|--no-check-certificate)
            WGETNOSSLCHECK='--no-check-certificate'
            CURLNOSSLCHECK='-k'
            AXELNOSSLCHECK='--insecure'
            ;;
        -H|--header)
            shift
            WGETHEADER="--header=$1"
            CURLHEADER="--header $1"
            AXELHEADER="--header=$1"
            ;;
        -U|-A|--user-agent)
            user_agent="Mozilla/5.0 (X11; Linux $arch) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36"
            WGETUSERAGENT="-U '$user_agent'"
            CURLUSERAGENT="-A '$user_agent'"
            AXELUSERAGENT="--user-agent='$user_agent'"
            ;;
        --compressed)
            CURLCOMPRESSED='--compressed'
            WGETCOMPRESSED='--compression=auto'
            ;;
        -4|--ipv4|--inet4-only)
            FORCEIPV="-4"
            ;;
        -6|--ipv6|--inet6-only)
            FORCEIPV="-6"
            ;;
        --list|--list-only)
            LISTONLY="$1"
            set_quiet
            ;;
        --check-url)
            CHECKURL="$1"
            #set_quiet
            ;;
        --check-site|--check)
            CHECKSITE="$1"
            #set_quiet
            ;;
        --get-filename)
            GETFILENAME="$1"
            ;;
        --get-response)
            GETRESPONSE="$1"
            ;;
        --get-real-url)
            GETREALURL="$1"
            ;;
        --get-ipfs-cid)
            GETIPFSCID="$1"
            ;;
        --latest)
            LATEST="$1"
            ;;
        --second-latest)
            SECONDLATEST="$1"
            ;;
        --check-mirrors)
            CHECKMIRRORS="$1"
            ;;
        -O)
            shift
            TARGETFILE="$1"
            ;;
        -O-)
            TARGETFILE="-"
            ;;
        -*)
            fatal "Unknown option '$1', check eget --help."
            ;;
        *)
            break
            ;;
    esac
    shift
done


#############################
# defaults

# https://github.com/ipfs/kubo/issues/5541
ipfs_diag_timeout='--timeout 60s'

ipfs_api_local="/ip4/127.0.0.1/tcp/5001"
[ -n "$EGET_IPFS_API" ] && ipfs_api_local="$EGET_IPFS_API"

ipfs_api_brave="/ip4/127.0.0.1/tcp/45005"

# Public IPFS http gateways
ipfs_gateways="https://cloudflare-ipfs.com/ipfs https://dweb.link/ipfs https://dhash.ru/ipfs"

# Test data: https://etersoft.ru/templates/etersoft/images/logo.png
ipfs_checkQm="QmYwf2GAMvHxfFiUFL2Mr6KUG6QrDiupqGc8ms785ktaYw"

get_ipfs_brave()
{
    local ipfs_brave="$(ls ~/.config/BraveSoftware/Brave-Browser/*/*/go-ipfs_* 2>/dev/null | sort | tail -n1)"
    [ -n "$ipfs_brave" ] && [ -x "$ipfs_brave" ] || return
    echo "$ipfs_brave"
}

ipfs_api_access()
{
    [ -n "$IPFS_CMD" ] || fatal "IPFS is disabled"
    if [ -n "$verbose" ] ; then
         verdocmd $IPFS_CMD --api $IPFS_API $ipfs_diag_timeout diag sys >/dev/null
    else
         verdocmd $IPFS_CMD --api $IPFS_API $ipfs_diag_timeout diag sys >/dev/null 2>/dev/null
    fi
}

ipfs_check()
{
    [ -n "$IPFS_CMD" ] || fatal "IPFS is disabled"
    verdocmd $IPFS_CMD --api $IPFS_API $ipfs_diag_timeout cat "$1" >/dev/null
}

check_ipfs_gateway()
{
    local ipfs_gateway="$1"
    # TODO: check checksum
    if docmd eget --check-url "$ipfs_gateway/$ipfs_checkQm" ; then
        ipfs_mode="gateway"
        return
    fi

    if docmd eget --check-site "$(dirname $ipfs_gateway)" ; then
       info "IPFS gateway $ipfs_gateway is accessible, but can't return shared $ipfs_checkQm"
    else
       info "IPFS gateway $(dirname $ipfs_gateway) is not accessible"
    fi

    return 1
}

select_ipfs_gateway()
{
    # check public http gateways
    for ipfs_gateway in $ipfs_gateways ; do
        check_ipfs_gateway $ipfs_gateway || continue
        IPFS_GATEWAY="$ipfs_gateway"
        return
    done

    ipfs_mode="disabled"
    return 1
}


select_ipfs_mode()
{
    IPFS_CMD="$(print_command_path ipfs)"
    if [ -n "$IPFS_CMD" ] ; then
        IPFS_API="$ipfs_api_local"
        if ipfs_api_access ; then
            ipfs_mode="local" && return
            #if ipfs_check "$ipfs_checkQm" ; then
            #    ipfs_mode="local" && return
            #else
            #    info "Skipped local: it is accessible via $IPFS_CMD --api $IPFS_API, but can't return shared $ipfs_checkQm"
            #fi
        fi
    fi

    IPFS_CMD="$(get_ipfs_brave)"
    # if no EGET_IPFS_API is set, check Brave
    if [ -z "$EGET_IPFS_API" ] && [ -n "$IPFS_CMD" ] ; then
        IPFS_API="$ipfs_api_brave"
        if ipfs_api_access ; then
            ipfs_mode="brave" && return
            #if ipfs_check "$ipfs_checkQm" ; then
            #    ipfs_mode="brave" && return
            #else
            #    info "Skipped Brave: it is accessible via $IPFS_CMD --api $IPFS_API, but can't return shared $ipfs_checkQm"
            #fi
        fi
    fi

    IPFS_GATEWAY=''

    # if an http gateway is set explicitly, use only it
    if [ -n "$EGET_IPFS_GATEWAY" ] ; then
        check_ipfs_gateway "$EGET_IPFS_GATEWAY" && IPFS_GATEWAY="$EGET_IPFS_GATEWAY" || ipfs_mode="disabled"
        return
    fi

    select_ipfs_gateway
}


# Functions for working with the eget IPFS db
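# db format: one "URL CID filename" line per entry (see put_cid_and_url below)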
get_cid_by_url()
{
    local URL="$1"
    [ -r "$EGET_IPFS_DB" ] || return
    is_fileurl "$URL" && return 1
    grep -F "$URL Qm" "$EGET_IPFS_DB" | cut -f2 -d" " | grep -E "Qm[[:alnum:]]{44}" | head -n1
}

put_cid_and_url()
{
    local URL="$1"
    local CID="$2"
    local FN="$3"
    [ -w "$EGET_IPFS_DB" ] || return

    is_fileurl "$URL" && return

    echo "$URL $CID $FN" >> "$EGET_IPFS_DB"
    info "Placed in $EGET_IPFS_DB: $URL $CID $FN"
}

get_filename_by_cid()
{
    local CID="$1"
    [ -z "$EGET_IPFS_DB" ] && basename "$CID" && return
    grep -F " $CID " "$EGET_IPFS_DB" | head -n1 | cut -f3 -d" "
}

get_url_by_cid()
{
    local CID="$1"
    [ -z "$EGET_IPFS_DB" ] && echo "$CID" && return
    grep -F " $CID " "$EGET_IPFS_DB" | head -n1 | cut -f1 -d" "
}

###################


ipfs_mode="$EGET_IPFS"

# enable auto mode when set $EGET_IPFS_DB
[ -z "$ipfs_mode" ] && [ -n "$EGET_IPFS_DB" ] && ipfs_mode="auto"

if [ -n "$LISTONLY$CHECKURL$CHECKSITE" ] ; then
    ipfs_mode=""
    EGET_IPFS_DB=''
fi


if [ -n "$ipfs_mode" ] && [ -n "$EGET_IPFS_DB" ] ; then
    ddb="$(dirname "$EGET_IPFS_DB")"
    if [ -d "$ddb" ] ; then
        info "Using eget IPFS db $EGET_IPFS_DB"
        [ -r "$EGET_IPFS_DB" ] || touch "$EGET_IPFS_DB"
    else
        EGET_IPFS_DB=''
    fi
fi


# detect whether we were run with ipfs:// or with auto mode
if is_ipfsurl "$1" && [ -z "$ipfs_mode" ] || [ "$ipfs_mode" = "auto" ] ; then
    info "Autodetecting available IPFS relay..."
    select_ipfs_mode
    info "Auto selected IPFS mode: $ipfs_mode"
    [ "$ipfs_mode" = "gateway" ] && info "Since the ipfs command is missing, the http gateway will be used."
else
    [ "$ipfs_mode" = "gateway" ] && select_ipfs_gateway
    [ -n "$ipfs_mode" ] && info "IPFS mode: $ipfs_mode"
fi

IPFS_CMD=''

if [ "$ipfs_mode" = "disabled" ] ; then

ipfs_get()
{
    fatal "IPFS is disabled"
}

ipfs_put()
{
    fatal "IPFS is disabled"
}

ipfs_cat()
{
    fatal "IPFS is disabled"
}


elif [ "$ipfs_mode" = "brave" ] ; then
    IPFS_CMD="$(get_ipfs_brave)" || fatal "Can't find ipfs command in Brave"
    IPFS_PRETTY_CMD="~Brave-Browser/$(basename $IPFS_CMD)"
    IPFS_API="$ipfs_api_brave"
    ipfs_api_access || fatal "Can't access the Brave IPFS API (is the Brave browser not running or IPFS not activated?)"
    info "Will use $IPFS_PRETTY_CMD --api $IPFS_API"

elif [ "$ipfs_mode" = "local" ] ; then
    IPFS_CMD="$(print_command_path ipfs)" || fatal "Can't find ipfs command"
    IPFS_PRETTY_CMD="$IPFS_CMD"
    IPFS_API="$ipfs_api_local"
    ipfs_api_access || fatal "Can't access the IPFS API (is the ipfs daemon not running?)"
    info "Will use $IPFS_PRETTY_CMD --api $IPFS_API"

elif [ "$ipfs_mode" = "gateway" ] ; then
    info "Will use eget $IPFS_GATEWAY/HASH"

ipfs_get_real_url()
{
    [ -n "$IPFS_GATEWAY" ] || fatal "ipfs http gateway is not set"
    echo "$IPFS_GATEWAY/$1"
}

ipfs_get()
{
    if [ -n "$2" ] ; then
        docmd eget -O "$2" "$(ipfs_get_real_url "$1")"
    else
        docmd eget "$(ipfs_get_real_url "$1")"
    fi
}

ipfs_cat()
{
    # FIXME:
    ipfs_get "$1" "-"
}

ipfs_put()
{
    info "IPFS put skipped when a gateway is used"
    return 1
}
elif [ -z "$ipfs_mode" ] ; then
    :
else
    fatal "Unsupported eget ipfs mode $ipfs_mode"
fi

if [ -n "$IPFS_CMD" ] ; then

ipfs_get_real_url()
{
    return 1
}

ipfs_get()
{
    [ -n "$IPFS_CMD" ] || fatal "ipfs api is not usable"
    if [ -n "$2" ] ; then
        showcmd $IPFS_PRETTY_CMD --api $IPFS_API get -o "$2" "$1"
        $IPFS_CMD --api $IPFS_API get -o "$2" "$1"
    else
        showcmd $IPFS_PRETTY_CMD --api $IPFS_API get "$1"
        $IPFS_CMD --api $IPFS_API get "$1"
    fi
}

ipfs_put()
{
    [ -n "$IPFS_CMD" ] || fatal "ipfs api is not usable"

    # detect if -q is used (will output Qm instead of added Qm)
    local qu="$1"
    [ "$qu" = "-q" ] || qu=''

    showcmd $IPFS_PRETTY_CMD --api $IPFS_API add "$@"

    local res
    res="$($IPFS_CMD --api $IPFS_API add "$@")" || return

    if [ -z "$qu" ] ; then
        res="$(echo "$res" | grep "^added Qm")" || return
        res="$(echo "$res" | cut -f2 -d" ")"
    fi

    is_ipfs_hash "$res" && echo "$res" && return
    fatal "Can't recognize $res IPFS hash"
}

ipfs_cat()
{
    [ -n "$IPFS_CMD" ] || fatal "ipfs api is not usable"
    showcmd $IPFS_PRETTY_CMD --api $IPFS_API cat "$1"
    $IPFS_CMD --api $IPFS_API cat "$1"
}

fi
###############################



WGET="$(print_command_path wget)"
CURL="$(print_command_path curl)"

ORIG_EGET_BACKEND="$EGET_BACKEND"
# override backend
if is_fileurl "$1" ; then
    EGET_BACKEND="file"
elif is_ipfsurl "$1" ; then
    EGET_BACKEND="ipfs"
fi


case "$EGET_BACKEND" in
    file|ipfs)
        ;;
    wget)
        [ -n "$WGET" ] || fatal "There are no wget in the system but you forced using it via EGET_BACKEND. Install it with $ epm install wget"
        ;;
    curl)
        [ -n "$CURL" ] || fatal "There are no curl in the system but you forced using it via EGET_BACKEND. Install it with $ epm install curl"
        ;;
    '')
        [ -n "$WGET" ] && EGET_BACKEND="wget"
        [ -z "$EGET_BACKEND" ] && [ -n "$CURL" ] && EGET_BACKEND="curl"
        [ -n "$EGET_BACKEND" ] || fatal "There are no wget nor curl in the system. Install something with $ epm install wget"
        ;;
    *)
        fatal "Uknown EGET_BACKEND $EGET_BACKEND"
        ;;
esac
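# e.g. EGET_BACKEND=curl eget http://example.com/file.tar forces the curl backend (example URL)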



if [ "$EGET_BACKEND" = "file" ] ; then

# put remote content to stdout
url_scat()
{
    local URL="$1"
    cat "$(path_from_url "$URL")"
}
# download to the default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       cp -av "$(path_from_url "$URL")" "$2"
       return
    fi
    cp -av "$(path_from_url "$URL")" .
}

url_check_accessible()
{
    local URL="$1"
    test -f "$(path_from_url "$URL")"
}

url_check_available()
{
    local URL="$1"
    test -f "$(path_from_url "$URL")"
}

url_get_filename()
{
    basename "$1"
}

url_get_real_url()
{
    echo "$1"
}

elif [ "$EGET_BACKEND" = "ipfs" ] ; then

# put remote content to stdout
url_scat()
{
    local URL="$1"
    ipfs_cat "$(cid_from_url "$URL")"
}
# download to the default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       ipfs_get "$(cid_from_url "$URL")" "$2"
       return
    fi

    local fn="$(url_print_filename_from_url "$URL")"
    if [ -n "$fn" ] ; then
       ipfs_get "$(cid_from_url "$URL")" "$fn"
       return
    fi

    ipfs_get "$(cid_from_url "$URL")"
}

url_check_accessible()
{
    local URL="$1"
    # TODO: improve me
    scat "$URL" >/dev/null
}

url_check_available()
{
    local URL="$1"
    # TODO: improve me
    scat "$URL" >/dev/null
}

url_print_filename_from_url()
{
    local URL="$1"
    local fn="$(echo "$URL" | sed -e 's|ipfs://.*\?filename=||')"
    [ "$URL" != "$fn" ] && echo "$fn" && return
}

url_get_filename()
{
    local URL="$1"
    url_print_filename_from_url "$URL" && return
    local CID="$(cid_from_url "$URL")"
    get_filename_by_cid "$CID"
}

url_get_real_url()
{
    local URL="$1"
    local CID="$(cid_from_url "$URL")"
    # if we use gateway, return URL with gateway
    ipfs_get_real_url "$URL" && return
    get_url_by_cid "$CID"
}


elif [ "$EGET_BACKEND" = "wget" ] ; then
__wget()
{
    if [ -n "$WGETUSERAGENT" ] ; then
        docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK "$WGETUSERAGENT" "$@"
    else
        docmd $WGET $FORCEIPV $WGETQ $WGETCOMPRESSED $WGETHEADER $WGETNOSSLCHECK "$@"
    fi
}

# put remote content to stdout
url_scat()
{
    local URL="$1"
    download_with_mirroring __wget "$URL" -O-
}
# download to the default name or to $2
url_sget()
{
    local URL="$1"
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$URL"
       return
    elif [ -n "$2" ] ; then
       download_with_mirroring __wget "$URL" -O "$2"
       return
    fi
# TODO: rsync support for known hosts?
# Do not download if the size and date are the same
# -nc
# TODO: overwrite always
    download_with_mirroring __wget "$URL" $WGETNAMEOPTIONS
}

url_get_response()
{
    local URL="$1"
    local answer
    answer="$(quiet=1 __wget --spider -S "$URL" 2>&1)"
    # HTTP/1.1 405 Method Not Allowed
    # HTTP/1.1 404 Not Found
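    # if HEAD is rejected (405/404), retry with a ranged GET (bytes=0-0) just to fetch the headers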
    if echo "$answer" | grep -q "^ *HTTP/[12.]* 40[45]" ; then
        (quiet=1 __wget -O/dev/null --header="Range: bytes=0-0" -S "$URL" 2>&1)
        return
    fi
    echo "$answer"
}


elif [ "$EGET_BACKEND" = "curl" ] ; then

__curl()
{
    if [ -n "$CURLUSERAGENT" ] ; then
        docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER "$CURLUSERAGENT" $CURLNOSSLCHECK "$@"
    else
        docmd $CURL $FORCEIPV --fail -L $CURLQ $CURLCOMPRESSED $CURLHEADER $CURLNOSSLCHECK "$@"
    fi
}
# put remote content to stdout
url_scat()
{
    local URL="$1"
    download_with_mirroring __curl "$URL" --output -
}
# download to the default name or to $2
url_sget()
{
    local URL="$1"
    local res
    if [ "$2" = "/dev/stdout" ] || [ "$2" = "-" ] ; then
       scat "$1"
       return
    elif [ -n "$2" ] ; then
       download_with_mirroring __curl "$URL" --output "$2"
       return
    fi

    download_with_mirroring __curl "$URL" $CURLNAMEOPTIONS
}

url_get_response()
{
    local URL="$1"
    local answer
    answer="$(quiet=1 __curl -LI "$URL" 2>&1)"
    # HTTP/1.1 405 Method Not Allowed
    # HTTP/1.1 404 Not Found
    if echo "$answer" | grep -q "^ *HTTP/[12.]* 40[45]" ; then
        (quiet=1 __curl -L -i -r0-0 "$URL" 2>&1)
        return
    fi
    echo "$answer"
}

else
    fatal "Unknown EGET_BACKEND '$EGET_BACKEND', logical error."
fi


# Common code for both wget and curl (http related)
if [ "$EGET_BACKEND" = "wget" ] || [ "$EGET_BACKEND" = "curl" ] ; then

url_get_headers()
{
    local URL="$1"
    url_get_response "$URL" | grep -i "^ *[[:alpha:]].*: " | sed -e 's|^ *||' -e 's|\r$||'
}

url_check_accessible()
{
    local URL="$1"
    url_get_response "$URL" | grep "HTTP/" | tail -n1 | grep -q -w "200\|404"
}

url_check_available()
{
    local URL="$1"
    url_get_response "$URL" | grep "HTTP/" | tail -n1 | grep -q -w "200"
}

url_get_header()
{
    local URL="$1"
    local HEADER="$2"
    url_get_headers "$URL" | grep -i "^ *$HEADER: " | sed -e "s|^ *$HEADER: ||i"
}

url_get_real_url()
{
    local URL="$1"

    ! is_httpurl "$URL" && echo "$URL" && return

    # don't check location if we have made form of the URL
    [ -n "$MADEURL" ] && [ "$MADEURL" = "$URL" ] && echo "$URL" && return

    local loc
    for loc in $(url_get_header "$URL" "Location" | tac | sed -e 's| .*||') ; do
        # hack to construct a full url from a relative Location
        if is_abs_path "$loc" ; then
            loc="$(concatenate_url_and_filename "$(get_host_only "$URL")" "$loc")" #"
        fi
        if ! is_strange_url "$loc" ; then
            echo "$loc"
            return
        fi
    done

    echo "$URL"
}

url_get_filename()
{
    local URL="$1"

    ! is_httpurl "$URL" && basename "$URL" && return

    # See https://www.cpcwood.com/blog/5-aws-s3-utf-8-content-disposition
    # https://www.rfc-editor.org/rfc/rfc6266
    local cd="$(url_get_header "$URL" "Content-Disposition")"
    if echo "$cd" | grep -qi "filename\*= *UTF-8" ; then
        #Content-Disposition: attachment; filename="unityhub-amd64-3.3.0.deb"; filename*=UTF-8''"unityhub-amd64-3.3.0.deb"
        echo "$cd" | sed -e "s|.*filename\*= *UTF-8''||i" -e 's|^"||' -e 's|";$||' -e 's|"$||'
        return
    fi
    if echo "$cd" | grep -qi "filename=" ; then
        #Content-Disposition: attachment; filename=postman-linux-x64.tar.gz
        #content-disposition: attachment; filename="code-1.77.1-1680651749.el7.x86_64.rpm"
        echo "$cd" | sed -e 's|.*filename= *||i' -e 's|^"||' -e 's|";.*||' -e 's|"$||'
        return
    fi

    basename "$(url_get_real_url "$URL")"
}

fi


if [ -n "$ipfs_mode" ] && [ -n "$EGET_IPFS_DB" ] &&  ! is_ipfsurl "$1"  ; then

download_to_ipfs()
{
    local URL="$1"
    local res
    #res="$(url_scat "$URL" | ipfs_put )" || return
    #res="$(echo "$res" | grep "^added Qm")" || return 1
    #CID="$(echo "$res" | cut -f2 -d" ")"
    # with -q to disable progress (mixed with download progress)
    res="$(url_scat "$URL" | ipfs_put -q)" || return
    is_ipfs_hash "$res" || return 1
    echo "$res"
}

# put remote content to stdout
scat()
{
    local URL="$1"
    url_scat "$URL"

    # This is a list-only function. Don't save to IPFS
    return

    ###################

    local CID="$(get_cid_by_url "$URL")"
    if [ -n "$CID" ] ; then
        info "$URL -> $CID"
        ipfs_cat "$CID"
        return
    fi

    CID="$(download_to_ipfs "$URL")" || return

    ipfs_cat "$CID" || return

    local FN="$(url_get_filename "$URL")" || return

    put_cid_and_url "$URL" "$CID" "$FN"
}

# download to the default name or to $2
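# with the IPFS db enabled: resolve the real URL, reuse a known CID from the db if any,
# otherwise download the file and (unless a gateway is used) add it to IPFS and record it in the db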
sget()
{
    local URL="$1"
    local TARGET="$2"

    if [ -n "$GETFILENAME" ] ; then
        get_filename "$URL"
        exit
    fi

    local REALURL="$(get_real_url "$URL")" || return

    if [ -n "$GETREALURL" ] ; then
        echo "$REALURL"
        exit
    fi

    # skip ipfs for cat
    if [ "$TARGET" = "/dev/stdout" ] || [ "$TARGET" = "-" ] ; then
       url_scat "$URL"
       return
    fi


    #if is_strange_url "$REALURL" ; then
    #    info "Just download strange URL $REALURL, skipping IPFS"
    #    url_sget "$REALURL" "$TARGET"
    #    return
    #fi

    local CID="$(get_cid_by_url "$REALURL")"
    if [ -n "$CID" ] ; then

        if [ -n "$GETIPFSCID" ] ; then
            echo "$CID"
            exit
        fi

        if [ -n "$GETFILENAME" ] ; then
            get_filename_by_cid "$CID"
            exit
        fi

        if [ -n "$GETREALURL" ] ; then
            get_url_by_cid "$CID"
            exit
        fi

        if [ -z "$TARGET" ] ; then
            # TODO: in some cases we can get name from URL...
            TARGET="$(get_filename_by_cid "$CID")"
            if [ -z "$TARGET" ] ; then
                TARGET="$CID"
            fi
        fi
        [ "$URL" = "$REALURL" ] && info "$URL -> $CID -> $TARGET" || info "$URL -> $REALURL -> $CID -> $TARGET"
        ipfs_get "$CID" "$TARGET" && return

        # failed to get from IPFS, fall back to downloading from the URL
        url_sget "$REALURL" "$TARGET"
        return
    fi


    # download and put to IPFS
    local FN="$(url_get_filename "$REALURL")" || return
    if [ -z "$TARGET" ] ; then
        TARGET="$FN"
    fi

    if [ -n "$GETIPFSCID" ] ; then
         # add to IPFS and print out CID
         CID="$(ipfs_put --progress "$REALURL")" || return
         echo "$CID"
         exit
    fi

    # download file and add to IPFS
    url_sget "$REALURL" "$TARGET" || return

    # don't do ipfs put when gateway is using
    [ "$ipfs_mode" = "gateway" ] && return

    CID="$(ipfs_put --progress "$TARGET")" || return

    put_cid_and_url "$REALURL" "$CID" "$FN"
}

check_url_is_available()
{
    local URL="$1"
    local REALURL="$(get_real_url "$URL")" || return
    local CID="$(get_cid_by_url "$REALURL")"
    if [ -n "$CID" ] ; then
        [ "$URL" = "$REALURL" ] && info "$URL -> $CID" || info "$URL -> $REALURL -> $CID"
        ipfs_check "$CID"
        return
    fi

    CID="$(download_to_ipfs "$REALURL")" || return

    local FN="$(url_get_filename "$REALURL")" || return
    ipfs_cat "$CID" >/dev/null || return
    put_cid_and_url "$REALURL" "$CID" "$FN"
}

check_url_is_accessible()
{
    check_url_is_available "$@"
}

get_filename()
{
    url_get_filename "$1"
}

get_real_url()
{
    url_get_real_url "$1"
}

else
scat()
{
    url_scat "$@"
}

sget()
{
    if [ -n "$GETFILENAME" ] ; then
        get_filename "$1"
        exit
    fi

    if [ -n "$GETREALURL" ] ; then
        get_real_url "$1"
        exit
    fi

    url_sget "$@"
}

check_url_is_accessible()
{
    url_check_accessible "$@"
}

check_url_is_available()
{
    url_check_available "$@"
}

get_filename()
{
    url_get_filename "$1"
}

get_real_url()
{
    url_get_real_url "$1"
}

fi


get_github_urls()
{
    # https://github.com/OWNER/PROJECT
    local owner="$(echo "$1" | sed -e "s|^https://github.com/||" -e "s|/.*||")" #"
    local project="$(echo "$1" | sed -e "s|^https://github.com/$owner/||" -e "s|/.*||")" #"
    [ -n "$owner" ] || fatal "Can't get owner from $1"
    [ -n "$project" ] || fatal "Can't get project from $1"
    local URL="https://api.github.com/repos/$owner/$project/releases"
    # the api sometimes returns unformatted json
    scat $URL | sed -e 's|,\(["{]\)|,\n\1|g' | \
        grep -i -o -E '"browser_download_url": *"https://.*"' | cut -d'"' -f4
}
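# e.g. get_github_urls https://github.com/owner/project prints one browser_download_url per release asset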

# drop file path from URL
get_host_only()
{
    echo "$1/" | grep -Eo '(.*://[^/]+)'
}
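# e.g. get_host_only https://example.com/dir/file prints https://example.com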

concatenate_url_and_filename()
{
    local url="$(echo "$1" | sed -e 's|/*$||' )"
    local fn="$(echo "$2" | sed -e 's|^/*||' )"
    echo "$url/$fn"
}

# MADEURL holds the last constructed URL, as a flag that it is already in its final form
MADEURL=''

# Args: URL filename
make_fileurl()
{
    local url="$1"
    local fn="$2"

    fn="$(echo "$fn" | sed -e 's|^./||' -e 's|^/+||')"

    if is_fileurl "$url" ; then
        # if it is a file url, keep it as is
        :
    elif is_abs_path "$fn" ; then
        # if the file path starts from the root of the site
        url="$(get_host_only "$url")"
    elif ! have_end_slash "$url" ; then
        url="$(dirname "$url")"
    fi

    MADEURL="$(concatenate_url_and_filename "$url" "$fn")"
    echo "$MADEURL"
}
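# e.g. make_fileurl "http://site.example/dir/index.html" "pkg.tar" prints http://site.example/dir/pkg.tar (example URL)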

get_urls()
{
    if is_fileurl "$URL" ; then
        ls -1 "$(path_from_url "$URL")"
        return
    fi

    # Markdown support
    # https://raw.githubusercontent.com/dotnet/core/main/release-notes/8.0/8.0.3/8.0.103.md
    if echo "$URL" | grep -q "\.md$" ; then
        scat $URL | grep "https*" | sed -e 's|.*\(https*://\)|\1|'
        return
    fi

    # cat the html, split it into lines by tags and extract the hrefs only
    scat $URL | sed -e 's|<|<\n|g' -e 's|data-file=|href=|g' -e "s|href=http|href=\"http|g" -e "s|>|\">|g" -e "s|'|\"|g" | \
         grep -i -o -E 'href="(.+)"' | sed -e 's|&amp;|\&|' | cut -d'"' -f2
}


if [ -n "$CHECKURL" ] ; then
    #set_quiet
    URL="$1"
    check_url_is_available "$URL"
    res=$?
    if [ -n "$verbose" ] ; then
        [ "$res" = "0" ] && echo "$URL is accessible via network and file exists" || echo "$URL is NOT accessible via network or file does not exist"
    fi
    exit $res
fi

if [ -n "$CHECKSITE" ] ; then
    #set_quiet
    URL="$1"
    check_url_is_accessible "$URL"
    res=$?
    if [ -n "$verbose" ] ; then
        [ "$res" = "0" ] && echo "$URL is accessible via network" || echo "$URL is NOT accessible via network"
    fi
    exit $res
fi

if [ -n "$GETRESPONSE" ] ; then
    url_get_response "$1"
    exit
fi


# separate part for github downloads
if echo "$1" | grep -q "^https://github.com/" && \
   echo "$1" | grep -q -v "/download/" && [ -n "$2" ] ; then
    MASK="$2"

    if [ -n "$LISTONLY" ] ; then
        get_github_urls "$1" | filter_glob "$MASK" | filter_order
        exit
    fi

    ERROR=0
    for fn in $(get_github_urls "$1" | filter_glob "$MASK" | filter_order) ; do
        MADEURL="$fn" # mark it is the end form of the URL
        sget "$fn" "$TARGETFILE" || ERROR=1
        [ -n "$TARGETFILE" ] && [ "$ERROR" = "0" ] && break
    done
    exit
fi

if is_ipfsurl "$1" ; then
    [ -n "$2" ] && fatal "too many args when ipfs://Qm... used: extra '$2' arg"
    sget "$1" "$TARGETFILE"
    exit
fi

# if mask is the second arg
if [ -n "$2" ] ; then
    URL="$1"
    MASK="$2"
    SEPMASK="$2"
else
    if have_end_slash "$1" ; then
        URL="$1"
        MASK=""
    else
        # drop mask part
        URL="$(dirname "$1")/"
        # wildcards allowed only in the last part of path
        MASK=$(basename "$1")
    fi

fi

# https://www.freeoffice.com/download.php?filename=freeoffice-2021-1062.x86_64.rpm
if echo "$URL" | grep -q "[*\[\]]" ; then
    fatal "Error: there is a globbing symbol (*[]) in $URL. Globbing is allowed only in the mask part."
fi

is_url "$MASK" && fatal "eget supports only one URL as argument"
[ -n "$3" ] && fatal "too many args: extra '$3'. May be you need use quotes for arg with wildcards."

# TODO: curl?
# If ftp protocol, just download
if echo "$URL" | grep -q "^ftp://" ; then
    [ -n "$LISTONLY" ] && fatal "TODO: list files for ftp:// is not supported yet"
    sget "$1" "$TARGETFILE"
    exit
fi


if [ -n "$LISTONLY" ] ; then
    for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
        is_url "$fn" && echo "$fn" && continue
        make_fileurl "$URL" "$fn"
    done
    exit
fi

is_wildcard()
{
    echo "$1" | grep -q "[*?]" && return
    echo "$1" | grep -q "\]" && return
    echo "$1" | grep -q "\[" && return
}

# If there is no wildcard symbol like an asterisk, just download
if [ -z "$SEPMASK" ] && ! is_wildcard "$MASK" || echo "$MASK" | grep -q "[?].*="; then
    sget "$1" "$TARGETFILE"
    exit
fi

ERROR=0
for fn in $(get_urls | filter_glob "$MASK" | filter_order) ; do
    is_url "$fn" || fn="$(make_fileurl "$URL" "$fn" )" #"
    sget "$fn" "$TARGETFILE" || ERROR=1
    [ -n "$TARGETFILE" ] && [ "$ERROR" = "0" ] && break
done
exit $ERROR