/usr/share/plowshare/modules/uploaded_net.sh is in plowshare-modules 0~git20160124.8a8190d-1.

This file is owned by root:root, with mode 0644.

The contents of the file are shown below.

# Plowshare uploaded.net module
# Copyright (c) 2011-2015 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare.  If not, see <http://www.gnu.org/licenses/>.

MODULE_UPLOADED_NET_REGEXP_URL='http://\(www\.\)\?\(uploaded\.\(to\|net\)\|ul\.to\)/'
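# For reference, the pattern above matches URLs such as (IDs made up):
#   http://uploaded.net/file/abc123
#   http://uploaded.to/file/abc123
#   http://ul.to/abc123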

MODULE_UPLOADED_NET_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account
LINK_PASSWORD,p,link-password,S=PASSWORD,Used in password-protected files"
MODULE_UPLOADED_NET_DOWNLOAD_RESUME=no
MODULE_UPLOADED_NET_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=yes
MODULE_UPLOADED_NET_DOWNLOAD_SUCCESSIVE_INTERVAL=7200

MODULE_UPLOADED_NET_UPLOAD_OPTIONS="
ADMIN_CODE,,admin-code,s=ADMIN_CODE,Admin code (used for file deletion)
AUTH,a,auth,a=USER:PASSWORD,User account (mandatory)
FOLDER,,folder,s=FOLDER,Folder to upload files into
LINK_PASSWORD,p,link-password,S=PASSWORD,Protect a link with a password
PRIVATE_FILE,,private,,Do not allow others to download the file"
MODULE_UPLOADED_NET_UPLOAD_REMOTE_SUPPORT=no

MODULE_UPLOADED_NET_DELETE_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,User account (mandatory)"

MODULE_UPLOADED_NET_LIST_OPTIONS=""
MODULE_UPLOADED_NET_LIST_HAS_SUBFOLDERS=no

MODULE_UPLOADED_NET_PROBE_OPTIONS=""

# Static function. Proceed with login
# $1: authentication
# $2: cookie file
# $3: base url
# stdout: account type ("free" or "premium") on success
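#
# Example call (a sketch; credentials are placeholders):
#   ACCOUNT=$(uploaded_net_login 'user:password' "$COOKIE_FILE" \
#       'http://uploaded.net') || return
#   # $ACCOUNT is now 'free' or 'premium'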
uploaded_net_login() {
    local -r AUTH=$1
    local -r COOKIE_FILE=$2
    local -r BASE_URL=$3
    local LOGIN_DATA PAGE ERR TYPE ID NAME

    # Note: Single quotes are intentional; post_login substitutes the $USER
    # and $PASSWORD placeholders itself
    LOGIN_DATA='id=$USER&pw=$PASSWORD'
    PAGE=$(post_login "$AUTH" "$COOKIE_FILE" "$LOGIN_DATA" \
        "$BASE_URL/io/login") || return

    # Note: Cookies "login" + "auth" get set on successful login
    ERR=$(parse_json_quiet 'err' <<< "$PAGE")

    if [ -n "$ERR" ]; then
        log_error "Remote error: $ERR"
        return $ERR_LOGIN_FAILED
    fi

    # Note: Login changes site's language according to account's preference
    uploaded_net_switch_lang "$COOKIE_FILE" "$BASE_URL" || return

    # Determine account type
    PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/me") || return
    ID=$(parse 'ID:' '<em.*>\(.*\)</em>' 1 <<< "$PAGE") || return
    TYPE=$(parse 'Status:' '<em>\(.*\)</em>' 1 <<< "$PAGE") || return
    NAME=$(parse_quiet 'Alias:' '<b><b>\(.*\)</b></b>' 1 <<< "$PAGE")

    if [ "$TYPE" = 'Free' ]; then
        TYPE='free'
    elif [ "$TYPE" = 'Premium' ]; then
        TYPE='premium'
    else
        log_error 'Could not determine account type. Site updated?'
        return $ERR_FATAL
    fi

    log_debug "Successfully logged in as $TYPE member '$ID' (${NAME:-n/a})"
    echo "$TYPE"
}

# Switch language to English
# $1: cookie file
# $2: base URL
uploaded_net_switch_lang() {
    # Note: Language is associated with session, no new cookie is set
    curl -b "$1" -o /dev/null "$2/language/en" || return
}

# Get canonical URL
# $1: input url
# stdout: rebased URL
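#
# Example (hypothetical ID): 'http://ul.to/abc123' is rewritten to
# 'http://uploaded.net/file/abc123'; a HEAD redirect is then followed
# unless it points to a direct download ('/dl/').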
uploaded_net_get_canonical_url() {
    local U REDIR
    U=$(replace '://ul.to/file/' '://uploaded.net/file/' <<< "$1")
    U=$(replace '://ul.to/' '://uploaded.net/file/' <<< "$U")
    REDIR=$(curl --head "$U" | grep_http_header_location_quiet)
    if [ -n "$REDIR" ] && ! match '/dl/' "$REDIR"; then
        echo "$REDIR"
    else
        echo "$U"
    fi
}

# Simple and limited parsing of flawed JSON
#
# Notes:
# - Large parts copied from "parse_json" in core.sh (look there for further documentation)
# - Also accepts flawed JSON (unquoted or single quoted names/strings)
#
# $1: variable name (string)
# $2: (optional) preprocess option. Accepted values are:
#     - "join": make a single line of input stream.
#     - "split": split input buffer on comma character (,).
# stdin: JSON data
# stdout: result
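#
# Example with the site's flawed JSON (hypothetical URL):
#   uploaded_net_parse_json_alt 'url' \
#       <<< "{type:'download',url:'http://stor4.uploaded.net/dl/xyz'}"
#   # stdout: http://stor4.uploaded.net/dl/xyz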
uploaded_net_parse_json_alt() {
    local -r D="[\"']\?" # string/name delimiter
    local -r S="^.*$D$1$D[[:space:]]*:[[:space:]]*" # start of JSON string
    local -r E='\([,}[:space:]].*\)\?$' # end of JSON string
    local STRING PRE

    if [ "$2" = 'join' ]; then
        PRE="tr -d '\n\r' |"
    elif [ "$2" = 'split' ]; then
        PRE="sed -e 's/,[[:space:]]*/\n/g' |"
    fi

    STRING=$($PRE sed -n \
        -e "s/$S\(-\?\(0\|[1-9][[:digit:]]*\)\(\.[[:digit:]]\+\)\?\([eE][-+]\?[[:digit:]]\+\)\?\)$E/\1/p" \
        -e "s/$S\(true\|false\|null\)$E/\1/p" \
        -e "s/\\\\\"/\\\\q/g; s/$S$D\([^,}[:space:]\"']*\)$D$E/\1/p")

    if [ -z "$STRING" ]; then
        log_error "$FUNCNAME failed (json): \"$1\""
        return $ERR_FATAL
    fi

    # Translate two-character sequence escape representations
    STRING=${STRING//\\q/\"}
    STRING=${STRING//\\\\/\\}
    STRING=${STRING//\\\//\/}
    STRING=${STRING//\\b/$'\b'}
    STRING=${STRING//\\f/$'\f'}
    STRING=${STRING//\\n/$'\n'}
    STRING=${STRING//\\r/$'\r'}
    STRING=${STRING//\\t/	}

    echo "$STRING"
}

# Check if the specified folder name is valid.
# When multiple folders have the same name, the first one is taken.
# $1: folder name selected by user
# $2: cookie file (logged into account)
# $3: base URL
# stdout: folder ID
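#
# Example: 'root' always maps to folder ID 0. Any other name is looked
# up in the account's folder tree, e.g. (hypothetical folder name):
#   FOLDER_ID=$(uploaded_net_check_folder 'backup' "$COOKIE_FILE" "$BASE_URL")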
uploaded_net_check_folder() {
    local -r NAME=$1
    local -r COOKIE_FILE=$2
    local -r BASE_URL=$3
    local JSON FOLDERS FOL_ID

    # Special treatment for root folder (always uses ID "0")
    if [ "$NAME" = 'root' ]; then
        echo 0
        return 0
    fi

    JSON=$(curl -b "$COOKIE_FILE" "$BASE_URL/api/folder/tree") || return

    # Find matching folder ID
    FOL_ID=$(parse_quiet . "{\"id\":\"\([[:alnum:]]\+\)\",\"name\":\"$NAME\"" <<< "$JSON")

    if [ -n "$FOL_ID" ]; then
        echo "$FOL_ID"
        return 0
    fi

    FOLDERS=$(parse_json 'name' 'split' <<< "$JSON") || return
    log_error 'Invalid folder, choose from:' $FOLDERS
    return $ERR_BAD_COMMAND_LINE
}

# Extract file ID from download link
# $1: canonical uploaded.net download URL
# $2: base URL
# stdout: file ID
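#
# Example (hypothetical ID):
#   uploaded_net_extract_file_id 'http://uploaded.net/file/abc123' \
#       'http://uploaded.net'
#   # stdout: abc123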
uploaded_net_extract_file_id() {
    local FILE_ID

    # check whether it looks like a folder link
    if match "${MODULE_UPLOADED_NET_REGEXP_URL}f\(older\)\?/" "$1"; then
        log_error 'This is a folder. Please use plowlist.'
        return $ERR_FATAL
    fi

    FILE_ID=$(parse . "$2/file/\([[:alnum:]]\+\)" <<< "$1") || return
    log_debug "File ID: '$FILE_ID'"
    echo "$FILE_ID"
}

# Output an Uploaded.net file download URL
# $1: cookie file
# $2: uploaded.net url
# stdout: real file download link
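#
# Example call (a sketch; file ID is made up):
#   uploaded_net_download "$COOKIE_FILE" 'http://ul.to/abc123'
#   # stdout: direct download URL on the first line, file name on the second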
uploaded_net_download() {
    local -r COOKIE_FILE=$1
    local -r BASE_URL='http://uploaded.net'
    local URL REDIR_URL ACCOUNT PAGE JSON WAIT ERR FILE_ID FILE_NAME FILE_URL CV SESS

    URL=$(uploaded_net_get_canonical_url "$2") || return

    # Recognize folders
    if match "$BASE_URL/folder/" "$URL"; then
        log_error 'This is a directory list'
        return $ERR_FATAL
    fi

    REDIR_URL=$(curl --head "$URL" | grep_http_header_location_quiet)
    if [ -n "$REDIR_URL" ]; then
        # Check for direct download
        if match '/dl/' "$REDIR_URL"; then
            FILE_NAME=$(curl "$URL/status" | first_line)

            echo "$REDIR_URL"
            echo "$FILE_NAME"
            return 0

        # Page not found
        # The requested file isn't available anymore!
        elif match "$BASE_URL/\(404\|410\)" "$REDIR_URL"; then
            return $ERR_LINK_DEAD
        else
            log_error "Suspicious remote redirection: $REDIR_URL"
        fi
    fi

    uploaded_net_switch_lang "$COOKIE_FILE" "$BASE_URL" || return

    # Note: The file owner never needs a password, and only the owner may
    # access private files, so login comes first.
    if CV=$(storage_get 'cookie_file'); then
        echo "$CV" >"$COOKIE_FILE"

        # Check for expired session
        PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/me") || return
        if ! match '>\(Profile\|Logout\)<' "$PAGE"; then
            log_error 'Expired session; deleting cache entry'
            storage_set 'cookie_file'
            echo 1
            return $ERR_LINK_TEMP_UNAVAILABLE
        fi

        # Determine account type
        local TYPE
        TYPE=$(parse 'Status:' '<em>\(.*\)</em>' 1 <<< "$PAGE") || return
        ACCOUNT=$(lowercase "$TYPE")

        SESS=$(parse_cookie 'auth' < "$COOKIE_FILE")
        log_debug "session (cached): '$SESS'"
    elif [ -n "$AUTH" ]; then
        ACCOUNT=$(uploaded_net_login "$AUTH" "$COOKIE_FILE" "$BASE_URL") || return
        storage_set 'cookie_file' "$(cat "$COOKIE_FILE")"

        SESS=$(parse_cookie 'auth' < "$COOKIE_FILE")
        log_debug "session (new): '$SESS'"
    fi

    # Note: Save HTTP headers to catch premium users' "direct downloads"
    PAGE=$(curl -i -b "$COOKIE_FILE" "$URL") || return

    # Check for files that need a password
    # (Note: 'Authentification' is the page's own spelling)
    if match '<h2>Authentification</h2>' "$PAGE"; then
        log_debug 'File is password protected'

        if [ -z "$LINK_PASSWORD" ]; then
            LINK_PASSWORD=$(prompt_for_password) || return
        fi

        # Note: Again, consider "direct downloads"
        PAGE=$(curl -i -b "$COOKIE_FILE" -F "pw=$LINK_PASSWORD" "$URL") || return

        if match '<h2>Authentification</h2>' "$PAGE"; then
            return $ERR_LINK_PASSWORD_REQUIRED
        fi
    fi

    FILE_ID=$(uploaded_net_extract_file_id "$URL" "$BASE_URL") || return
    FILE_NAME=$(curl "$BASE_URL/file/$FILE_ID/status" | first_line) || return

    if [ "$ACCOUNT" = 'premium' ]; then
        # Premium users can resume downloads
        MODULE_UPLOADED_NET_DOWNLOAD_RESUME=yes

        # The enforced interval between successive downloads seems to be
        # much shorter for premium users
        MODULE_UPLOADED_NET_DOWNLOAD_SUCCESSIVE_INTERVAL=30

        # Get download link, if this was a direct download
        FILE_URL=$(grep_http_header_location_quiet <<< "$PAGE")

        if match 'your Hybrid-Traffic is completely exhausted' "$PAGE"; then
            WAIT=$(parse 'Hybrid-Traffic.*exhausted' \
                'will be released in \([[:digit:]]\+\) minutes' <<< "$PAGE")
            echo $(( ${WAIT:-60} * 60 ))
            return $ERR_LINK_TEMP_UNAVAILABLE
        fi

        if [ -z "$FILE_URL" ]; then
            FILE_URL=$(parse_attr 'stor[[:digit:]]\+\.' 'action' <<< "$PAGE") || return
        fi

        echo "$FILE_URL"
        echo "$FILE_NAME"
        return 0
    fi

    if match '^[[:space:]]*var free_enabled = false;' "$PAGE"; then
        log_error 'No free download slots available'
        echo 300 # wait some arbitrary time
        return $ERR_LINK_TEMP_UNAVAILABLE
    fi

    # Request download (use dummy "-d" to force a POST request)
    JSON=$(curl -b "$COOKIE_FILE" --referer "$URL" \
        -H 'X-Requested-With: XMLHttpRequest' -d '' \
        "$BASE_URL/io/ticket/slot/$FILE_ID") || return

    if [ "$JSON" != '{succ:true}' ]; then
        ERR=$(parse_json_quiet 'err' <<< "$JSON")

        # from 'http://uploaded.net/js/download.js' - 'function(limit)'
        if [ "$ERR" = 'limit-dl' ]; then
            log_error 'Free download limit reached'
            echo 600 # wait some arbitrary time
            return $ERR_LINK_TEMP_UNAVAILABLE

        elif [ "$ERR" = 'limit-parallel' ]; then
            log_error 'Parallel downloads are not allowed'
            echo 600 # wait some arbitrary time
            return $ERR_LINK_TEMP_UNAVAILABLE

        elif [ "$ERR" = 'limit-size' ]; then
            return $ERR_SIZE_LIMIT_EXCEEDED

        elif [ "$ERR" = 'limit-slot' ]; then
            log_error 'No free download slots available'
            echo 300 # wait some arbitrary time
            return $ERR_LINK_TEMP_UNAVAILABLE
        fi

        log_error "Unexpected remote error: $ERR"
        return $ERR_FATAL
    fi

    # <span>Current waiting period: <span>30</span> seconds</span>
    WAIT=$(parse '<span>Current waiting period' \
        'period: <span>\([[:digit:]]\+\)</span>' <<< "$PAGE") || return
    wait $((WAIT + 1)) || return

    # from 'http://uploaded.net/js/download.js' - 'Recaptcha.create'
    local PUBKEY WCI CHALLENGE WORD ID
    PUBKEY='6Lcqz78SAAAAAPgsTYF3UlGf2QFQCNuPMenuyHF3'
    WCI=$(recaptcha_process $PUBKEY) || return
    { read WORD; read CHALLENGE; read ID; } <<< "$WCI"

    JSON=$(curl -b "$COOKIE_FILE" --referer "$URL" \
        -H 'X-Requested-With: XMLHttpRequest' \
        -d "recaptcha_challenge_field=$CHALLENGE" \
        -d "recaptcha_response_field=$WORD" \
        "$BASE_URL/io/ticket/captcha/$FILE_ID") || return

    ERR=$(parse_json_quiet 'err' <<< "$JSON")

    if [ -n "$ERR" ]; then
        if [ "$ERR" = 'captcha' ]; then
            log_error 'Wrong captcha'
            captcha_nack "$ID"
            return $ERR_CAPTCHA
        fi

        captcha_ack "$ID"

        if [ "$ERR" = 'limit-dl' ]; then
            log_error 'Free download limit reached'
            echo 600 # wait some arbitrary time
            return $ERR_LINK_TEMP_UNAVAILABLE

        # You have reached the max. number of possible free downloads for this hour
        elif match 'possible free downloads for this hour' "$ERR"; then
            log_error 'Hourly limit reached.'
            echo 3600
            return $ERR_LINK_TEMP_UNAVAILABLE

        # This file exceeds the max. filesize which can be downloaded by free users.
        elif match 'exceeds the max. filesize' "$ERR"; then
            return $ERR_SIZE_LIMIT_EXCEEDED

        # We're sorry but all of our available download slots are busy currently
        elif match 'all of our available download slots are busy' "$ERR"; then
            log_error 'No free download slots available'
            echo 300 # wait some arbitrary time
            return $ERR_LINK_TEMP_UNAVAILABLE
        fi

        log_error "Unexpected remote error: $ERR"
        return $ERR_FATAL
    fi

    captcha_ack "$ID"

    # {type:'download',url:'http://storXXXX.uploaded.net/dl/...'}
    # Note: This is not valid JSON due to the unquoted/single-quoted strings
    FILE_URL=$(uploaded_net_parse_json_alt 'url' <<< "$JSON") || return

    echo "$FILE_URL"
    echo "$FILE_NAME"
}

# Upload a file to Uploaded.net
# $1: cookie file
# $2: input file (with full path)
# $3: remote filename
# stdout: ul.to download link
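#
# Example call (a sketch; paths are placeholders):
#   uploaded_net_upload "$COOKIE_FILE" '/tmp/local.bin' 'remote.bin'
#   # stdout: the download link, an empty line, then the admin code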
uploaded_net_upload() {
    local -r COOKIE_FILE=$1
    local -r FILE=$2
    local -r DEST_FILE=$3
    local -r BASE_URL='http://uploaded.net'
    local -r MAX_SIZE=1073741823 # 1 GiB - 1 byte: limit for non-premium uploads
    local PAGE SERVER FILE_ID AUTH_DATA ACCOUNT FOLDER_ID OPT_FOLDER CV SESS

    # Sanity checks
    [ -n "$AUTH" ] || return $ERR_LINK_NEED_PERMISSIONS

    if [ -n "$LINK_PASSWORD" ]; then
        local -r PW_MAX=12

        if [ -n "$PRIVATE_FILE" ]; then
            log_error 'Private files cannot be password protected'
            return $ERR_BAD_COMMAND_LINE
        fi

        # Check length limitation
        if [ ${#LINK_PASSWORD} -gt $PW_MAX ]; then
            log_error "Password must not be longer than $PW_MAX characters"
            return $ERR_BAD_COMMAND_LINE
        fi
    fi

    if [ -n "$ADMIN_CODE" ]; then
        local -r AC_MAX=30
        local -r AC_FORBIDDEN="/ '\"%#;&"

        # Check length limitation
        if [ ${#ADMIN_CODE} -gt $AC_MAX ]; then
            log_error "Admin code must not be longer than $AC_MAX characters"
            return $ERR_BAD_COMMAND_LINE
        fi

        # Check for forbidden characters
        if match "[$AC_FORBIDDEN]" "$ADMIN_CODE"; then
            log_error "Admin code must not contain any of these: $AC_FORBIDDEN"
            return $ERR_BAD_COMMAND_LINE
        fi
    else
        ADMIN_CODE=$(random a 8)
    fi

    PAGE=$(curl "$BASE_URL/js/script.js") || return
    SERVER=$(parse 'uploadServer =' "[[:space:]]'\([^']*\)" <<< "$PAGE") || return

    log_debug "Upload server: $SERVER"

    if CV=$(storage_get 'cookie_file'); then
        echo "$CV" >"$COOKIE_FILE"

        # Check for expired session
        PAGE=$(curl -b "$COOKIE_FILE" "$BASE_URL/me") || return
        if ! match '>\(Profile\|Logout\)<' "$PAGE"; then
            log_debug 'Expired session; deleting cache entry'
            storage_set 'cookie_file'
            echo 1
            return $ERR_LINK_TEMP_UNAVAILABLE
        fi

        # Determine account type
        local TYPE
        TYPE=$(parse 'Status:' '<em>\(.*\)</em>' 1 <<< "$PAGE") || return
        ACCOUNT=$(lowercase "$TYPE")

        SESS=$(parse_cookie 'auth' < "$COOKIE_FILE")
        log_debug "session (cached): '$SESS'"
    elif [ -n "$AUTH" ]; then
        ACCOUNT=$(uploaded_net_login "$AUTH" "$COOKIE_FILE" "$BASE_URL") || return
        storage_set 'cookie_file' "$(cat "$COOKIE_FILE")"

        SESS=$(parse_cookie 'auth' < "$COOKIE_FILE")
        log_debug "session (new): '$SESS'"
    fi

    if [ "$ACCOUNT" != 'premium' ]; then
        local SIZE
        SIZE=$(get_filesize "$FILE") || return

        if [ $SIZE -gt $MAX_SIZE ]; then
            log_debug "File is bigger than $MAX_SIZE"
            return $ERR_SIZE_LIMIT_EXCEEDED
        fi
    fi

    # If user chose a folder, check it now
    if [ -n "$FOLDER" ]; then
        FOLDER_ID=$(uploaded_net_check_folder "$FOLDER" "$COOKIE_FILE" \
            "$BASE_URL") || return
        OPT_FOLDER="&folder=$FOLDER_ID"
    fi
    log_debug "Folder ID: $FOLDER_ID"

    AUTH_DATA=$(parse_cookie 'login' < "$COOKIE_FILE" | uri_decode | \
        parse . '^\(&id=.\+&pw=.\+\)&cks=') || return

    PAGE=$(curl_with_log --user-agent 'Shockwave Flash' \
        -F "Filename=$DEST_FILE" \
        -F "Filedata=@$FILE;type=application/octet-stream;filename=$DEST_FILE" \
        -F 'Upload=Submit Query' \
        "${SERVER}upload?admincode=$ADMIN_CODE$AUTH_DATA$OPT_FOLDER") || return

    if match '<title>504 Gateway Time-out</title>' "$PAGE"; then
        log_error 'Remote server error, maybe due to overload.'
        echo 120 # arbitrary time
        return $ERR_LINK_TEMP_UNAVAILABLE
    fi

    FILE_ID=${PAGE%%,*}

    # Sanity check
    if [ -z "$FILE_ID" ]; then
        log_error "Upstream error: '$PAGE'"
        return $ERR_FATAL
    elif [ "$FILE_ID" = 'forbidden' ]; then
        log_error 'Upstream error: file hash blacklisted, try another file.'
        return $ERR_FATAL
    fi

    # Do we need to edit the file? (change visibility, set password)
    if [ -n "$PRIVATE_FILE" ] || [ -n "$LINK_PASSWORD" ]; then
        log_debug 'Editing file...'
        local OPT_PRIV='true'

        [ -n "$LINK_PASSWORD" ] && OPT_PRIV=$LINK_PASSWORD

        # Note: Site uses the same API call to set file private or set a password
        PAGE=$(curl -b "$COOKIE_FILE" \
            -d "auth=$FILE_ID" -d "priv=$OPT_PRIV" \
            "$BASE_URL/api/file/priv") || return

        if [ "$PAGE" != '{"succ":"true"}' ]; then
            log_error 'Could not set password/private. Site updated?'
        fi
    fi

    echo "http://ul.to/$FILE_ID"
    echo
    echo "$ADMIN_CODE"
}

# Delete a file on Uploaded.net
# $1: cookie file
# $2: uploaded.net (download) link
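#
# Example call (a sketch; file ID is made up):
#   uploaded_net_delete "$COOKIE_FILE" 'http://uploaded.net/file/abc123'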
uploaded_net_delete() {
    local -r COOKIE_FILE=$1
    local -r BASE_URL='http://uploaded.net'
    local URL PAGE FILE_ID

    [ -n "$AUTH" ] || return $ERR_LINK_NEED_PERMISSIONS

    URL=$(uploaded_net_get_canonical_url "$2") || return

    # Recognize folders
    if match "$BASE_URL/folder/" "$URL"; then
        log_error 'This is a directory list'
        return $ERR_FATAL
    fi

    # Page not found
    # The requested file isn't available anymore!
    if match "$BASE_URL/\(404\|410\)" "$URL"; then
        return $ERR_LINK_DEAD
    fi

    uploaded_net_login "$AUTH" "$COOKIE_FILE" "$BASE_URL" >/dev/null || return

    FILE_ID=$(uploaded_net_extract_file_id "$URL" "$BASE_URL") || return
    PAGE=$(curl -b "$COOKIE_FILE" -H 'X-Requested-With: XMLHttpRequest' \
        -d "file%5B%5D=$FILE_ID" "$BASE_URL/api/Remove") || return

    # {"succ":1,"trust":0}
    [ "$PAGE" = '{"succ":1,"trust":0}' ] || return $ERR_FATAL
}

# List an Uploaded.net shared file folder URL
# $1: uploaded.net url
# $2: recurse subfolders (null string means not selected)
# stdout: list of links
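#
# Example call (a sketch; folder ID is made up):
#   uploaded_net_list 'http://uploaded.net/folder/xyz789' ''
#   # stdout: the folder's file links, one per line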
uploaded_net_list() {
    local URL=$1
    local PAGE LINKS NAMES

    # check whether it looks like a folder link
    if ! match "${MODULE_UPLOADED_NET_REGEXP_URL}f\(older\)\?/" "$URL"; then
        log_error 'This is not a directory list'
        return $ERR_FATAL
    fi

    PAGE=$(curl -L "$URL") || return

    LINKS=$(parse_all_attr 'tr id="' 'id' <<< "$PAGE") || return
    NAMES=$(parse_all_tag_quiet 'onclick="visit($(this))' 'a' <<< "$PAGE")

    test "$LINKS" || return $ERR_LINK_DEAD

    list_submit "$LINKS" "$NAMES" 'http://uploaded.net/file/' || return
}

# Probe a download URL
# $1: cookie file (unused here)
# $2: Uploaded.net url
# $3: requested capability list
# stdout: 1 capability per line
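#
# Example (a sketch): with $3 set to 'fs', the file name is printed first,
# then the file size in bytes, then the capability summary 'cfs'
# ('c' is always included).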
uploaded_net_probe() {
    local -r REQ_IN=$3
    local -r BASE_URL='http://uploaded.net'
    local URL REDIR_URL PAGE REQ_OUT FILE_ID FILE_SIZE

    URL=$(uploaded_net_get_canonical_url "$2") || return

    # Page not found
    # The requested file isn't available anymore!
    [[ $URL = */404 || $URL = */410/* ]] && return $ERR_LINK_DEAD
    REQ_OUT=c

    FILE_ID=$(uploaded_net_extract_file_id "$URL" "$BASE_URL") || return
    PAGE=$(curl --location "$BASE_URL/file/$FILE_ID/status") || return

    if [[ $REQ_IN = *f* ]]; then
        first_line <<< "$PAGE" && REQ_OUT="${REQ_OUT}f"
    fi

    if [[ $REQ_IN = *s* ]]; then
        FILE_SIZE=$(last_line <<< "$PAGE" | replace_all '.' '' | replace_all ',' '.') \
            && translate_size "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s"
    fi

    if [[ $REQ_IN = *v* ]]; then
        echo "$URL"
        REQ_OUT="${REQ_OUT}v"
    fi

    echo $REQ_OUT
}