This file is indexed.

/usr/share/plowshare/modules/netload_in.sh is in plowshare-modules 0~git20160124.8a8190d-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
# Plowshare netload.in module
# Copyright (c) 2010-2013 Plowshare team
#
# This file is part of Plowshare.
#
# Plowshare is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Plowshare is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Plowshare.  If not, see <http://www.gnu.org/licenses/>.

# URL pattern this module handles: netload.in (files) and netfolder.in (folders)
MODULE_NETLOAD_IN_REGEXP_URL='https\?://\(www\.\)\?net\(load\|folder\)\.in/'

# Download options: only premium authentication is supported
MODULE_NETLOAD_IN_DOWNLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,Premium account"
# Resume is off by default; flipped to 'yes' at runtime for premium sessions
MODULE_NETLOAD_IN_DOWNLOAD_RESUME=no
MODULE_NETLOAD_IN_DOWNLOAD_FINAL_LINK_NEEDS_COOKIE=no
# No mandatory delay between successive downloads
MODULE_NETLOAD_IN_DOWNLOAD_SUCCESSIVE_INTERVAL=

# Upload options: premium account optional (anonymous upload uses 'LINUX' auth code)
MODULE_NETLOAD_IN_UPLOAD_OPTIONS="
AUTH,a,auth,a=USER:PASSWORD,Premium account"
MODULE_NETLOAD_IN_UPLOAD_REMOTE_SUPPORT=no

# List options: folders may be password protected
MODULE_NETLOAD_IN_LIST_OPTIONS="
LINK_PASSWORD,p,link-password,S=PASSWORD,Used for password-protected folder"
MODULE_NETLOAD_IN_LIST_HAS_SUBFOLDERS=yes

MODULE_NETLOAD_IN_PROBE_OPTIONS=""

# Static function. Log into a netload.in premium account.
# $1: $AUTH argument string (USER:PASSWORD)
# $2: cookie file
# $3: netload.in baseurl
netload_in_premium_login() {
    # Placeholders $USER/$PASSWORD are substituted by post_login, hence
    # the single quotes (this must NOT be expanded by the shell here).
    local -r POST_DATA='txtuser=$USER&txtpass=$PASSWORD&txtcheck=login&txtlogin='
    local RESPONSE

    # Cookie content is returned even for wrong credentials, so the page
    # body must be inspected to detect a failed login.
    RESPONSE=$(post_login "$1" "$2" "$POST_DATA" "$3/index.php" -L) || return

    match 'InPage_Error\|lostpassword\.tpl' "$RESPONSE" || return 0

    log_debug 'bad login and/or password'
    return $ERR_LOGIN_FAILED
}

# Static function. Retrieve file information using official API
# $1: file id
# $2: return md5 (0 or 1)
# stdout: API reply, ';'-separated fields (id;name;size;status[;md5]
#         — per the field lists used by callers below)
netload_in_infos() {
    # Plowshare Auth Code
    local -r AUTH_CODE='ec3vfSuAXoHVQxA816hsKGdOCbQ6it9N'
    local -r API_URL='https://api.netload.in/info.php'

    curl -d "auth=$AUTH_CODE" -d "file_id=$1" \
        -d 'bz=1' -d "md5=$2" "$API_URL"
}

# Output a netload.in file download URL
# $1: cookie file
# $2: netload.in url
# stdout: real file download link
#         file name
netload_in_download() {
    local -r COOKIE_FILE=$1
    # Normalize URL: strip the first 'www.' occurrence
    local -r URL=${2/www.}
    local -r BASE_URL='http://netload.in'
    local FILE_ID FILE_NAME
    local PAGE WAIT_URL WAIT_TIME FILE_URL

    # Get filename using API
    # File id is the alphanumeric token following '/datei' in the URL path
    FILE_ID=$(parse . '/datei\([[:alnum:]]\+\)[/.]' <<< "$URL") || return
    log_debug "File ID: '$FILE_ID'"

    # file ID, filename, size, status (';'-separated API reply)
    PAGE=$(netload_in_infos "$FILE_ID" 0) || return
    # Keep only the second field (filename)
    FILE_NAME=${PAGE#*;}
    FILE_NAME=${FILE_NAME%%;*}

    if [ -n "$AUTH" ]; then
        # Premium path: the final link is usually served via HTTP redirect
        netload_in_premium_login "$AUTH" "$COOKIE_FILE" "$BASE_URL" || return
        # Premium downloads support resume
        MODULE_NETLOAD_IN_DOWNLOAD_RESUME=yes

        # -i keeps response headers so Location can be extracted
        PAGE=$(curl -i -b "$COOKIE_FILE" "$URL") || return
        FILE_URL=$(grep_http_header_location <<< "$PAGE")

        # check for link redirection (HTTP error 301)
        if [ "${FILE_URL:0:1}" = '/' ]; then
            PAGE=$(curl -i -b "$COOKIE_FILE" "${BASE_URL}$FILE_URL") || return
            FILE_URL=$(grep_http_header_location <<< "$PAGE")
        fi

        # Account download method set to "Automatisch"
        # HTTP HEAD request discarded, can't read "Content-Disposition" header
        if [ -n "$FILE_URL" ]; then
            echo "$FILE_URL"
            echo "$FILE_NAME"
            return 0
        fi

        # No redirect received: scrape the direct link from the page body
        parse_attr 'Orange_Link' 'href' <<< "$PAGE" || return
        echo "$FILE_NAME"
        return 0
    fi

    # Anonymous (free) download path starts here
    PAGE=$(curl --location -c "$COOKIE_FILE" "$URL") || return

    # This file can be only downloaded by Premium users in fact of its file size
    if match 'This file is only for Premium Users' "$PAGE"; then
        return $ERR_LINK_NEED_PERMISSIONS
    fi

    # Extract wait time
    WAIT_URL=$(parse_attr_quiet '<div class="Free_dl">' 'href' <<< "$PAGE")
    [ "$WAIT_URL" ] || return $ERR_LINK_DEAD

    # Unescape '&amp;' HTML entities in the scraped href
    WAIT_URL="$BASE_URL/${WAIT_URL//&amp;/&}"
    PAGE=$(curl -b "$COOKIE_FILE" --referer "$URL" "$WAIT_URL") || return
    # Countdown value is divided by 100 below — presumably centiseconds; confirm
    WAIT_TIME=$(parse 'type="text/javascript">countdown' \
            "countdown(\([[:digit:]]*\),'change()')" <<< "$PAGE") || return

    # Scrape (post) form
    local FORM FORM_ACT FORM_FID
    FORM=$(grep_form_by_order "$PAGE") || return
    FORM_ACT=$(parse_form_action <<< "$FORM") || return
    FORM_FID=$(parse_form_input_by_name 'file_id' <<< "$FORM") || return

    # Solve recaptcha
    local PUBKEY WCI CHALLENGE WORD ID
    PUBKEY='6LcLJMQSAAAAAJzquPUPKNovIhbK6LpSqCjYrsR1'
    WCI=$(recaptcha_process $PUBKEY)
    # recaptcha_process emits three lines: decoded word, challenge, transaction id
    { read WORD; read CHALLENGE; read ID; } <<< "$WCI"
    log_debug "Decoded captcha: $WORD"

    # NOTE(review): no 'seconds' unit passed here, unlike the wait call
    # further below — confirm this is intended
    wait $((WAIT_TIME / 100 + 1)) || return

    PAGE=$(curl --include -b "$COOKIE_FILE" \
        -d "recaptcha_challenge_field=$CHALLENGE" \
        -d "recaptcha_response_field=$WORD" -d "file_id=$FORM_FID" \
        -d 'captcha_check=1' -d 'start' "${BASE_URL}/$FORM_ACT") || return

    # Site redirects on captcha error
    if grep_http_header_location <<< "$PAGE" &>/dev/null; then
        captcha_nack $ID
        log_error 'Wrong captcha'
        return $ERR_CAPTCHA
    fi

    captcha_ack $ID
    log_debug 'Correct captcha'

    WAIT_TIME=$(parse_quiet 'type="text/javascript">countdown' \
            "countdown(\([[:digit:]]*\),'change()')" <<< "$PAGE")

    # <!--./share/templates/download_limit.tpl-->
    # <!--./share/templates/download_wait.tpl-->
    # A very large countdown means the free download limit was hit
    if [[ $WAIT_TIME -gt 10000 ]]; then
        log_debug 'Download limit reached!'
        # Report the retry delay (in seconds) to the caller
        echo $((WAIT_TIME / 100 + 1))
        return $ERR_LINK_TEMP_UNAVAILABLE
    fi

    # Suppress this wait will lead to a 400 http error (bad request)
    wait $((WAIT_TIME / 100 + 1)) seconds || return

    # Final download link and filename on stdout
    parse '<a class="Orange_Link"' 'Link" href="\(http[^"]*\)' <<< "$PAGE" || return
    echo "$FILE_NAME"
}

# Upload a file to netload.in
# $1: cookie file (used only for premium login)
# $2: input file (with full path)
# $3: remote filename
# stdout: netload.in download link (first line), delete link (second line)
#
# http://api.netload.in/index.php?id=3
# Note: Password protected archives upload is not managed here.
netload_in_upload() {
    local -r COOKIE_FILE=$1
    local -r FILE=$2
    local -r DESTFILE=$3
    local -r BASE_URL="http://www.netload.in"

    local AUTH_CODE UPLOAD_SERVER PAGE
    local RETCODE FILENAME FILESIZE DL DEL
    # Array (not a flat string) so credentials containing spaces or shell
    # metacharacters survive word splitting when passed to curl.
    local -a EXTRA_PARAMS=()

    if test "$AUTH"; then
        netload_in_premium_login "$AUTH" "$COOKIE_FILE" "$BASE_URL" || return

        # Prime the session so the auth-code page gets populated
        curl -b "$COOKIE_FILE" --data 'get=Get Auth Code' -o /dev/null \
            "$BASE_URL/index.php?id=56" || return

        AUTH_CODE=$(curl -b "$COOKIE_FILE" "$BASE_URL/index.php?id=56" | \
            parse 'Your Auth Code' ';">\([^<]*\)') || return
        log_debug "auth=$AUTH_CODE"

        local USER PASSWORD
        split_auth "$AUTH" USER PASSWORD || return

        EXTRA_PARAMS=(-F "user_id=$USER" -F "user_password=$PASSWORD")
    else
        # Anonymous upload uses a fixed auth code
        AUTH_CODE="LINUX"
        EXTRA_PARAMS=()
    fi

    UPLOAD_SERVER=$(curl 'http://api.netload.in/getserver.php') || return

    PAGE=$(curl_with_log "${EXTRA_PARAMS[@]}" \
        --form-string "auth=$AUTH_CODE" \
        -F 'modus=file_upload' \
        -F "file_link=@$FILE;filename=$DESTFILE" \
        "$UPLOAD_SERVER") || return

    # Expected result:
    # return_code;filename;filesize;download_link;delete_link
    # -r: keep backslashes in the reply literal
    IFS=';' read -r RETCODE FILENAME FILESIZE DL DEL <<< "$PAGE"

    case "$RETCODE" in
        UPLOAD_OK)
            echo "$DL"
            echo "$DEL"
            return 0
            ;;
        rar_password)
            log_error 'Archive is password protected'
            ;;
        unknown_user_id|wrong_user_password|no_user_password)
            log_error "bad login and/or password ($RETCODE)"
            return $ERR_LOGIN_FAILED
            ;;
        unknown_auth|prepare_failed)
            log_error "unexpected result ($RETCODE)"
            ;;
    esac

    return $ERR_FATAL
}

# List multiple netload.in links
# $1: netfolder.in link
# $2: recurse subfolders (null string means not selected)
# stdout: list of links
netload_in_list() {
    local -r URL=$1
    local PAGE LINKS NAMES

    if ! match '/folder' "$URL"; then
        log_error 'This is not a directory list'
        return $ERR_FATAL
    fi

    PAGE=$(curl "$URL" | break_html_lines_alt) || return

    # Folder can have a password
    if match '<div id="Password">' "$PAGE"; then
        log_debug 'Password-protected folder'

        # Prompt interactively when no password was supplied on the command line
        [ -n "$LINK_PASSWORD" ] || LINK_PASSWORD=$(prompt_for_password) || return

        PAGE=$(curl --data "password=$LINK_PASSWORD" "$URL" | \
            break_html_lines_alt) || return

        #<div class="InPage_Error"><pre>&bull; Passwort ist ung&uuml;ltig!<br/></pre></div>
        if match '"InPage_Error">' "$PAGE"; then
            return $ERR_LINK_PASSWORD_REQUIRED
        fi
    fi

    LINKS=$(parse_all_attr_quiet 'Link_[[:digit:]]' 'href' <<< "$PAGE")
    test "$LINKS" || return $ERR_LINK_DEAD

    NAMES=$(parse_all 'Link_[[:digit:]]' '^\([^<]*\)' 2 <<< "$PAGE")

    list_submit "$LINKS" "$NAMES" || return
}

# Probe a download URL
# $1: cookie file (unused here)
# $2: netload.in url
# $3: requested capability list
# stdout: 1 capability per line
netload_in_probe() {
    local -r URL=$2
    local -r REQ_IN=$3
    local RESPONSE REQ_OUT FILE_ID FILE_NAME FILE_SIZE FILE_HASH STATUS

    if [[ "$URL" = */folder* ]]; then
        log_error 'This is a folder. Please use plowlist.'
        return $ERR_FATAL
    fi

    # Same id extraction pattern as netload_in_download (here-string, no echo pipe)
    FILE_ID=$(parse . '/datei\([[:alnum:]]\+\)[/.]' <<< "$URL") || return
    log_debug "File ID: '$FILE_ID'"

    # md5=1: ask the API to include the file hash
    RESPONSE=$(netload_in_infos "$FILE_ID" 1) || return

    if [ "$RESPONSE" = 'unknown_auth' ]; then
        log_error 'API key invalid. Please report this issue!'
        return $ERR_FATAL
    fi

    # file ID, filename, size, status, MD5
    # -r: keep backslashes in the reply literal
    IFS=';' read -r FILE_ID FILE_NAME FILE_SIZE STATUS FILE_HASH <<< "$RESPONSE"

    [ "$STATUS" = 'online' ] || return $ERR_LINK_DEAD
    REQ_OUT=c

    if [[ $REQ_IN = *f* ]]; then
        [ -n "$FILE_NAME" ] && echo "$FILE_NAME" && REQ_OUT="${REQ_OUT}f"
    fi

    if [[ $REQ_IN = *s* ]]; then
        [ -n "$FILE_SIZE" ] && echo "$FILE_SIZE" && REQ_OUT="${REQ_OUT}s"
    fi

    if [[ $REQ_IN = *h* ]]; then
        [ -n "$FILE_HASH" ] && echo "$FILE_HASH" && REQ_OUT="${REQ_OUT}h"
    fi

    echo "$REQ_OUT"
}