This file is indexed.

/usr/lib/python3/dist-packages/aiohttp/server.py is in python3-aiohttp 0.20.2-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

"""simple http server."""

import asyncio
import http.server
import traceback
import socket

from html import escape as html_escape
from math import ceil

import aiohttp
from aiohttp import errors, streams, hdrs, helpers
from aiohttp.log import server_logger
from aiohttp.helpers import ensure_future

__all__ = ('ServerHttpProtocol',)


RESPONSES = http.server.BaseHTTPRequestHandler.responses
DEFAULT_ERROR_MESSAGE = """
<html>
  <head>
    <title>{status} {reason}</title>
  </head>
  <body>
    <h1>{status} {reason}</h1>
    {message}
  </body>
</html>"""


if hasattr(socket, 'SO_KEEPALIVE'):
    def tcp_keepalive(server, transport):
        sock = transport.get_extra_info('socket')
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
else:
    def tcp_keepalive(server, transport):  # pragma: no cover
        pass

EMPTY_PAYLOAD = streams.EmptyStreamReader()


class ServerHttpProtocol(aiohttp.StreamProtocol):
    """Simple http protocol implementation.

    ServerHttpProtocol handles an incoming http request. It reads the
    request line, request headers and request payload, then calls the
    handle_request() method. By default it always returns a 404 response.

    ServerHttpProtocol handles errors in the incoming request, such as a
    bad status line, bad headers or an incomplete payload. If any error
    occurs, the connection is closed.

    :param keep_alive: number of seconds before closing keep-alive connection
    :type keep_alive: int or None

    :param bool keep_alive_on: enable keep-alive, default is on

    :param int timeout: slow request timeout

    :param allowed_methods: (optional) List of allowed request methods.
                            Set to empty list to allow all methods.
    :type allowed_methods: tuple

    :param bool debug: enable debug mode

    :param logger: custom logger object
    :type logger: aiohttp.log.server_logger

    :param access_log: custom logging object
    :type access_log: aiohttp.log.server_logger

    :param str access_log_format: access log format string

    :param loop: Optional event loop
    """
    _request_count = 0
    _request_handler = None
    _reading_request = False
    _keep_alive = False  # keep transport open
    _keep_alive_handle = None  # keep alive timer handle
    _timeout_handle = None  # slow request timer handle

    _request_prefix = aiohttp.HttpPrefixParser()  # http method parser
    _request_parser = aiohttp.HttpRequestParser()  # default request parser

    def __init__(self, *, loop=None,
                 keep_alive=75,  # NGINX default value is 75 secs
                 keep_alive_on=True,
                 timeout=0,
                 logger=server_logger,
                 access_log=None,
                 access_log_format=helpers.AccessLogger.LOG_FORMAT,
                 debug=False,
                 log=None,
                 **kwargs):
        super().__init__(
            loop=loop,
            disconnect_error=errors.ClientDisconnectedError, **kwargs)

        self._keep_alive_on = keep_alive_on
        self._keep_alive_period = keep_alive  # number of seconds to keep alive
        self._timeout = timeout  # slow request timeout
        self._loop = loop if loop is not None else asyncio.get_event_loop()

        self.logger = log or logger
        self.debug = debug
        self.access_log = access_log
        if access_log:
            self.access_logger = helpers.AccessLogger(access_log,
                                                      access_log_format)
        else:
            self.access_logger = None

    @property
    def keep_alive_timeout(self):
        return self._keep_alive_period

    def closing(self, timeout=15.0):
        """Worker process is about to exit; we need to clean up everything
        and stop accepting requests. This is especially important for
        keep-alive connections."""
        self._keep_alive = False
        self._keep_alive_on = False
        self._keep_alive_period = None

        if (not self._reading_request and self.transport is not None):
            if self._request_handler:
                self._request_handler.cancel()
                self._request_handler = None

            self.transport.close()
            self.transport = None
        elif self.transport is not None and timeout:
            if self._timeout_handle is not None:
                self._timeout_handle.cancel()

            # use slow request timeout for closing
            # connection_lost cleans timeout handler
            now = self._loop.time()
            self._timeout_handle = self._loop.call_at(
                ceil(now+timeout), self.cancel_slow_request)

    def connection_made(self, transport):
        super().connection_made(transport)

        self._request_handler = ensure_future(self.start(), loop=self._loop)

        # start slow request timer
        if self._timeout:
            now = self._loop.time()
            self._timeout_handle = self._loop.call_at(
                ceil(now+self._timeout), self.cancel_slow_request)

        if self._keep_alive_on:
            tcp_keepalive(self, transport)

    def connection_lost(self, exc):
        super().connection_lost(exc)

        if self._request_handler is not None:
            self._request_handler.cancel()
            self._request_handler = None
        if self._keep_alive_handle is not None:
            self._keep_alive_handle.cancel()
            self._keep_alive_handle = None
        if self._timeout_handle is not None:
            self._timeout_handle.cancel()
            self._timeout_handle = None

    def data_received(self, data):
        super().data_received(data)

        # reading request
        if not self._reading_request:
            self._reading_request = True

        # stop keep-alive timer
        if self._keep_alive_handle is not None:
            self._keep_alive_handle.cancel()
            self._keep_alive_handle = None

    def keep_alive(self, val):
        """Set keep-alive connection mode.

        :param bool val: new state.
        """
        self._keep_alive = val

    def log_access(self, message, environ, response, time):
        if self.access_logger:
            self.access_logger.log(message, environ, response,
                                   self.transport, time)

    def log_debug(self, *args, **kw):
        if self.debug:
            self.logger.debug(*args, **kw)

    def log_exception(self, *args, **kw):
        self.logger.exception(*args, **kw)

    def cancel_slow_request(self):
        if self._request_handler is not None:
            self._request_handler.cancel()
            self._request_handler = None

        if self.transport is not None:
            self.transport.close()

        self.log_debug('Close slow request.')

    @asyncio.coroutine
    def start(self):
        """Start processing of incoming requests.

        It reads the request line, request headers and request payload,
        then calls the handle_request() method. Subclasses have to override
        handle_request(). start() handles various exceptions in request
        or response handling. The connection is always closed unless
        keep_alive(True) is specified.
        """
        reader = self.reader

        while True:
            message = None
            self._keep_alive = False
            self._request_count += 1
            self._reading_request = False

            payload = None
            try:
                # read http request method
                prefix = reader.set_parser(self._request_prefix)
                yield from prefix.read()

                # start reading request
                self._reading_request = True

                # start slow request timer
                if self._timeout and self._timeout_handle is None:
                    now = self._loop.time()
                    self._timeout_handle = self._loop.call_at(
                        ceil(now+self._timeout), self.cancel_slow_request)

                # read request headers
                httpstream = reader.set_parser(self._request_parser)
                message = yield from httpstream.read()

                # cancel slow request timer
                if self._timeout_handle is not None:
                    self._timeout_handle.cancel()
                    self._timeout_handle = None

                # request may not have payload
                if (message.headers.get(hdrs.CONTENT_LENGTH, 0) or
                    hdrs.SEC_WEBSOCKET_KEY1 in message.headers or
                    'chunked' in message.headers.get(
                        hdrs.TRANSFER_ENCODING, '')):
                    payload = streams.FlowControlStreamReader(
                        reader, loop=self._loop)
                    reader.set_parser(
                        aiohttp.HttpPayloadParser(message), payload)
                else:
                    payload = EMPTY_PAYLOAD

                yield from self.handle_request(message, payload)

            except asyncio.CancelledError:
                return
            except errors.ClientDisconnectedError:
                self.log_debug(
                    'Ignored premature client disconnection #1.')
                return
            except errors.HttpProcessingError as exc:
                if self.transport is not None:
                    yield from self.handle_error(exc.code, message,
                                                 None, exc, exc.headers,
                                                 exc.message)
            except errors.LineLimitExceededParserError as exc:
                yield from self.handle_error(400, message, None, exc)
            except Exception as exc:
                yield from self.handle_error(500, message, None, exc)
            finally:
                if self.transport is None:
                    self.log_debug(
                        'Ignored premature client disconnection #2.')
                    return

                if payload and not payload.is_eof():
                    self.log_debug('Uncompleted request.')
                    self._request_handler = None
                    self.transport.close()
                    return
                else:
                    reader.unset_parser()

                if self._request_handler:
                    if self._keep_alive and self._keep_alive_period:
                        self.log_debug(
                            'Start keep-alive timer for %s sec.',
                            self._keep_alive_period)
                        now = self._loop.time()
                        self._keep_alive_handle = self._loop.call_at(
                            ceil(now+self._keep_alive_period),
                            self.transport.close)
                    elif self._keep_alive and self._keep_alive_on:
                        # do nothing, rely on kernel or upstream server
                        pass
                    else:
                        self.log_debug('Close client connection.')
                        self._request_handler = None
                        self.transport.close()
                        return
                else:
                    # connection is closed
                    return

    def handle_error(self, status=500, message=None,
                     payload=None, exc=None, headers=None, reason=None):
        """Handle errors.

        Returns an http response with the given status code and logs
        additional information. It always closes the current connection."""
        now = self._loop.time()
        try:
            if self._request_handler is None:
                # client has been disconnected during writing.
                return ()

            if status == 500:
                self.log_exception("Error handling request")

            try:
                if reason is None or reason == '':
                    reason, msg = RESPONSES[status]
                else:
                    msg = reason
            except KeyError:
                status = 500
                reason, msg = '???', ''

            if self.debug and exc is not None:
                try:
                    tb = traceback.format_exc()
                    tb = html_escape(tb)
                    msg += '<br><h2>Traceback:</h2>\n<pre>{}</pre>'.format(tb)
                except:
                    pass

            html = DEFAULT_ERROR_MESSAGE.format(
                status=status, reason=reason, message=msg).encode('utf-8')

            response = aiohttp.Response(self.writer, status, close=True)
            response.add_header(hdrs.CONTENT_TYPE, 'text/html; charset=utf-8')
            response.add_header(hdrs.CONTENT_LENGTH, str(len(html)))
            if headers is not None:
                for name, value in headers:
                    response.add_header(name, value)
            response.send_headers()

            response.write(html)
            # disable CORK, enable NODELAY if needed
            self.writer.set_tcp_nodelay(True)
            drain = response.write_eof()

            self.log_access(message, None, response, self._loop.time() - now)
            return drain
        finally:
            self.keep_alive(False)

    def handle_request(self, message, payload):
        """Handle a single http request.

        Subclasses should override this method. By default it always
        returns a 404 response.

        :param message: Request headers
        :type message: aiohttp.protocol.RawRequestMessage
        :param payload: Request payload
        :type payload: aiohttp.streams.FlowControlStreamReader
        """
        now = self._loop.time()
        response = aiohttp.Response(
            self.writer, 404, http_version=message.version, close=True)

        body = b'Page Not Found!'

        response.add_header(hdrs.CONTENT_TYPE, 'text/plain')
        response.add_header(hdrs.CONTENT_LENGTH, str(len(body)))
        response.send_headers()
        response.write(body)
        drain = response.write_eof()

        self.keep_alive(False)
        self.log_access(message, None, response, self._loop.time() - now)

        return drain
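

Below is a minimal usage sketch (not part of the packaged file above): it subclasses ServerHttpProtocol, overrides handle_request() using the same Response/writer pattern as the default 404 handler, and serves it with asyncio's loop.create_server(). The handler class name, address, port and response body are illustrative assumptions.

import asyncio

import aiohttp
import aiohttp.server
from aiohttp import hdrs


class HelloRequestHandler(aiohttp.server.ServerHttpProtocol):
    """Illustrative subclass that answers every request with 200 OK."""

    @asyncio.coroutine
    def handle_request(self, message, payload):
        # Mirror the Response/writer pattern of the default handler above.
        response = aiohttp.Response(
            self.writer, 200, http_version=message.version)
        body = b'Hello, world!\n'
        response.add_header(hdrs.CONTENT_TYPE, 'text/plain')
        response.add_header(hdrs.CONTENT_LENGTH, str(len(body)))
        response.send_headers()
        response.write(body)
        yield from response.write_eof()


if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    # One protocol instance is created per accepted connection; keep_alive
    # and debug are the constructor parameters documented in the class
    # docstring above.
    server = loop.run_until_complete(loop.create_server(
        lambda: HelloRequestHandler(loop=loop, keep_alive=75, debug=True),
        '127.0.0.1', 8080))
    try:
        loop.run_forever()
    finally:
        server.close()
        loop.run_until_complete(server.wait_closed())
        loop.close()

Requesting http://127.0.0.1:8080/ should return the plain-text body; any exception raised inside handle_request() is turned into an error page by handle_error() in the file above.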