This file is indexed.

/usr/lib/python2.7/dist-packages/sagenb/notebook/colorize.py is in python-sagenb 1.0.1+ds1-2.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# -*- coding: utf-8 -*-
"""nodoctest
"""
#############################################################################
#       Copyright (C) 2007 William Stein <wstein@gmail.com>
#  Distributed under the terms of the GNU General Public License (GPL)
#  The full text of the GPL is available at:
#                  http://www.gnu.org/licenses/
#############################################################################

"""
    Colorize - Python source formatter that outputs Python code in XHTML.
    This script is based on MoinMoin - The Python Source Parser.

    FROM: Modified version of
      http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442482
"""

# Imports
import cgi
import sys
from six.moves import cStringIO as StringIO
import keyword
import token
import tokenize
import re
import os

# Pseudo token types for keywords and plain text, offset past NT_OFFSET so
# they cannot collide with the real token types.
_KEYWORD = token.NT_OFFSET + 1
_TEXT    = token.NT_OFFSET + 2

# Map token types to the CSS class names used in the generated XHTML.
_classes = {
    token.NUMBER:       'token_number',
    token.OP:           'token_op',
    token.STRING:       'token_string',
    tokenize.COMMENT:   'token_comment',
    token.NAME:         'token_name',
    token.ERRORTOKEN:   'token_error',
    _KEYWORD:           'keyword',
    _TEXT:              'text',
}
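
# The class names above correspond to CSS rules provided elsewhere by the
# notebook; a hypothetical minimal stylesheet (illustrative colors only, not
# the shipped CSS) could look like:
#
#   .token_number  { color: #008080; }
#   .token_string  { color: #ba2121; }
#   .token_comment { color: #408080; font-style: italic; }
#   .keyword       { color: #008000; font-weight: bold; }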

class Parser:
    """ Format Python source as colorized XHTML and write it to ``out``.
    """

    def __init__(self, raw, out=sys.stdout):
        """ Store the source text.
        """
        self.raw = raw.expandtabs().strip()
        self.out = out

    def format(self, formatter, form):
        """ Parse and send the colored source.

        The ``formatter`` and ``form`` arguments are not used.
        """
        # store line offsets in self.lines; tokenize reports 1-based row
        # numbers, so an extra leading 0 pads the list
        self.lines = [0, 0]
        pos = 0
        while True:
            pos = self.raw.find('\n', pos) + 1
            if not pos:
                break
            self.lines.append(pos)
        self.lines.append(len(self.raw))

        # parse the source and write it
        self.pos = 0
        text = StringIO(self.raw)
        try:
            tokenize.tokenize(text.readline, self)
        except tokenize.TokenError as ex:
            msg = ex.args[0]
            line = ex.args[1][0]
            # escape the remaining source before embedding it in the markup
            self.out.write("<h3>ERROR: %s</h3>%s\n" % (
                msg, cgi.escape(self.raw[self.lines[line]:])))


    def __call__(self, toktype, toktext, srow_scol, erow_ecol, line):
        """
        Token handler.
        """
        srow, scol = srow_scol
        erow, ecol = erow_ecol
        if 0:
            txt = "type {} {} text {} start {} {} end {} {} <br/>"
            print(txt.format(toktype, token.tok_name[toktype], toktext,
                             srow, scol, erow, ecol))

        # calculate new positions
        oldpos = self.pos
        newpos = self.lines[srow] + scol
        self.pos = newpos + len(toktext)

        # handle newlines
        if toktype in [token.NEWLINE, tokenize.NL]:
            self.out.write('\n')
            return

        # send the original whitespace, if needed
        if newpos > oldpos:
            self.out.write(self.raw[oldpos:newpos])

        # skip indenting tokens
        if toktype in [token.INDENT, token.DEDENT]:
            self.pos = newpos
            return

        # map token type to a color/class group
        if token.LPAR <= toktype <= token.OP:
            toktype = token.OP
        elif toktype == token.NAME and keyword.iskeyword(toktext):
            toktype = _KEYWORD
        classval = _classes.get(toktype, _classes[_TEXT])

        style = ''
        if toktype == token.ERRORTOKEN:
            style = ' style="border: solid 1.5pt #FF0000;"'

        # send text
        self.out.write('<span class="%s"%s>' % (classval, style))
        self.out.write(cgi.escape(toktext))
        self.out.write('</span>')


def colorize(source):
    """
    Return a colorized (XHTML) version of the given Python source as a string.
    """
    html = StringIO()
    Parser(source, html).format(None, None)
    html.flush()
    html.seek(0)
    return html.read()
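
For reference, here is a minimal usage sketch, assuming the sagenb package is importable; the input and output file names are purely illustrative:

    from sagenb.notebook.colorize import colorize

    # Read a Python source file and write the colorized XHTML fragment
    # produced by colorize() to a sibling ".html" file.  The output is a
    # sequence of <span> elements and needs a stylesheet defining the
    # token_* classes to actually render in color.
    filename = 'example.py'                 # hypothetical input file
    with open(filename) as f:
        html = colorize(f.read())           # returns a string of XHTML
    with open(filename + '.html', 'w') as f:
        f.write(html)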