This file is indexed.

/usr/lib/python3/dist-packages/ripe/atlas/cousteau/api_listing.py is in python3-ripe-atlas-cousteau 1.3-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# Copyright (c) 2015 RIPE NCC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
import calendar
from datetime import datetime

try:
    # Python 3
    from urllib.parse import urlparse
except ImportError:
    # Python 2
    from urlparse import urlparse

from .api_meta_data import Probe, Measurement
from .request import AtlasRequest
from .exceptions import APIResponseError


class RequestGenerator(object):
    """
    Python generator class that yields results for meta APIs like
    probes/measurements as single objects. It supports any filter these APIs
    support in a naive way: it accepts whatever filters are passed and builds
    the url_path from them.
    """

    url = ""
    id_filter = ""
    URL_LENGTH_LIMIT = 5000

    def __init__(self, return_objects=False, user_agent=None, server=None,
                 verify=True, **filters):
        self._user_agent = user_agent
        self.server = server
        self.verify = verify
        self.api_filters = filters
        self.split_urls = []
        self.total_count_flag = False
        self.current_batch = []
        self._count = []
        self.return_objects = return_objects
        self.atlas_url = self.build_url()

    def build_url(self):
        """Build the url path based on the filter options."""

        if not self.api_filters:
            return self.url

        # Reduce complex objects to simpler strings
        for k, v in self.api_filters.items():
            if isinstance(v, datetime):  # datetime > UNIX timestamp
                self.api_filters[k] = int(calendar.timegm(v.timetuple()))
            if isinstance(v, (tuple, list)):  # tuples & lists > x,y,z
                self.api_filters[k] = ",".join([str(_) for _ in v])

        if (
            self.id_filter in self.api_filters and
            len(str(self.api_filters[self.id_filter])) > self.URL_LENGTH_LIMIT
        ):
            self.build_url_chunks()
            return self.split_urls.pop(0)

        filters = '&'.join("%s=%s" % (k, v) for (k, v) in self.api_filters.items())

        return "%s?%s" % (self.url, filters)

    def build_url_chunks(self):
        """
        If url is too big because of id filter is huge, break id and construct
        several urls to call them in order to abstract this complexity from user.
        """
        CHUNK_SIZE = 500

        id_filter = str(self.api_filters.pop(self.id_filter)).split(',')
        id_chunks = list(self.chunks(id_filter, CHUNK_SIZE))
        filters = '&'.join("%s=%s" % (k, v) for (k, v) in self.api_filters.items())

        for chunk in id_chunks:
            if filters:
                url = "{0}?{1}&{2}={3}".format(self.url, filters, self.id_filter, ','.join(chunk))
            else:
                url = "{0}?{1}={2}".format(self.url, self.id_filter, ','.join(chunk))
            self.split_urls.append(url)
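
        # Illustration (hypothetical ids): 1200 probe ids passed as id__in
        # would be split into three urls of at most CHUNK_SIZE (500) ids each,
        # e.g. "/api/v2/probes/?id__in=1,2,...,500", fetched one after another
        # via build_next_url().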

    def chunks(self, l, n):
        """Yield successive n-sized chunks from l."""
        for i in range(0, len(l), n):
            yield l[i:i + n]

    def __iter__(self):
        return self

    # Python 3 compatibility
    def __next__(self):
        return self.next()

    def next(self):
        if not self.current_batch:  # First time, or the current batch has been fully consumed
            if not self.atlas_url:  # There is no next url, exit
                raise StopIteration()
            self.next_batch()
            if not self.current_batch:  # Server returned an empty batch, exit
                raise StopIteration()

        current_object = self.current_batch.pop(0)
        if self.return_objects:
            return self.object_class(meta_data=current_object)
        else:
            return current_object

    def next_batch(self):
        """
        Querying API for the next batch of objects and store next url and
        batch of objects.
        """
        is_success, results = AtlasRequest(
            url_path=self.atlas_url,
            user_agent=self._user_agent,
            server=self.server,
            verify=self.verify,
        ).get()

        if not is_success:
            raise APIResponseError(results)

        self.total_count = results.get("count")
        self.atlas_url = self.build_next_url(results.get("next"))
        self.current_batch = results.get("results", [])

    def build_next_url(self, url):
        """Builds next url in a format compatible with cousteau. Path + query"""
        if not url:
            if self.split_urls:  # If we had a long request give the next part
                self.total_count_flag = False  # Reset flag for count
                return self.split_urls.pop(0)
            else:
                return None

        parsed_url = urlparse(url)
        return "{0}?{1}".format(parsed_url.path, parsed_url.query)

    # count attribute to deal with split-up urls and total count
    def get_total_count(self):
        """Getter for count attribute"""
        if not self._count:
            return 0
        else:
            return sum(self._count)

    def set_total_count(self, value):
        """Setter for count attribute. Set should append only one count per splitted url."""
        if not self.total_count_flag and value:
            self._count.append(int(value))
            self.total_count_flag = True

    doc_count = "Defines how many objects are returned."
    total_count = property(get_total_count, set_total_count, doc=doc_count)
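
    # Illustration (assumed usage): for a request like
    #   probes = ProbeRequest(country_code="GR")
    # iterating over probes and then reading probes.total_count gives the sum
    # of the "count" values reported for every url queried so far (one per
    # chunk when the id filter was split).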


class ProbeRequest(RequestGenerator):
    """
    Python generator for the Probes meta API.
    e.g.
    for probe in ProbeRequest(**{"limit":200, "country_code": "GR"}):
        print(probe["id"])
    """
    url = "/api/v2/probes/"
    id_filter = "id__in"
    object_class = Probe


class MeasurementRequest(RequestGenerator):
    """
    Python generator for the Measurement meta API.
    e.g.
    for measurement in MeasurementRequest(**{"status": 1}):
        print(measurement["id"])
    """
    url = "/api/v2/measurements/"
    id_filter = "id__in"
    object_class = Measurement


class AnchorRequest(RequestGenerator):
    """
    Python generator for the Anchor meta API.
    e.g.
    for anchor in AnchorRequest():
        print(anchor["id"])
    """
    url = "/api/v2/anchors/"
    id_filter = "id__in"
    object_class = None

    def __init__(self, *args, **kwargs):
        super(AnchorRequest, self).__init__(*args, **kwargs)
        self.return_objects = None
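
For reference, a minimal usage sketch based on the classes above; the country_code filter value is illustrative, not required:

from ripe.atlas.cousteau.api_listing import ProbeRequest

# Iterate matching probes as plain dicts; pass return_objects=True to
# receive Probe instances instead.
probes = ProbeRequest(country_code="GR")
for probe in probes:
    print(probe["id"])

# After iteration, total_count reflects the count(s) reported by the API.
print(probes.total_count)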