This file is indexed.

/usr/lib/python2.7/dist-packages/koji/util.py is in koji-common 1.10.0-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# Copyright (c) 2005-2014 Red Hat, Inc.
#
#    Koji is free software; you can redistribute it and/or
#    modify it under the terms of the GNU Lesser General Public
#    License as published by the Free Software Foundation;
#    version 2.1 of the License.
#
#    This software is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#    Lesser General Public License for more details.
#
#    You should have received a copy of the GNU Lesser General Public
#    License along with this software; if not, write to the Free Software
#    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301  USA
#
# Authors:
#       Mike McLean <mikem@redhat.com>
#       Mike Bonnet <mikeb@redhat.com>

import calendar
from fnmatch import fnmatch
import koji
import logging
import os
import os.path
import re
import resource
import stat
import sys
import time
import ConfigParser
from zlib import adler32

try:
    from hashlib import md5 as md5_constructor
except ImportError:
    from md5 import new as md5_constructor
try:
    from hashlib import sha1 as sha1_constructor
except ImportError:
    from sha import new as sha1_constructor

def _changelogDate(cldate):
    return time.strftime('%a %b %d %Y', time.strptime(koji.formatTime(cldate), '%Y-%m-%d %H:%M:%S'))

def formatChangelog(entries):
    """Format a list of changelog entries (dicts)
    into a string representation."""
    result = ''
    for entry in entries:
        result += """* %s %s
%s

""" % (_changelogDate(entry['date']), entry['author'].encode("utf-8"),
       entry['text'].encode("utf-8"))

    return result

DATE_RE = re.compile(r'(\d+)-(\d+)-(\d+)')
TIME_RE = re.compile(r'(\d+):(\d+):(\d+)')

def parseTime(val):
    """
    Parse a string time in either "YYYY-MM-DD HH24:MI:SS" or "YYYY-MM-DD"
    format into floating-point seconds since the epoch.  If the time portion
    is not specified, it will be padded with zeros.  The string time is treated
    as UTC.  If the time string cannot be parsed into a valid date, None will be
    returned.
    """
    result = DATE_RE.search(val)
    if not result:
        return None
    else:
        date = [int(r) for r in result.groups()]
    time = [0, 0, 0]
    rest = val[result.end():].strip()
    result = TIME_RE.search(rest)
    if result:
        time = [int(r) for r in result.groups()]
    return calendar.timegm(date + time + [0, 0, 0])
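
# Illustrative sketch for parseTime (assumed example values, not from the
# upstream source); times are interpreted as UTC:
#   parseTime('2007-08-01')          -> 1185926400  (time portion defaults to 00:00:00)
#   parseTime('2007-08-01 14:30:00') -> 1185978600
#   parseTime('not a date')          -> None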

def checkForBuilds(session, tag, builds, event, latest=False):
    """Check that the builds existed in tag at the time of the event.
       If latest=True, check that the builds are the latest in tag."""
    for build in builds:
        if latest:
            tagged_list = session.getLatestBuilds(tag, event=event, package=build['name'])
        else:
            tagged_list = session.listTagged(tag, event=event, package=build['name'], inherit=True)
        for tagged in tagged_list:
            if tagged['version'] == build['version'] and tagged['release'] == build['release']:
                break
        else:
            return False

    return True

def duration(start):
    """Return the duration between start and now in MM:SS format"""
    elapsed = time.time() - start
    mins = int(elapsed / 60)
    secs = int(elapsed % 60)
    return '%s:%02i' % (mins, secs)

def printList(l):
    """Print the contents of the list comma-separated"""
    if len(l) == 0:
        return ''
    elif len(l) == 1:
        return l[0]
    elif len(l) == 2:
        return ' and '.join(l)
    else:
        ret = ', '.join(l[:-1])
        ret += ', and '
        ret += l[-1]
        return ret

def multi_fnmatch(s, patterns):
    """Returns true if s matches any pattern in the list

    If patterns is a string, it will be split() first
    """
    if isinstance(patterns, basestring):
        patterns = patterns.split()
    for pat in patterns:
        if fnmatch(s, pat):
            return True
    return False
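
# Illustrative sketch (assumed example values, not from the upstream source):
#   multi_fnmatch('kernel-3.10.0', 'kernel* glibc*') -> True
#   multi_fnmatch('bash-4.2', ['kernel*', 'glibc*']) -> False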

def dslice(dict, keys, strict=True):
    """Returns a new dictionary containing only the specified keys"""
    ret = {}
    for key in keys:
        if strict or dict.has_key(key):
            #for strict we skip the has_key check and let the dict generate the KeyError
            ret[key] = dict[key]
    return ret

def dslice_ex(dict, keys, strict=True):
    """Returns a new dictionary with only the specified keys removed"""
    ret = dict.copy()
    for key in keys:
        if strict or ret.has_key(key):
            del ret[key]
    return ret
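
# Illustrative sketch for dslice/dslice_ex (assumed example values, not from
# the upstream source); with strict=True (the default), a missing key raises KeyError:
#   dslice({'a': 1, 'b': 2, 'c': 3}, ['a', 'c'])   -> {'a': 1, 'c': 3}
#   dslice({'a': 1}, ['a', 'b'], strict=False)     -> {'a': 1}
#   dslice_ex({'a': 1, 'b': 2, 'c': 3}, ['b'])     -> {'a': 1, 'c': 3}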

def call_with_argcheck(func, args, kwargs=None):
    """Call function, raising ParameterError if args do not match"""
    if kwargs is None:
        kwargs = {}
    try:
        return func(*args, **kwargs)
    except TypeError, e:
        if sys.exc_info()[2].tb_next is None:
            # The stack is only one high, so the error occurred in this function.
            # Therefore, we assume the TypeError is due to a parameter mismatch
            # in the above function call.
            raise koji.ParameterError, str(e)
        raise


class HiddenValue(object):
    """A wrapper that prevents a value being accidentally printed"""

    def __init__(self, value):
        if isinstance(value, HiddenValue):
            self.value = value.value
        else:
            self.value = value

    def __str__(self):
        return "[value hidden]"

    def __repr__(self):
        return "HiddenValue()"


class LazyValue(object):
    """Used to represent a value that is generated by a function call at access time
    """

    def __init__(self, func, args, kwargs=None, cache=False):
        if kwargs is None:
            kwargs = {}
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.cache = cache

    def get(self):
        if hasattr(self, '_value'):
            return self._value
        value = self.func(*self.args, **self.kwargs)
        if self.cache:
            self._value = value
        return value


class LazyString(LazyValue):
    """Lazy values that should be expanded when printed"""

    def __str__(self):
        return str(self.get())


def lazy_eval(value):
    if isinstance(value, LazyValue):
        return value.get()
    return value


class LazyDict(dict):
    """A container for lazy data

    fields can refer to function calls, which can optionally be cached
    """

    def __getitem__(self, key):
        return lazy_eval(super(LazyDict, self).__getitem__(key))

    def lazyset(self, key, func, args, kwargs=None, cache=False):
        self[key] = LazyValue(func, args, kwargs=kwargs, cache=cache)

    def get(self, *args, **kwargs):
        return lazy_eval(super(LazyDict, self).get(*args, **kwargs))

    def copy(self):
        return LazyDict(self)

    def values(self):
        return [lazy_eval(val) for val in super(LazyDict, self).values()]

    def items(self):
        return [(key, lazy_eval(val)) for key, val in super(LazyDict, self).items()]

    def itervalues(self):
        for val in super(LazyDict, self).itervalues():
            yield lazy_eval(val)

    def iteritems(self):
        for key, val in super(LazyDict, self).iteritems():
            yield key, lazy_eval(val)

    def pop(self, key, *args, **kwargs):
        return lazy_eval(super(LazyDict, self).pop(key, *args, **kwargs))

    def popitem(self):
        key, val = super(LazyDict, self).popitem()
        return key, lazy_eval(val)
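
# Illustrative sketch of LazyDict usage (assumed example values, not from the
# upstream source); the value is computed on access rather than at assignment:
#   info = LazyDict(name='example')
#   info.lazyset('now', time.time, (), cache=True)
#   info['now']   # calls time.time() on first access; cache=True stores the result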


class LazyRecord(object):
    """A object whose attributes can reference lazy data

    Use lazysetattr to set lazy attributes, or just set them to a LazyValue
    object directly"""

    def __init__(self, base=None):
        if base is not None:
            self.__dict__.update(base.__dict__)
        self._base_record = base

    def __getattribute__(self, name):
        try:
            val = object.__getattribute__(self, name)
        except AttributeError:
            base = object.__getattribute__(self, '_base_record')
            val = getattr(base, name)
        return lazy_eval(val)


def lazysetattr(object, name, func, args, kwargs=None, cache=False):
    if not isinstance(object, LazyRecord):
        raise TypeError, 'object does not support lazy attributes'
    value = LazyValue(func, args, kwargs=kwargs, cache=cache)
    setattr(object, name, value)


def rmtree(path):
    """Delete a directory tree without crossing fs boundaries"""
    st = os.lstat(path)
    if not stat.S_ISDIR(st.st_mode):
        raise koji.GenericError, "Not a directory: %s" % path
    dev = st.st_dev
    dirlist = []
    for dirpath, dirnames, filenames in os.walk(path):
        dirlist.append(dirpath)
        newdirs = []
        dirsyms = []
        for fn in dirnames:
            path = os.path.join(dirpath, fn)
            st = os.lstat(path)
            if st.st_dev != dev:
                # don't cross fs boundary
                continue
            if stat.S_ISLNK(st.st_mode):
                #os.walk includes symlinks to dirs here
                dirsyms.append(fn)
                continue
            newdirs.append(fn)
        #only walk our filtered dirs
        dirnames[:] = newdirs
        for fn in filenames + dirsyms:
            path = os.path.join(dirpath, fn)
            st = os.lstat(path)
            if st.st_dev != dev:
                #shouldn't happen, but just to be safe...
                continue
            os.unlink(path)
    dirlist.reverse()
    for dirpath in dirlist:
        if os.listdir(dirpath):
            # dir not empty. could happen if a mount was present
            continue
        os.rmdir(dirpath)

def _relpath(path, start=getattr(os.path, 'curdir', '.')):
    """Backport of os.path.relpath for python<2.6"""

    sep = getattr(os.path, 'sep', '/')
    pardir = getattr(os.path, 'pardir', '..')
    if not path:
        raise ValueError("no path specified")
    start_list = [x for x in os.path.abspath(start).split(sep) if x]
    path_list = [x for x in os.path.abspath(path).split(sep) if x]
    i = -1
    for i in range(min(len(start_list), len(path_list))):
        if start_list[i] != path_list[i]:
            break
    else:
        i += 1
    rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
    if not rel_list:
        return getattr(os.path, 'curdir', '.')
    return os.path.join(*rel_list)

relpath = getattr(os.path, 'relpath', _relpath)

def eventFromOpts(session, opts):
    """Determine event id from standard cli options

    Standard options are:
        event: an event id (int)
        ts: an event timestamp (int)
        repo: pull event from given repo
    """
    event_id = getattr(opts, 'event')
    if event_id:
        return session.getEvent(event_id)
    ts = getattr(opts, 'ts')
    if ts:
        return session.getLastEvent(before=ts)
    repo = getattr(opts, 'repo')
    if repo:
        rinfo = session.repoInfo(repo)
        if rinfo:
            return {'id' : rinfo['create_event'],
                    'ts' : rinfo['create_ts'] }
    return None

def filedigestAlgo(hdr):
    """
    Get the file digest algorithm used in hdr.
    If there is no algorithm flag in the header,
    default to md5.  If the flag contains an unknown,
    non-None value, return 'unknown'.
    """
    # need to use the header ID hard-coded into Koji so we're not dependent on the
    # version of rpm installed on the hub
    digest_algo_id = hdr[koji.RPM_TAG_FILEDIGESTALGO]
    if not digest_algo_id:
        # certain versions of rpm return an empty list instead of None
        # for missing header fields
        digest_algo_id = None
    digest_algo = koji.RPM_FILEDIGESTALGO_IDS.get(digest_algo_id, 'unknown')
    return digest_algo.lower()

def parseStatus(rv, prefix):
    if isinstance(prefix, list) or isinstance(prefix, tuple):
        prefix = ' '.join(prefix)
    if os.WIFSIGNALED(rv):
        return '%s was killed by signal %i' % (prefix, os.WTERMSIG(rv))
    elif os.WIFEXITED(rv):
        return '%s exited with status %i' % (prefix, os.WEXITSTATUS(rv))
    else:
        return '%s terminated for unknown reasons' % prefix

def isSuccess(rv):
    """Return True if rv indicates successful completion
    (exited with status 0), False otherwise."""
    if os.WIFEXITED(rv) and os.WEXITSTATUS(rv) == 0:
        return True
    else:
        return False

def setup_rlimits(opts, logger=None):
    logger = logger or logging.getLogger("koji")
    for key in opts:
        if not key.startswith('RLIMIT_') or not opts[key]:
            continue
        rcode = getattr(resource, key, None)
        if rcode is None:
            continue
        orig = resource.getrlimit(rcode)
        try:
            limits = [int(x) for x in opts[key].split()]
        except ValueError:
            logger.error("Invalid resource limit: %s=%s", key, opts[key])
            continue
        if len(limits) not in (1,2):
            logger.error("Invalid resource limit: %s=%s", key, opts[key])
            continue
        if len(limits) == 1:
            limits.append(orig[1])
        logger.warn('Setting resource limit: %s = %r', key, limits)
        try:
            resource.setrlimit(rcode, tuple(limits))
        except ValueError, e:
            logger.error("Unable to set %s: %s", key, e)

class adler32_constructor(object):

    #mimicking the hashlib constructors
    def __init__(self, arg=''):
        self._value = adler32(arg) & 0xffffffffL
        #the bitwise and works around a bug in some versions of python
        #see: http://bugs.python.org/issue1202

    def update(self, arg):
        self._value = adler32(arg, self._value) & 0xffffffffL

    def digest(self):
        return self._value

    def hexdigest(self):
        return "%08x" % self._value

    def copy(self):
        dup = adler32_constructor()
        dup._value = self._value
        return dup

    digest_size = 4
    block_size = 1      #I think
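
# Illustrative sketch (assumed example values, not from the upstream source);
# the class mirrors the hashlib constructor interface:
#   chk = adler32_constructor('hello ')
#   chk.update('world')
#   chk.hexdigest()   -> '1a0b045d'  (adler32 of 'hello world')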

def tsort(parts):
    """Given a partial ordering, return a totally ordered list.

    part is a dict of partial orderings.  Each value is a set,
    which the key depends on.

    The return value is a list of sets, each of which has only
    dependencies on items in previous entries in the list."""
    parts = parts.copy()
    result = []
    while True:
        level = set([name for name, deps in parts.iteritems() if not deps])
        if not level:
            break
        result.append(level)
        parts = dict([(name, deps - level) for name, deps in parts.iteritems()
                      if name not in level])
    if parts:
        raise ValueError, 'total ordering not possible'
    return result
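
# Illustrative sketch for tsort (assumed example values, not from the upstream
# source); a dependency graph with a cycle raises ValueError:
#   tsort({'a': set(), 'b': set(['a']), 'c': set(['a', 'b'])})
#       -> [set(['a']), set(['b']), set(['c'])]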

class MavenConfigOptAdapter(object):
    """
    Wrap a ConfigParser so it looks like a optparse.Values instance
    used by maven-build.
    """
    MULTILINE = ['properties', 'envs']
    MULTIVALUE = ['goals', 'profiles', 'packages',
                   'jvm_options', 'maven_options', 'buildrequires']

    def __init__(self, conf, section):
        self._conf = conf
        self._section = section

    def __getattr__(self, name):
        if self._conf.has_option(self._section, name):
            value = self._conf.get(self._section, name)
            if name in self.MULTIVALUE:
                value = value.split()
            elif name in self.MULTILINE:
                value = value.splitlines()
            return value
        raise AttributeError, name

def maven_opts(values, chain=False, scratch=False):
    """
    Convert the argument (an optparse.Values object) to a dict of build options
    suitable for passing to maven-build or maven-chain.
    """
    opts = {}
    for key in ('scmurl', 'patches', 'specfile', 'goals', 'profiles', 'packages',
                'jvm_options', 'maven_options'):
        val = getattr(values, key, None)
        if val:
            opts[key] = val
    props = {}
    for prop in getattr(values, 'properties', []):
        fields = prop.split('=', 1)
        if len(fields) != 2:
            fields.append(None)
        props[fields[0]] = fields[1]
    if props:
        opts['properties'] = props
    envs = {}
    for env in getattr(values, 'envs', []):
        fields = env.split('=', 1)
        if len(fields) != 2:
            raise ValueError, "Environment variables must be in NAME=VALUE format"
        envs[fields[0]] = fields[1]
    if envs:
        opts['envs'] = envs
    if chain:
        val = getattr(values, 'buildrequires', [])
        if val:
            opts['buildrequires'] = val
    if scratch and not chain:
        opts['scratch'] = True
    return opts

def maven_params(config, package, chain=False, scratch=False):
    values = MavenConfigOptAdapter(config, package)
    return maven_opts(values, chain=chain, scratch=scratch)

def wrapper_params(config, package, chain=False, scratch=False):
    params = {}
    values = MavenConfigOptAdapter(config, package)
    params['type'] = getattr(values, 'type', None)
    params['scmurl'] = getattr(values, 'scmurl', None)
    params['buildrequires'] = getattr(values, 'buildrequires', [])
    if not scratch:
        params['create_build'] = True
    return params

def parse_maven_params(confs, chain=False, scratch=False):
    """
    Parse .ini files that contain parameters to launch a Maven build.

    Return a map whose keys are package names and values are config parameters.
    """
    if not isinstance(confs, (list, tuple)):
        confs = [confs]
    config = ConfigParser.ConfigParser()
    for conf in confs:
        conf_fd = file(conf)
        config.readfp(conf_fd)
        conf_fd.close()
    builds = {}
    for package in config.sections():
        params = {}
        buildtype = 'maven'
        if config.has_option(package, 'type'):
            buildtype = config.get(package, 'type')
        if buildtype == 'maven':
            params = maven_params(config, package, chain=chain, scratch=scratch)
        elif buildtype == 'wrapper':
            params = wrapper_params(config, package, chain=chain, scratch=scratch)
            if len(params.get('buildrequires')) != 1:
                raise ValueError, "A wrapper-rpm must depend on exactly one package"
        else:
            raise ValueError, "Unsupported build type: %s" % buildtype
        if not 'scmurl' in params:
            raise ValueError, "%s is missing the scmurl parameter" % package
        builds[package] = params
    if not builds:
        raise ValueError, "No sections found in: %s" % ', '.join(confs)
    return builds
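
# Illustrative sketch for parse_maven_params (the section name, path, and URL
# below are hypothetical, not from the upstream source):
#   [commons-lang]
#   type = maven
#   scmurl = git://example.com/commons-lang.git#sample-tag
#   goals = install
# parse_maven_params('/path/to/build.ini') would then return something like:
#   {'commons-lang': {'scmurl': 'git://example.com/commons-lang.git#sample-tag',
#                     'goals': ['install']}}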

def parse_maven_param(confs, chain=False, scratch=False, section=None):
    """
    Parse .ini files that contain parameters to launch a Maven build.

    Return a map that contains a single entry corresponding to the given
    section of the .ini file.  If the config file only contains a single
    section, section does not need to be specified.
    """
    if not isinstance(confs, (list, tuple)):
        confs = [confs]
    builds = parse_maven_params(confs, chain=chain, scratch=scratch)
    if section:
        if section in builds:
            builds = {section: builds[section]}
        else:
            raise ValueError, "Section %s does not exist in: %s" % (section, ', '.join(confs))
    elif len(builds) > 1:
        raise ValueError, "Multiple sections in: %s, you must specify the section" % ', '.join(confs)
    return builds

def parse_maven_chain(confs, scratch=False):
    """
    Parse maven-chain config.

    confs is a path to a config file or a list of paths to config files.

    Return a map whose keys are package names and values are config parameters.
    """
    builds = parse_maven_params(confs, chain=True, scratch=scratch)
    depmap = {}
    for package, params in builds.items():
        depmap[package] = set(params.get('buildrequires', []))
    try:
        order = tsort(depmap)
    except ValueError, e:
        raise ValueError, 'No possible build order, missing/circular dependencies'
    return builds