This file is indexed.

/usr/share/pyshared/jsb/lib/persist.py is in jsonbot 0.84.4-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

# jsb/persist.py
#
#

"""
    allow data to be written to disk or BigTable in JSON format. creating 
    the persisted object restores data. 

"""

## jsb imports

from jsb.utils.trace import whichmodule, calledfrom, callstack, where
from jsb.utils.lazydict import LazyDict
from jsb.utils.exception import handle_exception
from jsb.utils.name import stripname
from jsb.utils.locking import lockdec
from jsb.utils.timeutils import elapsedstring
from jsb.lib.callbacks import callbacks
from jsb.lib.errors import MemcachedCounterError, JSONParseError

from datadir import getdatadir

## simplejson imports

from jsb.imports import getjson
json = getjson()

## basic imports

from collections import deque
import thread
import logging
import os
import os.path
import types
import copy
import sys
import time

## defines

cpy = copy.deepcopy

## locks


persistlock = thread.allocate_lock()
persistlocked = lockdec(persistlock)

## global list to keep track of which persist objects still need to be saved

needsaving = deque()

def cleanup(bot=None, event=None):
    global needsaving
    todo = cpy(needsaving)
    r = []
    for p in todo:
        try: p.dosave() ; r.append(p) ; logging.warn("saved on retry - %s" % p.fn)
        except (OSError, IOError), ex: logging.error("failed to save %s - %s" % (p, str(ex)))
    for p in r:
        try: needsaving.remove(p)
        except ValueError: pass
    return needsaving
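
## note: cleanup is registered on the TICK60 callback at the bottom of this
## module, so objects left on the needsaving retry queue get saved again later.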


## try google first

try:
    from google.appengine.ext.db.metadata import Kind
    from google.appengine.ext import db
    import google.appengine.api.memcache as mc
    from google.appengine.api.datastore_errors import Timeout, TransactionFailedError
    from cache import get, set, delete
    logging.debug("using BigTable based Persist")

    ## JSONindb class

    class JSONindb(db.Model):
        """ model to store json files in. """
        modtime = db.DateTimeProperty(auto_now=True, indexed=False)
        createtime = db.DateTimeProperty(auto_now_add=True, indexed=False)
        filename = db.StringProperty()
        content = db.TextProperty(indexed=False)

    ## Persist class

    class Persist(object):

        """ persist data attribute to database backed JSON file. """ 

        def __init__(self, filename, default={}, type="cache"):
            self.cachetype = None
            self.plugname = calledfrom(sys._getframe())
            if 'lib' in self.plugname: self.plugname = calledfrom(sys._getframe(1))
            try: del self.fn
            except: pass 
            self.fn = unicode(filename.strip()) # filename to save to
            self.logname = os.sep.join(self.fn.split(os.sep)[-1:])
            self.countername = self.fn + "_" + "counter"
            self.mcounter = mc.get(self.countername) or mc.set(self.countername, "1")
            try: self.mcounter = int(self.mcounter)
            except ValueError: logging.warn("can't parse %s mcounter, setting to zero: %s" % (self.fn, self.mcounter)) ; self.mcounter = 0
            self.data = None
            self.type = type
            self.counter = self.mcounter
            self.key = None
            self.obj = None
            self.size = 0
            self.jsontxt = ""
            self.init(default)

        def init(self, default={}, filename=None):
            if self.checkmc(): self.jsontxt = self.updatemc() ; self.cachetype = "cache"
            else:
                tmp = get(self.fn)
                self.cachetype = "mem"
                if tmp != None:
                    logging.warn("*%s* - loaded %s" % (self.cachetype, self.fn))
                    self.data = tmp
                    if type(self.data) == types.DictType: self.data = LazyDict(self.data)
                    return self.data
            if self.jsontxt == "": self.cachetype = "cache" ; self.jsontxt = mc.get(self.fn)
            if self.jsontxt == None: 
                self.cachetype = "db"
                logging.debug("%s - loading from db" % self.fn) 
                try:
                    try: self.obj = JSONindb.get_by_key_name(self.fn)
                    except Timeout: self.obj = JSONindb.get_by_key_name(self.fn)
                except Exception, ex:
                    # bw compat sucks
                    try: self.obj = JSONindb.get_by_key_name(self.fn)
                    except Exception, ex:
                        handle_exception()
                        self.obj = None
                if self.obj == None:
                    logging.warn("%s - no entry found, using default" % self.fn)
                    self.jsontxt = json.dumps(default) ; self.cachetype = "default"
                else:
                    self.jsontxt = self.obj.content; self.cachetype = "db"
                if self.jsontxt:
                    mc.set(self.fn, self.jsontxt)
                    incr = mc.incr(self.countername)
                    if incr:
                        try: self.mcounter = self.counter = int(incr)
                        except ValueError: logging.error("can't make counter out of %s" % incr) 
                    else: self.mcounter = 1
            logging.debug("memcached counters for %s: %s" % (self.fn, self.mcounter))
            if self.jsontxt == None: self.jsontxt = json.dumps(default) 
            logging.warn('%s - jsontxt is %s' % (self.fn, self.jsontxt))
            try:
                self.data = json.loads(self.jsontxt)
            except: raise JSONParseError(self.fn)
            if not self.data: self.data = default
            self.size = len(self.jsontxt)
            if type(self.data) == types.DictType: self.data = LazyDict(self.data)
            set(self.fn, self.data)
            logging.warn("*%s* - loaded %s (%s)" % (self.cachetype, self.fn, len(self.jsontxt)))

        def get(self):
            logging.debug("getting %s from local cache" % self.fn)
            a = get(self.fn)
            logging.debug("got %s from local cache" % type(a))
            return a

        def sync(self):
            logging.debug("syncing %s" % self.fn)
            tmp = cpy(self.data)
            data = json.dumps(tmp)
            mc.set(self.fn, data)
            if type(self.data) == types.DictType:
                self.data = LazyDict(self.data)
            set(self.fn, self.data)
            return data

        def updatemc(self):
            tmp = mc.get(self.fn)
            if tmp != None:
                try:
                    t = json.loads(tmp)
                    if self.data: t.update(self.data)
                    self.data = LazyDict(t)
                    logging.warn("updated %s" % self.fn)
                except AttributeError, ex: logging.warn(str(ex))
                return self.data
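
        # change-detection sketch: every save bumps a shared memcached counter
        # (self.countername). checkmc() compares that shared counter with the
        # local copy; a difference means another instance wrote this file in
        # the meantime, and updatemc() then merges the newer copy into self.data.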

        def checkmc(self):
            try:
                self.mcounter = int(mc.get(self.countername)) or 0
            except: self.mcounter = 0
            logging.warn("mcounter for %s is %s (%s)" % (self.fn, self.mcounter, self.counter))
            # any difference between the shared counter and our local copy means
            # another instance wrote this file since we last synced
            return self.mcounter != self.counter

        def save(self):
            cleanup()
            global needsaving
            try: self.dosave()
            except (IOError, OSError, TransactionFailedError):
                handle_exception()
                logging.error("PUSHED ON RETRY QUEUE") 
                self.sync()
                if self not in needsaving: needsaving.appendleft(self)

        @persistlocked
        def dosave(self, filename=None):
            """ save json data to database. """
            if self.checkmc(): self.updatemc()
            fn = filename or self.fn
            bla = json.dumps(self.data)
            if filename or self.obj == None:
                self.obj = JSONindb(key_name=fn)
                self.obj.content = bla
            else: self.obj.content = bla
            self.obj.filename = fn
            from google.appengine.ext import db
            key = db.run_in_transaction(self.obj.put)
            logging.debug("transaction returned %s" % key)
            mc.set(fn, bla)
            if type(self.data) == types.DictType: self.data = LazyDict(self.data)
            set(fn, self.data)
            incr = mc.incr(self.countername)
            if incr:
                try: self.mcounter = self.counter = int(incr)
                except ValueError: logging.error("can't make counter out of %s" % incr) 
            else: self.mcounter = 1
            self.counter = self.mcounter
            logging.debug("memcached counters for %s: %s" % (fn, self.mcounter))
            logging.warn('saved %s (%s)' % (fn, len(bla)))
            logging.debug('saved %s from %s' % (fn, where()))

        def upgrade(self, filename):
            self.init(self.data, filename=filename)

    ## findfilenames function 

    def findfilenames(target, filter=[], skip=[]):
        res = []
        targetkey = db.Key.from_path(JSONindb.kind(), target)
        targetkey2 = db.Key.from_path(JSONindb.kind(), target + "zzz")
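        # keys-only range scan: key names sort lexicographically, so selecting
        # keys between target and target + "zzz" approximates a prefix match on
        # the key name (assuming no name suffix sorts past "zzz").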
        logging.warn("key for %s is %s" % (target, str(targetkey)))
        q = db.Query(JSONindb, keys_only=True)
        q.filter("__key__ >", targetkey)
        q.filter("__key__ <", targetkey2)
        for key in q:
            fname = key.name()
            if fname in skip: continue
            fname = str(fname)
            logging.warn("using %s" % fname)
            go = True
            for fil in filter:
                if fil not in fname.lower(): go = False ; break
            if not go: continue
            res.append(fname)
        return res

    def findnames(target, filter=[], skip=[]):
        res = []
        for f in findfilenames(target, filter, skip):
            res.append(f.split(os.sep)[-1])
        return res


except ImportError:

    ## file based persist

    logging.debug("using file based Persist")


    ## imports for shell bots

    got = False
    from jsb.memcached import getmc
    mc = getmc()
    if mc:
        status = mc.get_stats()
        if status:
            logging.warn("memcached uptime is %s" % elapsedstring(status[0][1]['uptime']))
            got = True
    if got == False:
        logging.debug("no memcached found - using own cache")
    from cache import get, set, delete

    import fcntl

    ## classes

    class Persist(object):

        """ persist data attribute to JSON file. """
        
        def __init__(self, filename, default=None, init=True, postfix=None):
            """ Persist constructor """
            if postfix: self.fn = str(filename.strip()) + str("-%s" % postfix)
            else: self.fn = str(filename.strip())
            self.lock = thread.allocate_lock() # lock used when saving
            self.data = LazyDict(default=default) # attribute to hold the data
            try:
                res = []
                target = getdatadir().split(os.sep)[-1]
                for i in self.fn.split(os.sep)[::-1]:
                    if target in i: break
                    res.append(i)
                self.logname = os.sep.join(res[::-1])
                if not self.logname: self.logname = self.fn
            except: handle_exception() ; self.logname = self.fn
            self.countername = self.fn + "_" + "counter"
            if got:
                count = mc.get(self.countername)
                try:
                    self.mcounter = self.counter = int(count)
                except (ValueError, TypeError):
                    self.mcounter = self.counter = mc.set(self.countername, "1") or 0
            else:
                self.mcounter = self.counter = 0
            self.ssize = 0
            self.jsontxt = ""
            self.dontsave = False
            if default == None: default = LazyDict()
            if init:
                self.init(default)

        def size(self):
            return "%s (%s)" % (len(self.data), len(self.jsontxt))

        def init(self, default={}, filename=None):
            """ initialize the data. """
            gotcache = False
            cachetype = "cache"
            try:
                logging.debug("using name %s" % self.fn)
                a = get(self.fn)
                if a: self.data = a
                else: self.data = None
                if self.data != None:
                    logging.debug("got data from local cache")
                    return self
                if got: self.jsontxt = mc.get(self.fn) ; cachetype = "cache"
                if not self.jsontxt:
                   datafile = open(self.fn, 'r')
                   self.jsontxt = datafile.read()
                   datafile.close()
                   self.ssize = len(self.jsontxt)
                   cachetype = "file"
                   if got: mc.set(self.fn, self.jsontxt)
            except IOError, ex:
                if not 'No such file' in str(ex):
                    logging.error('failed to read %s: %s' % (self.fn, str(ex)))
                    raise
                else:
                    logging.debug("%s doesn't exist yet" % self.fn)
                    self.jsontxt = json.dumps(default)
            try:
                if self.jsontxt:
                    logging.debug(u"loading: %s" % type(self.jsontxt))
                    try: self.data = json.loads(str(self.jsontxt))
                    except Exception, ex: logging.error("couldn't parse %s" % self.jsontxt) ; self.data = None ; self.dontsave = True
                if not self.data: self.data = LazyDict()
                elif type(self.data) == types.DictType:
                    logging.debug("converting dict to LazyDict")
                    d = LazyDict()
                    d.update(self.data)
                    self.data = d
                set(self.fn, self.data)
                logging.debug("loaded %s - %s" % (self.logname, cachetype))
            except Exception, ex:
                logging.error('ERROR: %s' % self.fn)
                raise

        def upgrade(self, filename):
            self.init(self.data, filename=filename)
            self.save(filename)

        def get(self):
            logging.debug("getting %s from local cache" % self.fn)
            a = get(self.fn)
            logging.debug("got %s from local cache" % type(a))
            return a

        def sync(self):
            logging.debug("syncing %s" % self.fn)
            if got: mc.set(self.fn, json.dumps(self.data))
            set(self.fn, self.data)
            return self

        def save(self):
            cleanup()
            global needsaving
            try: self.dosave()
            except (IOError, OSError):
                self.sync()
                if self not in needsaving: needsaving.append(self)

        @persistlocked
        def dosave(self):
            """ persist data attribute. """
            try:
                if self.dontsave: logging.error("dontsave is set on %s - not saving" % self.fn) ; return
                fn = self.fn
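                # when memcached is available, bump the shared save counter; a
                # jump of more than 1 since our last save means another process
                # saved this file first, so merge its version before writing.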
                if got: self.mcounter = int(mc.incr(self.countername))
                if got and (self.mcounter - self.counter) > 1:
                    tmp = json.loads(mc.get(fn))
                    if tmp:
                        try: tmp.update(self.data) ; self.data = LazyDict(tmp) ; logging.warn("updated %s" % fn)
                        except AttributeError: pass
                    self.counter = self.mcounter
                d = []
                if fn.startswith(os.sep): d = [os.sep,]
                for p in fn.split(os.sep)[:-1]:
                    if not p: continue
                    d.append(p)
                    pp = os.sep.join(d)
                    if not os.path.isdir(pp):
                        logging.warn("creating %s dir" % pp)
                        os.mkdir(pp)
                tmp = fn + '.tmp' # tmp file to save to
                datafile = open(tmp, 'w')
                fcntl.flock(datafile, fcntl.LOCK_EX | fcntl.LOCK_NB)
                json.dump(self.data, datafile, indent=True)
                fcntl.flock(datafile, fcntl.LOCK_UN)
                datafile.close()
                try: os.rename(tmp, fn)
                except (IOError, OSError):
                    os.remove(fn)
                    os.rename(tmp, fn)
                jsontxt = json.dumps(self.data)
                logging.debug("setting cache %s - %s" % (fn, jsontxt))
                self.jsontxt = jsontxt
                set(fn, self.data)
                if got: mc.set(fn, jsontxt)
                logging.info('%s saved' % self.logname)
            except IOError, ex: logging.error("not saving %s: %s" % (self.fn, str(ex))) ; raise
            except: raise
            finally: pass

    ## findfilenames function 

    def findfilenames(target, filter=[], skip=[]):
        res = []
        if not os.path.isdir(target): return res
        for f in os.listdir(target):
            if f in skip: continue
            fname = target + os.sep + f
            if os.path.isdir(fname): res.extend(findfilenames(fname, filter, skip))
            go = True
            for fil in filter:
                if fil not in fname.lower(): go = False ; break
            if not go: continue
            res.append(fname)
        return res

    def findnames(target, filter=[], skip=[]):
        res = []
        for f in findfilenames(target, filter, skip):
            res.append(f.split(os.sep)[-1])
        return res


class PlugPersist(Persist):

    """ persist plug related data. data is stored in jsondata/plugs/{plugname}/{filename}. """

    def __init__(self, filename, default={}, *args, **kwargs):
        plugname = calledfrom(sys._getframe())
        Persist.__init__(self, getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep + stripname(filename), default=default, *args, **kwargs)
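
## usage sketch (illustrative, hypothetical plugin name): PlugPersist("settings")
## called from a plugin named "myplug" persists to
## <datadir>/plugs/myplug/settings - the plugname is derived from the caller.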

class GlobalPersist(Persist):

    """ persist plug related data. data is stored in jsondata/plugs/{plugname}/{filename}. """

    def __init__(self, filename, default={}, *args, **kwargs):
        if not filename: raise Exception("filename not set in GlobalPersist")
        logging.warn("filename is %s" % filename)
        Persist.__init__(self, getdatadir() + os.sep + 'globals' + os.sep + stripname(filename), default=default, *args, **kwargs)

## PersistCollection class

class PersistCollection(object):

    """ maintain a collection of Persist objects. """

    def __init__(self, path, *args, **kwargs):
        assert path
        self.path = path
        d = [os.sep, ]
        for p in path.split(os.sep):
            if not p: continue
            d.append(p)
            pp = os.sep.join(d)
            try:
                os.mkdir(pp)
                logging.warn("creating %s dir" % pp)
            except OSError, ex:
                if 'Errno 13' in str(ex) or 'Errno 2' in str(ex): continue
                logging.warn("can't make %s - %s" % (pp,str(ex))) ; continue
                
    def filenames(self, filter=[], path=None, skip=[], result=[]):
        target = path or self.path
        res = findfilenames(target, filter, skip)
        logging.info("filenames are %s" % str(res))
        return res

    def names(self, filter=[], path=None, skip=[], result=[]):
        target = path or self.path
        res = findnames(target, filter, skip)
        return res

    def search(self, field, target):
        res = []
        for obj in self.objects().values():
            try: item = getattr(obj.data, field)
            except AttributeError: handle_exception() ; continue
            if not item: continue
            if target in item: res.append(obj)
        return res
            
    def objects(self, filter=[], path=None):
        if type(filter) != types.ListType: filter = [filter, ] 
        res = {}
        target = path or self.path
        for f in self.filenames(filter, target):
             res[f] = Persist(f)
        return res
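
## usage sketch (illustrative; 'owner' and 'someuser' are hypothetical):
#
#     coll = PersistCollection(getdatadir() + os.sep + 'plugs')
#     objs = coll.objects()               # {filename: Persist object}
#     hits = coll.search('owner', 'someuser')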

## PlugPersistCollection class

class PlugPersistCollection(PersistCollection):

    def __init__(self):
        plugname = calledfrom(sys._getframe())
        self.path =  getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep
        PersistCollection.__init__(self, self.path)

## GlobalPersistCollection class

class GlobalPersistCollection(PersistCollection):

    def __init__(self):
        self.path =  getdatadir() + os.sep + 'globals'
        PersistCollection.__init__(self, self.path)


callbacks.add("TICK60", cleanup)