This file is indexed.

/usr/share/stackapplet/network_thread.py is in stackapplet 1.5.2-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

  1
  2
  3
  4
  5
  6
  7
  8
  9
 10
 11
 12
 13
 14
 15
 16
 17
 18
 19
 20
 21
 22
 23
 24
 25
 26
 27
 28
 29
 30
 31
 32
 33
 34
 35
 36
 37
 38
 39
 40
 41
 42
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
#====================================
#          network_thread
#   Copyright 2011 - Nathan Osman
#
#  Manages network requests to and
# from the SE API and processes them
#       in a separate thread.
#
#   StackApplet is released under
#         the MIT license
#====================================

import Queue
import threading
import time

import urllib2
import zlib

# We need gobject for cross-thread signalling
import gobject
gobject.threads_init()

# Now lets see if we can import the JSON
# module. We try either of these two classes
try:
	import json
except ImportError:
	import simplejson as json

# Constants
# Stack Exchange API version, used as the path segment of every request URL.
API_VERSION = "2.2"
# Per-application request key (public, quota-identifying — not a secret).
API_KEY     = "VN8jyf7k8STlkO3lx*iw6w(("

class network_thread(threading.Thread):
	'''Background worker that performs HTTP requests for StackApplet.
	
	Requests are queued with issue_request() / issue_api_request() and are
	serviced one at a time on this thread.  Results and errors are handed
	back to the GTK main loop with gobject.idle_add(), so callbacks always
	run on the UI thread.
	'''
	
	def __init__(self, error_handler):
		'''Create the worker and start servicing the (initially empty) queue.
		
		error_handler -- callable invoked as error_handler(data, message)
		                 on the GTK main loop whenever a request fails.
		'''
		
		# Initialize the underlying thread object
		threading.Thread.__init__(self)
		
		self.error_handler = error_handler
		
		# Pending requests, served lowest priority value first.
		self.request_queue = Queue.PriorityQueue()
		
		# Begin processing immediately.
		self.start()
	
	def issue_request(self, url, callback, data, decompress=True, priority=2):
		'''Queue a raw URL for retrieval.
		
		url        -- the URL to fetch
		callback   -- invoked as callback(response, data) on the main loop
		data       -- opaque value passed through to callback/error_handler
		decompress -- if True, gunzip the response and JSON-decode it;
		              if False, the raw response body is returned
		priority   -- lower numbers are processed first (default 2)
		'''
		
		# Append the request to the priority queue
		self.request_queue.put([priority, [url, callback, data, decompress]])
	
	def issue_api_request(self, site, method, callback, data, additional_params=''):
		'''Queue a request against the Stack Exchange API.
		
		site              -- API site parameter (e.g. "stackoverflow")
		method            -- API method path; expected to begin with "/"
		callback          -- invoked as callback(json_response, data)
		data              -- opaque value passed through to the callback
		additional_params -- extra query-string parameters, already encoded
		'''
		
		url = "http://api.stackexchange.com/" + API_VERSION + method + "?key=" + API_KEY + "&site=" + site
		
		if additional_params != '':
			url += '&' + additional_params
		
		self.issue_request(url, callback, data)
	
	def run(self):
		'''Service the request queue forever.
		
		A queued payload of None is the sentinel that terminates the thread.
		All results and errors are marshalled to the GTK main loop with
		gobject.idle_add(); the loop itself never raises.
		'''
		
		while True:
			
			# Block until the next request arrives.
			item_block = self.request_queue.get(True)
			
			item = item_block[1]
			
			# None is the shutdown sentinel.
			if item is None:
				break
			
			# The SE API requires at least 100ms between requests, so we
			# insert a small sleep before every request.
			time.sleep(0.1)
			
			try:
				
				# Issue the request, advertising compressed transfer.
				request = urllib2.Request(item[0])
				request.add_header('Accept-Encoding', 'gzip,deflate')
				
				opener = urllib2.build_opener()
				gzipped_stream = opener.open(request)
				
				# If the request needs decompression, assume that it also
				# needs JSON decoding; otherwise return the raw body.
				if item[3]:
					raw_data = zlib.decompress(gzipped_stream.read(), 16 + zlib.MAX_WBITS)
					json_data = json.loads(raw_data)
				else:
					json_data = gzipped_stream.read()
				
				# Additional throttle before delivering the result to the
				# main loop as callback(response, data).
				time.sleep(1)
				gobject.idle_add(item[1], json_data, item[2])
			
			except urllib2.URLError:
				
				# DNS failure, refused connection, HTTP error, etc. —
				# a number of different things can cause this.
				gobject.idle_add(self.error_handler, item[2], "Network error.")
			
			except zlib.error:
				
				# The response body could not be decompressed.
				gobject.idle_add(self.error_handler, item[2], "GZip error.")
			
			except Exception:
				
				# Catch-all boundary: never let the worker thread die.
				gobject.idle_add(self.error_handler, item[2], "Unknown error.")