This file is indexed.

/usr/share/pyshared/pebl/learner/simanneal.py is in python-pebl 1.0.2-2.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

"""Classes and functions for Simulated Annealing learner"""

from math import exp
import random

from pebl import network, result, evaluator, config
from pebl.learner.base import *


class SALearnerStatistics:
    def __init__(self, starting_temp, delta_temp, max_iterations_at_temp):
        self.temp = starting_temp
        self.iterations_at_temp = 0
        self.max_iterations_at_temp = max_iterations_at_temp
        self.delta_temp = delta_temp

        self.iterations = 0
        self.best_score = 0
        self.current_score = 0

    def update(self):
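        # Count this iteration; once the per-temperature budget is spent,
        # cool geometrically by multiplying the temperature by delta_temp.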
        self.iterations += 1
        self.iterations_at_temp += 1

        if self.iterations_at_temp >= self.max_iterations_at_temp:
            self.temp *= self.delta_temp
            self.iterations_at_temp = 0


class SimulatedAnnealingLearner(Learner):
    #
    # Parameters
    #
    _params = (
        config.FloatParameter(
            'simanneal.start_temp',
            "Starting temperature for a run.",
            config.atleast(0.0),
            default=100.0
        ),
        config.FloatParameter(
            'simanneal.delta_temp',
            'Multiplicative cooling factor applied to the temperature at each step.',
            config.atleast(0.0),
            default=0.5
        ),
        config.IntParameter(
            'simanneal.max_iters_at_temp',
            'Max iterations at any temperature.',
            config.atleast(0),
            default=100
        ),
        config.StringParameter(
            'simanneal.seed',
            'Starting network for the simulated annealing search.',
            default=''
        )
    )

    def __init__(self, data_=None, prior_=None, **options):
        """Create a Simulated Aneaaling learner.

        For more information about Simulated Annealing algorithms, consult:

            1. http://en.wikipedia.org/wiki/Simulated_annealing
            2. D. Heckerman. A Tutorial on Learning with Bayesian Networks. 
               Microsoft Technical Report MSR-TR-95-06, 1995. p.35-36.

        Any config param for 'simanneal' can be passed in via options.
        Use just the option part of the parameter name.
        
        """

        super(SimulatedAnnealingLearner,self).__init__(data_, prior_)
        config.setparams(self, options)
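        # The seed may be supplied as an edge-list string via the
        # 'simanneal.seed' config parameter; wrap it in a Network over the
        # dataset's variables if needed.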
        if not isinstance(self.seed, network.Network):
            self.seed = network.Network(self.data.variables, self.seed)
        
    def run(self):
        """Run the learner."""

        self.stats = SALearnerStatistics(self.start_temp, self.delta_temp, 
                                         self.max_iters_at_temp)
        self.result = result.LearnerResult(self)
        self.evaluator = evaluator.fromconfig(self.data, self.seed, self.prior)
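        # Scoring a copy of the seed makes it the evaluator's working network,
        # which later alterations modify and restore_network() rolls back.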
        self.evaluator.score_network(self.seed.copy())

        self.result.start_run()
        curscore = self.evaluator.score_network()
        self.stats.current_score = self.stats.best_score = curscore
        
        # The temperature decays geometrically, so it never reaches 0;
        # continue until it drops below 1.
        while self.stats.temp >= 1:
            try:
                newscore = self._alter_network_randomly_and_score()
            except CannotAlterNetworkException:
                return

            self.result.add_network(self.evaluator.network, newscore)

            if self._accept(newscore):
                # set current score
                self.stats.current_score = newscore
                if self.stats.current_score > self.stats.best_score:
                    self.stats.best_score = self.stats.current_score
            else:
                # undo network alteration
                self.evaluator.restore_network()

            # The temperature is not lowered every iteration, only once the
            # per-temperature iteration limit is reached (see stats.update).
            self.stats.update() 

        self.result.stop_run()
        return self.result

    def _accept(self, newscore):
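        # Metropolis acceptance rule: always accept an equal or better score;
        # accept a worse one with probability exp((new - old)/temp), so
        # downhill moves become increasingly unlikely as the temperature falls.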
        oldscore = self.stats.current_score

        if newscore >= oldscore:
            return True
        elif random.random() < exp((newscore - oldscore)/self.stats.temp):
            return True
        else:
            return False
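
As a rough usage sketch (the file name and parameter values are illustrative,
and it assumes pebl's data.fromfile loader and the LearnerResult.tohtml report
described in the pebl documentation), the learner above can be driven like
this, passing any 'simanneal.*' config parameter by the part after the dot:

    from pebl import data
    from pebl.learner import simanneal

    # Load a dataset (a tab-delimited file with variable names in the header).
    dataset = data.fromfile("pebl-data.txt")

    # start_temp, delta_temp and max_iters_at_temp map to the
    # 'simanneal.*' parameters defined above.
    learner = simanneal.SimulatedAnnealingLearner(dataset,
                                                  start_temp=50.0,
                                                  delta_temp=0.8,
                                                  max_iters_at_temp=200)
    result = learner.run()
    result.tohtml("simanneal-result")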