# -*- coding: utf-8 -*-
"""Classes and functions that create the bandwidth measurements document
(v3bw) used by bandwidth authorities."""

import copy
import logging
import math
import os
from itertools import combinations
from statistics import median, mean
from stem.descriptor import parse_file

from sbws import __version__
from sbws.globals import (SPEC_VERSION, BW_LINE_SIZE, SBWS_SCALE_CONSTANT,
                          TORFLOW_SCALING, SBWS_SCALING, TORFLOW_BW_MARGIN,
                          TORFLOW_OBS_LAST, TORFLOW_OBS_MEAN,
                          PROP276_ROUND_DIG, MIN_REPORT, MAX_BW_DIFF_PERC)
from sbws.lib.resultdump import ResultSuccess, _ResultType
from sbws.util.filelock import DirectoryLock
from sbws.util.timestamp import (now_isodt_str, unixts_to_isodt_str,
                                 now_unixts)
from sbws.util.state import State

log = logging.getLogger(__name__)

LINE_SEP = '\n'
KEYVALUE_SEP_V1 = '='
KEYVALUE_SEP_V2 = ' '
# List of the extra KeyValues accepted by the class
EXTRA_ARG_KEYVALUES = ['software', 'software_version', 'file_created',
                       'earliest_bandwidth', 'generator_started']
STATS_KEYVALUES = ['number_eligible_relays', 'minimum_number_eligible_relays',
                   'number_consensus_relays', 'percent_eligible_relays',
                   'minimum_percent_eligible_relays']
KEYVALUES_INT = STATS_KEYVALUES
# List of all unordered KeyValues currently being used to generate the file
UNORDERED_KEYVALUES = EXTRA_ARG_KEYVALUES + STATS_KEYVALUES + \
                      ['latest_bandwidth']
# List of all the KeyValues currently being used to generate the file
ALL_KEYVALUES = ['version'] + UNORDERED_KEYVALUES
TERMINATOR = '====='
# Num header lines in v1.X.X using all the KeyValues
NUM_LINES_HEADER_V1 = len(ALL_KEYVALUES) + 2
LINE_TERMINATOR = TERMINATOR + LINE_SEP

# KeyValue separator in Bandwidth Lines
BW_KEYVALUE_SEP_V1 = ' '
# not including the extra bws in the files for now
BW_KEYVALUES_BASIC = ['node_id', 'bw']
BW_KEYVALUES_FILE = BW_KEYVALUES_BASIC + \
                    ['master_key_ed25519', 'nick', 'rtt', 'time',
                     'success', 'error_stream', 'error_circ', 'error_misc']
BW_KEYVALUES_EXTRA_BWS = ['bw_median', 'bw_mean', 'desc_bw_avg',
                          'desc_bw_obs_last', 'desc_bw_obs_mean']
BW_KEYVALUES_EXTRA = BW_KEYVALUES_FILE + BW_KEYVALUES_EXTRA_BWS
BW_KEYVALUES_INT = ['bw', 'rtt', 'success', 'error_stream',
                    'error_circ', 'error_misc'] + BW_KEYVALUES_EXTRA_BWS
BW_KEYVALUES = BW_KEYVALUES_BASIC + BW_KEYVALUES_EXTRA


def round_sig_dig(n, digits=PROP276_ROUND_DIG):
    """Round n to 'digits' significant digits in front of the decimal point.
       Results less than or equal to 1 are rounded to 1.
       Returns an integer.

       digits must be greater than 0.
       n must be less than or equal to 2**73, to avoid floating point errors.
       """
    digits = int(digits)
    assert digits >= 1
    if n <= 1:
        return 1
    digits_in_n = int(math.log10(n)) + 1
    round_digits = max(digits_in_n - digits, 0)
    rounded_n = round(n, -round_digits)
    return int(rounded_n)
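# Illustrative examples of the helper above (values not from the original
# module): round_sig_dig(2345, digits=2) == 2300,
# round_sig_dig(11, digits=1) == 10, and any n <= 1 returns 1.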


def kb_round_x_sig_dig(bw_bs, digits=PROP276_ROUND_DIG):
    """Convert bw_bs from bytes to kilobytes, and round the result to
       'digits' significant digits.
       Results less than or equal to 1 are rounded up to 1.
       Returns an integer.

       digits must be greater than 0.
       bw_bs must be less than or equal to 2**82, to avoid floating point
       errors.
       """
    # avoid double-rounding by using floating-point
    bw_kb = bw_bs / 1000.0
    return round_sig_dig(bw_kb, digits=digits)
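# Illustrative examples (values not from the original module):
# kb_round_x_sig_dig(123456, digits=2) == 120 (123.456 KB rounded to two
# significant digits) and kb_round_x_sig_dig(500) == 1, since results <= 1
# are rounded up to 1.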


def num_results_of_type(results, type_str):
    return len([r for r in results if r.type == type_str])


# Better way to use enums?
def result_type_to_key(type_str):
    return type_str.replace('-', '_')


class V3BWHeader(object):
    """
    Create a bandwidth measurements (V3bw) header
    following bandwidth measurements document spec version 1.X.X.

    :param str timestamp: timestamp in Unix Epoch seconds of the most recent
        generator result.
    :param str version: the spec version
    :param str software: the name of the software that generates this file
    :param str software_version: the version of the software
    :param dict kwargs: extra headers. Currently supported:

        - earliest_bandwidth: str, ISO 8601 timestamp in UTC time zone
          when the first bandwidth was obtained
        - generator_started: str, ISO 8601 timestamp in UTC time zone
          when the generator started
    """
    def __init__(self, timestamp, **kwargs):
        assert isinstance(timestamp, str)
        for v in kwargs.values():
            assert isinstance(v, str)
        self.timestamp = timestamp
        # KeyValues with default value when not given by kwargs
        self.version = kwargs.get('version', SPEC_VERSION)
        self.software = kwargs.get('software', 'sbws')
        self.software_version = kwargs.get('software_version', __version__)
        self.file_created = kwargs.get('file_created', now_isodt_str())
        # latest_bandwidth should not be in kwargs, since it MUST be the
        # same as timestamp
        self.latest_bandwidth = unixts_to_isodt_str(timestamp)
        [setattr(self, k, v) for k, v in kwargs.items()
         if k in EXTRA_ARG_KEYVALUES]

    def __str__(self):
        if self.version.startswith('1.'):
            return self.strv1
        return self.strv2

    @classmethod
    def from_results(cls, results, state_fpath=''):
        kwargs = dict()
        latest_bandwidth = cls.latest_bandwidth_from_results(results)
        earliest_bandwidth = cls.earliest_bandwidth_from_results(results)
        generator_started = cls.generator_started_from_file(state_fpath)
        timestamp = str(latest_bandwidth)
        kwargs['latest_bandwidth'] = unixts_to_isodt_str(latest_bandwidth)
        kwargs['earliest_bandwidth'] = unixts_to_isodt_str(earliest_bandwidth)
        if generator_started is not None:
            kwargs['generator_started'] = generator_started
        h = cls(timestamp, **kwargs)
        return h

    @classmethod
    def from_lines_v1(cls, lines):
        """
        :param list lines: list of lines to parse
        :returns: tuple of V3BWHeader object and non-header lines
        """
        assert isinstance(lines, list)
        try:
            index_terminator = lines.index(TERMINATOR)
        except ValueError:
            # it is not a bw file, or it is a v1.0.0 file
            log.warning('Terminator is not in lines')
            return None
        ts = lines[0]
        kwargs = dict([l.split(KEYVALUE_SEP_V1)
                       for l in lines[:index_terminator]
                       if l.split(KEYVALUE_SEP_V1)[0] in ALL_KEYVALUES])
        h = cls(ts, **kwargs)
        # the last line is a newline
        return h, lines[index_terminator + 1:-1]

    @classmethod
    def from_text_v1(cls, text):
        """
        :param str text: text to parse
        :returns: tuple of V3BWHeader object and non-header lines
        """
        assert isinstance(text, str)
        return cls.from_lines_v1(text.split(LINE_SEP))

    @classmethod
    def from_lines_v100(cls, lines):
        """
        :param list lines: list of lines to parse
        :returns: tuple of V3BWHeader object and non-header lines
        """
        assert isinstance(lines, list)
        h = cls(lines[0])
        # the last line is a newline
        return h, lines[1:-1]

    @staticmethod
    def generator_started_from_file(state_fpath):
        '''
        ISO formatted timestamp for the time when the scanner process most
        recently started.
        '''
        state = State(state_fpath)
        if 'scanner_started' in state:
            return state['scanner_started']
        else:
            return None

    @staticmethod
    def latest_bandwidth_from_results(results):
        return round(max([r.time for fp in results for r in results[fp]]))

    @staticmethod
    def earliest_bandwidth_from_results(results):
        return round(min([r.time for fp in results for r in results[fp]]))

    @property
    def keyvalue_unordered_tuple_ls(self):
        """Return list of KeyValue tuples that do not have specific order."""
        # sort the list to generate deterministic headers
        keyvalue_tuple_ls = sorted([(k, v) for k, v in self.__dict__.items()
                                    if k in UNORDERED_KEYVALUES])
        return keyvalue_tuple_ls

    @property
    def keyvalue_tuple_ls(self):
        """Return list of all KeyValue tuples"""
        return [('version', self.version)] + self.keyvalue_unordered_tuple_ls

    @property
    def keyvalue_v1str_ls(self):
        """Return KeyValue list of strings following spec v1.X.X."""
        keyvalues = [self.timestamp] + [KEYVALUE_SEP_V1.join([k, v])
                                        for k, v in self.keyvalue_tuple_ls]
        return keyvalues

    @property
    def strv1(self):
        """Return header string following spec v1.X.X."""
        header_str = LINE_SEP.join(self.keyvalue_v1str_ls) + LINE_SEP + \
            LINE_TERMINATOR
        return header_str

    @property
    def keyvalue_v2_ls(self):
        """Return KeyValue list of strings following spec v2.X.X."""
        keyvalue = [self.timestamp] + [KEYVALUE_SEP_V2.join([k, v])
                                       for k, v in self.keyvalue_tuple_ls]
        return keyvalue

    @property
    def strv2(self):
        """Return header string following spec v2.X.X."""
        header_str = LINE_SEP.join(self.keyvalue_v2_ls) + LINE_SEP + \
            LINE_TERMINATOR
        return header_str

    @property
    def num_lines(self):
        return len(self.__str__().split(LINE_SEP))

    def add_stats(self, **kwargs):
        # Using kwargs because attributes might change.
        [setattr(self, k, str(v)) for k, v in kwargs.items()
         if k in STATS_KEYVALUES]


class V3BWLine(object):
    """
    Create a Bandwidth List line following the spec version 1.X.X.

    :param str node_id:
    :param int bw:
    :param dict kwargs: extra headers. Currently supported:

        - nick, str
        - master_key_ed25519, str
        - rtt, int
        - time, str
        - success, int
        - error_stream, int
        - error_circ, int
        - error_misc, int
    """
    def __init__(self, node_id, bw, **kwargs):
        assert isinstance(node_id, str)
        assert isinstance(bw, int)
        assert node_id.startswith('$')
        self.node_id = node_id
        self.bw = bw
        [setattr(self, k, v) for k, v in kwargs.items()
         if k in BW_KEYVALUES_EXTRA]

    def __str__(self):
        return self.bw_strv1

    @classmethod
    def from_results(cls, results, secs_recent=None, secs_away=None,
                     min_num=0):
        """Convert sbws results to relays' Bandwidth Lines

        ``bs`` stands for Bytes/seconds
301
        ``bw_mean`` means the bw is obtained from the mean of the all the
302
303
304
305
306
        downloads' bandwidth.
        Downloads' bandwidth are calculated as the amount of data received
        divided by the the time it took to received.
        bw = data (Bytes) / time (seconds)
        """
        success_results = [r for r in results if isinstance(r, ResultSuccess)]
        # log.debug("Len success_results %s", len(success_results))
        node_id = '$' + results[0].fingerprint
        kwargs = dict()
        kwargs['nick'] = results[0].nickname
        if getattr(results[0], 'master_key_ed25519'):
            kwargs['master_key_ed25519'] = results[0].master_key_ed25519
        kwargs['time'] = cls.last_time_from_results(results)
        kwargs.update(cls.result_types_from_results(results))
        # useful args for scaling
        if success_results:
            results_away = \
                cls.results_away_each_other(success_results, secs_away)
            if not results_away:
                return None
            # log.debug("Results away from each other: %s",
            #           [unixts_to_isodt_str(r.time) for r in results_away])
            results_recent = cls.results_recent_than(results_away, secs_recent)
            if not results_recent:
                return None
            if len(results_recent) < min_num:
                # log.debug('The number of results is less than %s', min_num)
                return None
            kwargs['desc_bw_avg'] = \
                results_recent[-1].relay_average_bandwidth
            rtt = cls.rtt_from_results(results_recent)
            if rtt:
                kwargs['rtt'] = rtt
            bw = cls.bw_median_from_results(results_recent)
            kwargs['bw_mean'] = cls.bw_mean_from_results(results_recent)
            kwargs['bw_median'] = bw
            kwargs['desc_bw_obs_last'] = \
                cls.desc_bw_obs_last_from_results(results_recent)
            kwargs['desc_bw_obs_mean'] = \
                cls.desc_bw_obs_mean_from_results(results_recent)
            bwl = cls(node_id, bw, **kwargs)
            return bwl
        return None

    @classmethod
    def from_data(cls, data, fingerprint):
        assert fingerprint in data
        return cls.from_results(data[fingerprint])

    @classmethod
    def from_bw_line_v1(cls, line):
        assert isinstance(line, str)
        kwargs = dict([kv.split(KEYVALUE_SEP_V1)
                       for kv in line.split(BW_KEYVALUE_SEP_V1)
                       if kv.split(KEYVALUE_SEP_V1)[0] in BW_KEYVALUES])
        for k, v in kwargs.items():
            if k in BW_KEYVALUES_INT:
                kwargs[k] = int(v)
        node_id = kwargs['node_id']
        bw = kwargs['bw']
        del kwargs['node_id']
        del kwargs['bw']
        bw_line = cls(node_id, bw, **kwargs)
        return bw_line

    @staticmethod
    def results_away_each_other(results, secs_away=None):
        # log.debug("Checking whether results are away from each other in %s "
        #           "secs.", secs_away)
        if secs_away is None or len(results) < 2:
            return results
        for a, b in combinations(results, 2):
            if abs(a.time - b.time) > secs_away:
                return results
        # log.debug("Results are NOT away from each other in at least %ss: %s",
        #           secs_away, [unixts_to_isodt_str(r.time) for r in results])
        return None

    @staticmethod
    def results_recent_than(results, secs_recent=None):
        if secs_recent is None:
            return results
        results_recent = list(filter(
                            lambda x: (now_unixts() - x.time) < secs_recent,
                            results))
        # if not results_recent:
        #     log.debug("Results are NOT more recent than %ss: %s",
        #               secs_recent,
        #               [unixts_to_isodt_str(r.time) for r in results])
        return results_recent

    @staticmethod
    def bw_median_from_results(results):
        return max(round(median([dl['amount'] / dl['duration']
                                 for r in results for dl in r.downloads])), 1)

    @staticmethod
    def bw_mean_from_results(results):
        return max(round(mean([dl['amount'] / dl['duration']
                               for r in results for dl in r.downloads])), 1)

    @staticmethod
    def last_time_from_results(results):
        return unixts_to_isodt_str(round(max([r.time for r in results])))

    @staticmethod
    def rtt_from_results(results):
        # convert from seconds to milliseconds
        rtts = [(round(rtt * 1000)) for r in results for rtt in r.rtts]
        rtt = round(median(rtts)) if rtts else None
        return rtt

    @staticmethod
    def result_types_from_results(results):
        rt_dict = dict([(result_type_to_key(rt.value),
                         num_results_of_type(results, rt.value))
                        for rt in _ResultType])
        return rt_dict

    @staticmethod
    def desc_bw_obs_mean_from_results(results):
        desc_bw_obs_ls = []
        for r in results:
            if r.relay_observed_bandwidth is not None:
                desc_bw_obs_ls.append(r.relay_observed_bandwidth)
        if desc_bw_obs_ls:
            return max(round(mean(desc_bw_obs_ls)), 1)
        return None

    @staticmethod
    def desc_bw_obs_last_from_results(results):
        # the last is at the end of the list
        for r in reversed(results):
            if r.relay_observed_bandwidth is not None:
                return r.relay_observed_bandwidth
        return None

    @property
    def bw_keyvalue_tuple_ls(self):
        """Return list of KeyValue Bandwidth Line tuples."""
        # sort the list to generate deterministic lines
        keyvalue_tuple_ls = sorted([(k, v) for k, v in self.__dict__.items()
                                    if k in BW_KEYVALUES])
        return keyvalue_tuple_ls

    @property
    def bw_keyvalue_v1str_ls(self):
        """Return list of KeyValue Bandwidth Line strings following
        spec v1.X.X.
        """
        bw_keyvalue_str = [KEYVALUE_SEP_V1.join([k, str(v)])
                           for k, v in self.bw_keyvalue_tuple_ls]
        return bw_keyvalue_str

    @property
    def bw_strv1(self):
        """Return Bandwidth Line string following spec v1.X.X."""
        bw_line_str = BW_KEYVALUE_SEP_V1.join(
                        self.bw_keyvalue_v1str_ls) + LINE_SEP
        if len(bw_line_str) > BW_LINE_SIZE:
            # if this is the case, probably there are too many KeyValues,
            # or the limit needs to be changed in Tor
            log.warning("The bandwidth line is %s bytes long, longer than "
                        "the maximum of %s", len(bw_line_str), BW_LINE_SIZE)
        return bw_line_str


class V3BWFile(object):
    """
    Create a Bandwidth List file following spec version 1.X.X

    :param V3BWHeader v3bwheader: header
    :param list v3bwlines: V3BWLines
    """
    def __init__(self, v3bwheader, v3bwlines):
        self.header = v3bwheader
        self.bw_lines = v3bwlines

    def __str__(self):
        return str(self.header) + ''.join([str(bw_line) or ''
                                           for bw_line in self.bw_lines])

    @classmethod
    def from_results(cls, results, state_fpath='',
                     scale_constant=SBWS_SCALE_CONSTANT,
                     scaling_method=TORFLOW_SCALING,
                     torflow_obs=TORFLOW_OBS_LAST,
                     torflow_cap=TORFLOW_BW_MARGIN,
                     round_digs=PROP276_ROUND_DIG,
                     secs_recent=None, secs_away=None, min_num=0,
                     consensus_path=None, max_bw_diff_perc=MAX_BW_DIFF_PERC,
                     reverse=False):
        """Create V3BWFile class from sbws Results.

        :param dict results: see below
        :param str state_fpath: path to the state file
        :param int scaling_method:
            Scaling method to obtain the bandwidth
            Possible values: {None, SBWS_SCALING, TORFLOW_SCALING} = {0, 1, 2}
        :param int scale_constant: sbws scaling constant
        :param int torflow_obs: method to choose descriptor observed bandwidth
        :param bool reverse: whether to sort the bw lines descending or not

        Results are in the form::

            {'relay_fp1': [Result1, Result2, ...],
             'relay_fp2': [Result1, Result2, ...]}

        """
        log.info('Processing results to generate a bandwidth list file.')
        header = V3BWHeader.from_results(results, state_fpath)
        bw_lines_raw = []
        number_consensus_relays = cls.read_number_consensus_relays(
            consensus_path)
        state = State(state_fpath)
        for fp, values in results.items():
            # log.debug("Relay fp %s", fp)
            line = V3BWLine.from_results(values, secs_recent, secs_away,
                                         min_num)
            if line is not None:
                bw_lines_raw.append(line)
        if not bw_lines_raw:
            log.info("After applying restrictions to the raw results, "
                     "none are left. Scaling can not be applied.")
            cls.update_progress(
                cls, bw_lines_raw, header, number_consensus_relays, state)
            return cls(header, [])
        if scaling_method == SBWS_SCALING:
            bw_lines = cls.bw_sbws_scale(bw_lines_raw, scale_constant)
            cls.warn_if_not_accurate_enough(bw_lines, scale_constant)
            # log.debug(bw_lines[-1])
        elif scaling_method == TORFLOW_SCALING:
            bw_lines = cls.bw_torflow_scale(bw_lines_raw, torflow_obs,
                                            torflow_cap, round_digs)
            # log.debug(bw_lines[-1])
            cls.update_progress(
                cls, bw_lines, header, number_consensus_relays, state)
        else:
            bw_lines = cls.bw_kb(bw_lines_raw)
            # log.debug(bw_lines[-1])
        # Not using the result for now, just warning
        cls.is_max_bw_diff_perc_reached(bw_lines, max_bw_diff_perc)
        f = cls(header, bw_lines)
        return f

    @classmethod
    def from_v1_fpath(cls, fpath):
        log.info('Parsing bandwidth file %s', fpath)
        with open(fpath) as fd:
            text = fd.read()
        all_lines = text.split(LINE_SEP)
        header, lines = V3BWHeader.from_lines_v1(all_lines)
        bw_lines = [V3BWLine.from_bw_line_v1(line) for line in lines]
        return cls(header, bw_lines)

    @classmethod
    def from_v100_fpath(cls, fpath):
        log.info('Parsing bandwidth file %s', fpath)
        with open(fpath) as fd:
            text = fd.read()
        all_lines = text.split(LINE_SEP)
        header, lines = V3BWHeader.from_lines_v100(all_lines)
        bw_lines = sorted([V3BWLine.from_bw_line_v1(l) for l in lines],
                          key=lambda l: l.bw)
        return cls(header, bw_lines)

    @staticmethod
    def bw_kb(bw_lines, reverse=False):
        bw_lines_scaled = copy.deepcopy(bw_lines)
        for l in bw_lines_scaled:
            l.bw = max(round(l.bw / 1000), 1)
        return sorted(bw_lines_scaled, key=lambda x: x.bw, reverse=reverse)

    @staticmethod
    def bw_sbws_scale(bw_lines, scale_constant=SBWS_SCALE_CONSTANT,
                      reverse=False):
        """Return a new V3BwLine list scaled using sbws method.

        :param list bw_lines:
            bw lines to scale, not self.bw_lines,
            since this method is called before self.bw_lines has been
            initialized.
        :param int scale_constant:
            the constant to multiply by the ratio and
            the bandwidth to obtain the new bandwidth
        :returns list: V3BwLine list
        """
        # If a relay has MaxAdvertisedBandwidth set, they may be capable of
        # some large amount of bandwidth but prefer if they didn't receive it.
        # We also could have managed to measure them faster than their
        # {,Relay}BandwidthRate somehow.
        #
        # See https://github.com/pastly/simple-bw-scanner/issues/155 and
        # https://trac.torproject.org/projects/tor/ticket/8494
        #
        # Note how this isn't some measured-by-us average of bandwidth. It's
        # the first value on the 'bandwidth' line in the relay's server
        # descriptor.
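        # Worked example with hypothetical numbers: with scale_constant 10000
        # and a network median m of 5000 B/s, a relay measured at 10000 B/s
        # whose desc_bw_avg is larger than 20000 gets
        # round(10000 * 10000 / 5000 / 1000) = 20 (KB).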
        log.debug('Scaling bandwidth using sbws method.')
        m = median([l.bw for l in bw_lines])
        bw_lines_scaled = copy.deepcopy(bw_lines)
        for l in bw_lines_scaled:
            # min is to limit the bw to descriptor average-bandwidth
            # max to avoid bandwidth with 0 value
            l.bw = max(round(min(l.desc_bw_avg,
                                 l.bw * scale_constant / m)
                             / 1000), 1)
        return sorted(bw_lines_scaled, key=lambda x: x.bw, reverse=reverse)

    @staticmethod
    def warn_if_not_accurate_enough(bw_lines,
                                    scale_constant=SBWS_SCALE_CONSTANT):
        margin = 0.001
        accuracy_ratio = median([l.bw for l in bw_lines]) / scale_constant
        log.info('The generated lines are within {:.5}% of what they should '
                 'be'.format((1 - accuracy_ratio) * 100))
        if accuracy_ratio < 1 - margin or accuracy_ratio > 1 + margin:
            log.warning('There was %f%% error and only +/- %f%% is '
                        'allowed', (1 - accuracy_ratio) * 100, margin * 100)
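    # Worked example of the check above (hypothetical numbers): if the median
    # of the scaled bandwidths is 7400 and scale_constant is 7500, the
    # accuracy_ratio is ~0.9867, i.e. ~1.33% error, which is outside the
    # +/- 0.1% margin and triggers the warning.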

    @staticmethod
    def is_max_bw_diff_perc_reached(bw_lines,
                                    max_bw_diff_perc=MAX_BW_DIFF_PERC):
        sum_consensus_bw = sum([l.desc_bw_obs_last for l in bw_lines])
        sum_bw = sum([l.bw for l in bw_lines])
        # relative difference between the two totals, as a percentage
        diff = 1 - (min(sum_consensus_bw, sum_bw)
                    / max(sum_consensus_bw, sum_bw))
        diff_perc = diff * 100
        log.info("The difference between the total consensus bandwidth "
                 "and the total measured bandwidth is %s%%.", diff_perc)
        if diff_perc > max_bw_diff_perc:
            log.warning("It is more than %s%%.", max_bw_diff_perc)
            return True
        return False

    @staticmethod
    def bw_torflow_scale(bw_lines, desc_bw_obs_type=TORFLOW_OBS_MEAN,
                         cap=TORFLOW_BW_MARGIN,
                         num_round_dig=PROP276_ROUND_DIG, reverse=False):
        """
        Obtain final bandwidth measurements applying Torflow's scaling
        method.

        From Torflow's README.spec.txt (section 2.2)::

            In this way, the resulting network status consensus bandwidth values  # NOQA
            are effectively re-weighted proportional to how much faster the node  # NOQA
            was as compared to the rest of the network.

        The variables and steps used in Torflow:

        **strm_bw**::

            The strm_bw field is the average (mean) of all the streams for the relay  # NOQA
            identified by the fingerprint field.
            strm_bw = sum(bw stream x)/|n stream|

        **filt_bw**::

            The filt_bw field is computed similarly, but only the streams equal to  # NOQA
            or greater than the strm_bw are counted in order to filter very slow  # NOQA
            streams due to slow node pairings.

        **filt_sbw and strm_sbw**::

            for rs in RouterStats.query.filter(stats_clause).\
                  options(eagerload_all('router.streams.circuit.routers')).all():  # NOQA
              tot_sbw = 0
              sbw_cnt = 0
              for s in rs.router.streams:
                if isinstance(s, ClosedStream):
                  skip = False
                  #for br in badrouters:
                  #  if br != rs:
                  #    if br.router in s.circuit.routers:
                  #      skip = True
                  if not skip:
                    # Throw out outliers < mean
                    # (too much variance for stddev to filter much)
                    if rs.strm_closed == 1 or s.bandwidth() >= rs.sbw:
                      tot_sbw += s.bandwidth()
                      sbw_cnt += 1

            if sbw_cnt: rs.filt_sbw = tot_sbw/sbw_cnt
            else: rs.filt_sbw = None

        **filt_avg, and strm_avg**::

            Once we have determined the most recent measurements for each node, we  # NOQA
            compute an average of the filt_bw fields over all nodes we have measured.  # NOQA

        ::

            filt_avg = sum(map(lambda n: n.filt_bw, nodes.itervalues()))/float(len(nodes))  # NOQA
            strm_avg = sum(map(lambda n: n.strm_bw, nodes.itervalues()))/float(len(nodes))  # NOQA

        **true_filt_avg and true_strm_avg**::

            for cl in ["Guard+Exit", "Guard", "Exit", "Middle"]:
                true_filt_avg[cl] = filt_avg
                true_strm_avg[cl] = strm_avg

        In the non-pid case, all types of nodes get the same avg

        **n.fbw_ratio and n.fsw_ratio**::

            for n in nodes.itervalues():
                n.fbw_ratio = n.filt_bw/true_filt_avg[n.node_class()]
                n.sbw_ratio = n.strm_bw/true_strm_avg[n.node_class()]

        **n.ratio**::

            These averages are used to produce ratios for each node by dividing the  # NOQA
            measured value for that node by the network average.

        ::

            # Choose the larger between sbw and fbw
              if n.sbw_ratio > n.fbw_ratio:
                n.ratio = n.sbw_ratio
              else:
                n.ratio = n.fbw_ratio

        **desc_bw**:

        It is the ``observed bandwidth`` in the descriptor, NOT the ``average
        bandwidth``::

            return Router(ns.idhex, ns.nickname, bw_observed, dead, exitpolicy,
            ns.flags, ip, version, os, uptime, published, contact, rate_limited,  # NOQA
            ns.orhash, ns.bandwidth, extra_info_digest, ns.unmeasured)
            self.desc_bw = max(bw,1) # Avoid div by 0

        **new_bw**::

            These ratios are then multiplied by the most recent observed descriptor  # NOQA
            bandwidth we have available for each node, to produce a new value for  # NOQA
            the network status consensus process.

        ::

            n.new_bw = n.desc_bw*n.ratio

        The descriptor observed bandwidth is multiplied by the ratio.

        **Limit the bandwidth to a maximum**::

            NODE_CAP = 0.05

        ::

            if n.new_bw > tot_net_bw*NODE_CAP:
              plog("INFO", "Clipping extremely fast "+n.node_class()+" node "+n.idhex+"="+n.nick+  # NOQA
                   " at "+str(100*NODE_CAP)+"% of network capacity ("+
                   str(n.new_bw)+"->"+str(int(tot_net_bw*NODE_CAP))+") "+
                   " pid_error="+str(n.pid_error)+
                   " pid_error_sum="+str(n.pid_error_sum))
              n.new_bw = int(tot_net_bw*NODE_CAP)

        However, tot_net_bw does not seem to be updated when not using pid.
        This clipping would make all the faster relays have the same value.

        All of that can be expressed as:

        .. math::

           bwn_i =& min\\left(bwnew_i,
                      \\sum_{i=1}^{n}bwnew_i \\times 0.05\\right) \\

                 &= min\\left(
                      \\left(bwobs_i \\times r_i\\right),
                        \\sum_{i=1}^{n}\\left(bwobs_i \\times r_i\\right)
                        \\times 0.05\\right)\\

                 &= min\\left(
                      \\left(bwobs_i \\times max\\left(rf_i, rs_i\\right)\\right),
                        \\sum_{i=1}^{n}\\left(bwobs_i \\times
                          max\\left(rf_i, rs_i\\right)\\right) \\times 0.05\\right)\\

                 &= min\\left(
                      \\left(bwobs_i \\times max\\left(\\frac{bwfilt_i}{bwfilt},
                          \\frac{bw_i}{bwstrm}\\right)\\right),
                        \\sum_{i=1}^{n}\\left(bwobs_i \\times
                          max\\left(\\frac{bwfilt_i}{bwfilt},
                            \\frac{bw_i}{bwstrm}\\right)\\right) \\times 0.05\\right)

        """
        log.info("Calculating relays' bandwidth using Torflow method.")
        bw_lines_tf = copy.deepcopy(bw_lines)
        # mean (Torflow's strm_avg)
793
        mu = mean([l.bw_mean for l in bw_lines])
        # filtered mean (Torflow's filt_avg)
        muf = mean([max(l.bw_mean, mu) for l in bw_lines])
        # bw sum (Torflow's tot_net_bw or tot_sbw)
        sum_bw = sum([l.bw_mean for l in bw_lines])
        # Torflow's clipping
        hlimit = sum_bw * TORFLOW_BW_MARGIN
        log.debug('sum %s', sum_bw)
        log.debug('mu %s', mu)
        log.debug('muf %s', muf)
        log.debug('hlimit %s', hlimit)
        for l in bw_lines_tf:
            if desc_bw_obs_type == TORFLOW_OBS_LAST:
                desc_bw_obs = l.desc_bw_obs_last
            elif desc_bw_obs_type == TORFLOW_OBS_MEAN:
                desc_bw_obs = l.desc_bw_obs_mean
            # just applying the formula above:
            bw_new = kb_round_x_sig_dig(
                max(
                    l.bw_mean / mu,  # ratio
                    max(l.bw_mean, mu) / muf  # ratio filtered
                    ) * desc_bw_obs,
                digits=num_round_dig)  # convert to KB
            # Cap maximum bw
            if cap is not None:
                bw_new = min(hlimit, bw_new)
            # remove decimals and avoid 0
            l.bw = max(round(bw_new), 1)
        return sorted(bw_lines_tf, key=lambda x: x.bw, reverse=reverse)

    @staticmethod
    def read_number_consensus_relays(consensus_path):
        """Read the number of relays in the Network from the cached consensus
        file."""
        num = None
        try:
            num = len(list(parse_file(consensus_path)))
        except (FileNotFoundError, AttributeError):
            log.info("It is not possible to obtain statistics about the "
                     "percentage of measured relays because the cached "
                     "consensus file is not found.")
        log.debug("Number of relays in the network %s", num)
        return num

    @staticmethod
    def measured_progress_stats(bw_lines, number_consensus_relays,
                                min_perc_reached_before):
        """ Statistics about measurements progress,
        to be included in the header.

        :param list bw_lines: the bw_lines after scaling and applying filters.
        :param str consensus_path: the path to the cached consensus file.
845
        :param str state_fpath: the path to the state file
846
847
848
849
850
851
852
853
854
        :returns dict, bool: Statistics about the progress made with
            measurements and whether the percentage of measured relays has been
            reached.

        """
        # cached-consensus should be updated every time that scanner get the
        # network status or descriptors?
        # It will not be updated to the last consensus, but the list of
        # measured relays is not either.
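        # Illustrative example (hypothetical numbers): with 6500 relays in
        # the cached consensus and a minimum report percentage of 60%, at
        # least round(6500 * 60 / 100) = 3900 relays must have been measured.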
        assert isinstance(number_consensus_relays, int)
        assert isinstance(bw_lines, list)
        statsd = {}
        statsd['number_eligible_relays'] = len(bw_lines)
        statsd['number_consensus_relays'] = number_consensus_relays
        statsd['minimum_number_eligible_relays'] = round(
            statsd['number_consensus_relays'] * MIN_REPORT / 100)
        statsd['percent_eligible_relays'] = round(
            len(bw_lines) * 100 / statsd['number_consensus_relays'])
        statsd['minimum_percent_eligible_relays'] = MIN_REPORT
        if statsd['number_eligible_relays'] < \
                statsd['minimum_number_eligible_relays']:
            # if the minimum percent was reached before, warn
            # otherwise, debug
            if min_perc_reached_before is not None:
                log.warning('The percentage of measured relays is less '
                            'than %s%% of the relays in the network (%s).',
                            MIN_REPORT, statsd['number_consensus_relays'])
            else:
                log.info('The percentage of measured relays is less '
                         'than %s%% of the relays in the network (%s).',
                         MIN_REPORT, statsd['number_consensus_relays'])
            return statsd, False
        return statsd, True

    @property
    def is_min_perc(self):
        if getattr(self.header, 'number_eligible_relays', 0) \
                < getattr(self.header, 'minimum_number_eligible_relays', 0):
            return False
        return True

    @property
    def sum_bw(self):
        return sum([l.bw for l in self.bw_lines])

    @property
    def num(self):
        return len(self.bw_lines)

    @property
    def mean_bw(self):
        return mean([l.bw for l in self.bw_lines])

    @property
    def median_bw(self):
        return median([l.bw for l in self.bw_lines])

    @property
    def max_bw(self):
        return max([l.bw for l in self.bw_lines])

    @property
    def min_bw(self):
        return min([l.bw for l in self.bw_lines])

    @property
    def info_stats(self):
        if not self.bw_lines:
            return
        [log.info(': '.join([attr, str(getattr(self, attr))])) for attr in
         ['sum_bw', 'mean_bw', 'median_bw', 'num',
          'max_bw', 'min_bw']]

    def update_progress(self, bw_lines, header, number_consensus_relays,
                        state):
        min_perc_reached_before = state.get('min_perc_reached')
        if number_consensus_relays is not None:
            statsd, success = self.measured_progress_stats(
                bw_lines, number_consensus_relays, min_perc_reached_before)
            # add statistics about progress always
            header.add_stats(**statsd)
            if not success:
                bw_lines = []
                state['min_perc_reached'] = None
            else:
                state['min_perc_reached'] = now_isodt_str()
        return bw_lines

    def bw_line_for_node_id(self, node_id):
        """Returns the bandwidth line for a given node fingerprint.

        Used to combine data when plotting.
        """
        bwl = [l for l in self.bw_lines if l.node_id == node_id]
        if bwl:
            return bwl[0]
        return None

    def to_plt(self, attrs=['bw'], sorted_by=None):
        """Return bandwidth data in a format useful for matplotlib.

        Used from external tool to plot.
        """
        x = [i for i in range(0, self.num)]
        ys = [[getattr(l, k) for l in self.bw_lines] for k in attrs]
        return x, ys, attrs

    def write(self, output):
        if output == '/dev/stdout':
            log.info("Writing to stdout is not supported.")
            return
        log.info('Writing v3bw file to %s', output)
        # To avoid inconsistent reads, the bandwidth data is written to an
        # archive path, then atomically symlinked to 'latest.v3bw'
        out_dir = os.path.dirname(output)
        out_link = os.path.join(out_dir, 'latest.v3bw')
        out_link_tmp = out_link + '.tmp'
        with DirectoryLock(out_dir):
            with open(output, 'wt') as fd:
                fd.write(str(self.header))
                for line in self.bw_lines:
                    fd.write(str(line))
            output_basename = os.path.basename(output)
            # To atomically symlink a file, we need to create a temporary link,
            # then rename it to the final link name. (POSIX guarantees that
            # rename is atomic.)
            log.debug('Creating symlink {} -> {}.'
                      .format(out_link_tmp, output_basename))
            os.symlink(output_basename, out_link_tmp)
            log.debug('Renaming symlink {} -> {} to {} -> {}.'
                      .format(out_link_tmp, output_basename,
                              out_link, output_basename))
            os.rename(out_link_tmp, out_link)