The Tor Project / Network Health / Metrics / Onionperf / Commits

Commit e5f15ea4, authored May 05, 2020 by Karsten Loesing
Port OnionPerf to Python 3.

Fixes #29367.

parent c8763fc0
11 changed files
onionperf/analysis.py

@@ -17,7 +17,7 @@ from stem.response.events import CircuitEvent, CircMinorEvent, StreamEvent, Band
 from stem.response import ControlMessage, convert

 # onionperf imports
-import util
+from . import util

 ERRORS = {'AUTH': 'TGEN/AUTH',
           'READ': 'TGEN/READ',
@@ -286,7 +286,7 @@ class Analysis(object):
                 output.write("@type torperf 1.1\r\n")
                 output_str = ' '.join("{0}={1}".format(k, d[k]) for k in sorted(d.keys()) if d[k] is not None).strip()
                 output.write("{0}\r\n".format(output_str))
-            except KeyError, e:
+            except KeyError as e:
                 logging.warning("KeyError while exporting torperf file, missing key '{0}', skipping transfer '{1}'".format(str(e), xfer_db['transfer_id']))
                 continue
@@ -431,8 +431,7 @@ class Transfer(object):
             d['elapsed_seconds']['payload_progress'] = {decile: self.payload_progress[decile] - e.unix_ts_start for decile in self.payload_progress if self.payload_progress[decile] is not None}
         return d

-class Parser(object):
-    __metaclass__ = ABCMeta
+class Parser(object, metaclass=ABCMeta):

     @abstractmethod
     def parse(self, source, do_simple):
         pass
@@ -837,7 +836,6 @@ class TorCtlParser(Parser):
             except:
                 continue
         source.close()
-        print len(self.streams), len(self.circuits)

     def get_data(self):
         return {'circuits': self.circuits, 'circuits_summary': self.circuits_summary,
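The two Python 3 idioms that onionperf/analysis.py switches to are exception aliasing with "as" and explicit relative imports. A minimal standalone sketch of both (hypothetical data, not OnionPerf code):

    # Python 2 accepted "except KeyError, e:"; Python 3 only accepts the "as" form.
    record = {'transfer_id': 'example-xfer'}   # hypothetical stand-in for xfer_db
    try:
        filesize = record['filesize']
    except KeyError as e:
        print("missing key {0}, skipping transfer '{1}'".format(str(e), record['transfer_id']))

    # Python 3 also drops implicit relative imports, so a sibling module inside the
    # onionperf package must be imported as "from . import util" rather than
    # "import util" (shown as a comment because it only runs inside a package):
    # from . import util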
onionperf/docs/conf.py

@@ -19,14 +19,14 @@ sys.path.insert(0, os.path.abspath('..'))
 # -- Project information -----------------------------------------------------

-project = u'onionperf'
-copyright = u'2019, Ana Custura'
-author = u'Ana Custura'
+project = 'onionperf'
+copyright = '2019, Ana Custura'
+author = 'Ana Custura'

 # The short X.Y version
-version = u''
+version = ''
 # The full version, including alpha/beta/rc tags
-release = u''
+release = ''

 # -- General configuration ---------------------------------------------------
@@ -66,7 +66,7 @@ language = None
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # This pattern also affects html_static_path and html_extra_path .
-exclude_patterns = [u'_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = 'sphinx'
@@ -131,8 +131,8 @@ latex_elements = {
 # (source start file, target name, title,
 #  author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, 'onionperf.tex', u'onionperf Documentation',
-     u'Ana Custura', 'manual'),
+    (master_doc, 'onionperf.tex', 'onionperf Documentation',
+     'Ana Custura', 'manual'),
 ]
@@ -141,7 +141,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
 man_pages = [
-    (master_doc, 'onionperf', u'onionperf Documentation',
+    (master_doc, 'onionperf', 'onionperf Documentation',
      [author], 1)
 ]
@@ -152,7 +152,7 @@ man_pages = [
 # (source start file, target name, title, author,
 #  dir menu entry, description, category)
 texinfo_documents = [
-    (master_doc, 'onionperf', u'onionperf Documentation',
+    (master_doc, 'onionperf', 'onionperf Documentation',
      author, 'onionperf', 'One line description of project.',
      'Miscellaneous'),
 ]
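Nothing functional changes in docs/conf.py: in Python 3 every string literal is already unicode, so the u'' prefixes become redundant. A one-line sketch of that equivalence:

    # In Python 3, u'...' and '...' denote the same str type.
    assert u'onionperf Documentation' == 'onionperf Documentation'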
onionperf/measurement.py

@@ -4,7 +4,7 @@
 See LICENSE for licensing information
 '''

-import os, traceback, subprocess, threading, Queue, logging, time, datetime, re, shlex
+import os, traceback, subprocess, threading, queue, logging, time, datetime, re, shlex
 from lxml import etree

 # stem imports
@@ -14,7 +14,7 @@ from stem.version import Version, Requirement, get_system_tor_version
 from stem import __version__ as stem_version

 # onionperf imports
-import analysis, monitor, model, util
+from . import analysis, monitor, model, util

 def generate_docroot_index(docroot_path):
     root = etree.Element("files")
@@ -22,7 +22,7 @@ def generate_docroot_index(docroot_path):
     for filename in filepaths:
         e = etree.SubElement(root, "file")
         e.set("name", filename)
-    with open("{0}/index.xml".format(docroot_path), 'wb') as f:
-        print >> f, etree.tostring(root, pretty_print=True, xml_declaration=True)
+    with open("{0}/index.xml".format(docroot_path), 'wt') as f:
+        print(etree.tostring(root, pretty_print=True, xml_declaration=True), file=f)

 def readline_thread_task(instream, q):
     # wait for lines from stdout until the EOF
@@ -49,7 +49,8 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
     # wait for a string to appear in stdout if requested
     if ready_search_str is not None:
         boot_re = re.compile(ready_search_str)
-        for line in iter(subp.stdout.readline, b''):
+        for bytes in iter(subp.stdout.readline, b''):
+            line = bytes.decode('utf-8')
             writable.write(line)
             if boot_re.search(line):
                 break  # got it!
@@ -59,7 +60,7 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
         ready_ev.set()

     # a helper will block on stdout and return lines back to us in a queue
-    stdout_q = Queue.Queue()
+    stdout_q = queue.Queue()
     t = threading.Thread(target=readline_thread_task, args=(subp.stdout, stdout_q))
     t.start()
@@ -67,9 +68,9 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
     # sure that the subprocess is still alive and the master doesn't want us to quit
     while subp.poll() is None and done_ev.is_set() is False:
         try:
-            line = stdout_q.get(True, 1)
-            writable.write(line)
-        except Queue.Empty:
+            bytes = stdout_q.get(True, 1)
+            writable.write(bytes.decode('utf-8'))
+        except queue.Empty:
             # the queue is empty and the get() timed out, recheck loop conditions
             continue
@@ -100,7 +101,8 @@ def watchdog_thread_task(cmd, cwd, writable, done_ev, send_stdin, ready_search_s
     # helper thread is done, make sure we drain the remaining lines from the stdout queue
     while not stdout_q.empty():
-        writable.write(stdout_q.get_nowait())
+        bytes = stdout_q.get_nowait()
+        writable.write(bytes.decode('utf-8'))

     # if we have too many failures, exit the watchdog to propogate the error up
     if len(failure_times) > 10:
         break
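measurement.py picks up three Python 3 changes: the Queue module is renamed to queue, the print >> f statement becomes print(..., file=f), and subprocess pipes now yield bytes that must be decoded before being handled as text. A minimal sketch of the queue-plus-decoding pattern, assuming a Unix-like system where the echo command is available (hypothetical example, not OnionPerf code):

    import queue            # Python 2's "Queue" module is "queue" in Python 3
    import subprocess, threading

    def readline_task(instream, q):
        # subprocess pipes yield bytes in Python 3, so push raw bytes into the queue
        for raw in iter(instream.readline, b''):
            q.put(raw)

    subp = subprocess.Popen(['echo', 'Bootstrapped 100%'], stdout=subprocess.PIPE)
    stdout_q = queue.Queue()
    t = threading.Thread(target=readline_task, args=(subp.stdout, stdout_q))
    t.start()
    t.join()

    while not stdout_q.empty():
        line = stdout_q.get_nowait().decode('utf-8')   # decode before treating as text
        print(line, end='')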
onionperf/model.py

@@ -5,16 +5,14 @@
 '''

 from abc import ABCMeta, abstractmethod
-from cStringIO import StringIO
+from io import StringIO
 from networkx import read_graphml, write_graphml, DiGraph

-class TGenModel(object):
+class TGenModel(object, metaclass=ABCMeta):
     '''
     an action-dependency graph model for Shadow's traffic generator
     '''

-    __metaclass__ = ABCMeta

     def dump_to_string(self):
         s = StringIO()
         write_graphml(self.graph, s)
@@ -42,9 +40,7 @@ class TGenLoadableModel(TGenModel):
         model_instance = cls(graph)
         return model_instance

-class GeneratableTGenModel(TGenModel):
-    __metaclass__ = ABCMeta
+class GeneratableTGenModel(TGenModel, metaclass=ABCMeta):

     @abstractmethod
     def generate(self):
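The class-level __metaclass__ attribute is silently ignored by Python 3, which is why the abstract base classes throughout this commit move the metaclass into the class header. A minimal sketch of the pattern (hypothetical class names, not OnionPerf code):

    from abc import ABCMeta, abstractmethod

    class BaseModel(object, metaclass=ABCMeta):   # Python 2's "__metaclass__ = ABCMeta" has no effect in Python 3
        @abstractmethod
        def generate(self):
            pass

    class FixedModel(BaseModel):
        def generate(self):
            return "graphml"

    print(FixedModel().generate())   # instantiating BaseModel() itself would raise TypeError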
onionperf/onionperf

-#!/usr/bin/env python
+#!/usr/bin/env python3
 '''
   OnionPerf
@@ -479,7 +479,7 @@ def visualize(args):
     tor_viz = TorVisualization()
     for (path, label) in args.datasets:
-        nextformat = lfcycle.next()
+        nextformat = next(lfcycle)
         anal = Analysis.load(filename=path)
         if anal is not None:
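Python 3 iterators no longer expose a .next() method; the next() builtin (which calls __next__) is the portable spelling. A minimal sketch, assuming for illustration that the line-format cycle comes from itertools.cycle:

    from itertools import cycle

    lfcycle = cycle(['k-', 'r-', 'b-'])   # hypothetical stand-in for the real line-format cycle
    nextformat = next(lfcycle)            # Python 2 spelling was lfcycle.next()
    print(nextformat)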
onionperf/tests/test_reprocessing.py

@@ -45,7 +45,7 @@ def test_log_match_no_log_date():
 def test_log_match_with_filter_date():
     tgen_logs = reprocessing.collect_logs(DATA_DIR, '*tgen.log')
     torctl_logs = reprocessing.collect_logs(DATA_DIR, '*torctl.log')
-    test_date = datetime.date(2019, 01, 10)
+    test_date = datetime.date(2019, 1, 10)
     log_pairs = reprocessing.match(tgen_logs, torctl_logs, test_date)
     well_known_list = [(DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.tgen.log', DATA_DIR + 'logs/onionperf_2019-01-10_23:59:59.torctl.log', datetime.datetime(2019, 1, 10, 0, 0))]
     assert_equals(log_pairs, well_known_list)
@@ -53,7 +53,7 @@ def test_log_match_with_filter_date():
 def test_log_match_with_wrong_filter_date():
     tgen_logs = reprocessing.collect_logs(DATA_DIR, '*tgen.log')
     torctl_logs = reprocessing.collect_logs(DATA_DIR, '*torctl.log')
-    test_date = datetime.date(2017, 01, 01)
+    test_date = datetime.date(2017, 1, 1)
     log_pairs = reprocessing.match(tgen_logs, torctl_logs, test_date)
     well_known_list = []
     assert_equals(log_pairs, well_known_list)
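The only change in these tests is dropping the leading zeros: a decimal literal such as 01 is a SyntaxError in Python 3, where leading zeros are reserved for the 0o octal prefix. A minimal sketch:

    import datetime

    # datetime.date(2019, 01, 10) no longer parses; plain integers work everywhere.
    test_date = datetime.date(2019, 1, 10)
    print(test_date.isoformat())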
onionperf/tests/test_utils.py

@@ -95,7 +95,7 @@ def test_find_path_with_which():
     """
     temp_file = tempfile.NamedTemporaryFile()
-    os.chmod(temp_file.name, 0775)
+    os.chmod(temp_file.name, 0o775)
     work_path = util.find_path(None, temp_file.name, tempfile.tempdir)
     assert_equals(work_path, temp_file.name)
     temp_file.close()
@@ -252,7 +252,7 @@ def test_file_writable():
     test_writable.write("onionperf")
     test_writable.close()
     expected_checksum = "5001ed4ab25b52543946fa63da829d4eeab1bd254c89ffdad0877186e074b385"
-    with open(temp_file.name) as f:
+    with open(temp_file.name, 'rb') as f:
         file_bytes = f.read()
     file_checksum = hashlib.sha256(file_bytes).hexdigest()
     assert_equals(file_checksum, expected_checksum)
@@ -270,8 +270,8 @@ def test_file_writable_compressed():
     test_writable = util.FileWritable(temp_file.name, True)
     test_writable.write("onionperf")
     test_writable.close()
-    expected_checksum = "66a6256bc4b04529c7123fa9573d30de659ffaa0cce1cc9b189817c8bf30e813"
-    with open(temp_file.name) as f:
+    expected_checksum = "3556b3bee6bb56d0a42676cbbf5784ebe4151fe65b0797f42260f93212e2df11"
+    with open(temp_file.name, 'rb') as f:
         file_bytes = f.read()
     file_checksum = hashlib.sha256(file_bytes).hexdigest()
     assert_equals(file_checksum, expected_checksum)
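Two Python 3 details show up here: octal file modes need the 0o prefix, and hashlib wants bytes, so files are opened in 'rb' before hashing. The compressed-file checksum also changes because util.FileWritable now compresses with the lzma module instead of piping through the external xz tool, which produces different (but equally valid) .xz output. A minimal standalone sketch of the mode and hashing changes (hypothetical temp file, not OnionPerf code):

    import hashlib, os, tempfile

    with tempfile.NamedTemporaryFile(delete=False) as temp_file:
        temp_file.write(b"onionperf")

    os.chmod(temp_file.name, 0o775)        # "0775" is a SyntaxError in Python 3

    with open(temp_file.name, 'rb') as f:  # read bytes, not str, before hashing
        file_bytes = f.read()
    print(hashlib.sha256(file_bytes).hexdigest())

    os.remove(temp_file.name)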
onionperf/util.py

@@ -4,10 +4,9 @@
 See LICENSE for licensing information
 '''

-import sys, os, socket, logging, random, re, shutil, datetime, urllib, gzip
-from subprocess import Popen, PIPE, STDOUT
+import sys, os, socket, logging, random, re, shutil, datetime, urllib.request, urllib.parse, urllib.error, gzip, lzma
 from threading import Lock
-from cStringIO import StringIO
+from io import StringIO
 from abc import ABCMeta, abstractmethod

 LINEFORMATS = "k-,r-,b-,g-,c-,m-,y-,k--,r--,b--,g--,c--,m--,y--,k:,r:,b:,g:,c:,m:,y:,k-.,r-.,b-.,g-.,c-.,m-.,y-."
@@ -156,7 +155,7 @@ def get_ip_address():
     """
     ip_address = None
     try:
-        data = urllib.urlopen('https://check.torproject.org/').read()
+        data = urllib.request.urlopen('https://check.torproject.org/').read().decode('utf-8')
         ip_address = find_ip_address_url(data)
         if not ip_address:
             logging.error(
@@ -195,18 +194,14 @@ class DataSource(object):
         self.filename = filename
         self.compress = compress
         self.source = None
-        self.xzproc = None

     def __iter__(self):
         if self.source is None:
             self.open()
         return self.source

-    def next(self):
-        return self.__next__()
-
-    def __next__(self):  # python 3
-        return self.source.next() if self.source is not None else None
+    def __next__(self):
+        return next(self.source) if self.source is not None else None

     def open(self):
         if self.source is None:
@@ -214,14 +209,12 @@ class DataSource(object):
             self.source = sys.stdin
         elif self.compress or self.filename.endswith(".xz"):
             self.compress = True
-            cmd = "xz --decompress --stdout {0}".format(self.filename)
-            xzproc = Popen(cmd.split(), stdout=PIPE)
-            self.source = xzproc.stdout
+            self.source = lzma.open(self.filename, mode='rt')
         elif self.filename.endswith(".gz"):
             self.compress = True
-            self.source = gzip.open(self.filename, 'rb')
+            self.source = gzip.open(self.filename, 'rt')
         else:
-            self.source = open(self.filename, 'r')
+            self.source = open(self.filename, 'rt')

     def get_file_handle(self):
         if self.source is None:
@@ -230,12 +223,9 @@ class DataSource(object):
     def close(self):
         if self.source is not None:
             self.source.close()
-        if self.xzproc is not None:
-            self.xzproc.wait()

-class Writable(object):
-    __metaclass__ = ABCMeta
+class Writable(object, metaclass=ABCMeta):

     @abstractmethod
     def write(self, msg):
         pass
@@ -251,8 +241,6 @@ class FileWritable(Writable):
         self.do_compress = do_compress
         self.do_truncate = do_truncate
         self.file = None
-        self.xzproc = None
-        self.ddproc = None
         self.lock = Lock()

         if self.filename == '-':
@@ -275,14 +263,9 @@ class FileWritable(Writable):
     def __open_nolock(self):
         if self.do_compress:
-            self.xzproc = Popen("xz --threads=3 -".split(), stdin=PIPE, stdout=PIPE)
-            dd_cmd = "dd of={0}".format(self.filename)
-            # # note: its probably not a good idea to append to finalized compressed files
-            # if not self.do_truncate: dd_cmd += " oflag=append conv=notrunc"
-            self.ddproc = Popen(dd_cmd.split(), stdin=self.xzproc.stdout, stdout=open(os.devnull, 'w'), stderr=STDOUT)
-            self.file = self.xzproc.stdin
+            self.file = lzma.open(self.filename, mode='wt')
         else:
-            self.file = open(self.filename, 'w' if self.do_truncate else 'a', 0)
+            self.file = open(self.filename, 'wt' if self.do_truncate else 'at', 1)

     def close(self):
         self.lock.acquire()
@@ -293,12 +276,6 @@ class FileWritable(Writable):
         if self.file is not None:
             self.file.close()
             self.file = None
-        if self.xzproc is not None:
-            self.xzproc.wait()
-            self.xzproc = None
-        if self.ddproc is not None:
-            self.ddproc.wait()
-            self.ddproc = None

     def rotate_file(self, filename_datetime=datetime.datetime.now()):
         self.lock.acquire()
@@ -316,7 +293,7 @@ class FileWritable(Writable):
             self.__close_nolock()
             with open(self.filename, 'rb') as f_in, gzip.open(new_filename, 'wb') as f_out:
                 shutil.copyfileobj(f_in, f_out)
-            with open(self.filename, 'a') as f_in:
+            with open(self.filename, 'ab') as f_in:
                 f_in.truncate(0)
             self.__open_nolock()
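The biggest behavioral change in this commit is in util.py: the xz and dd subprocess pipelines are replaced by the standard-library lzma module, and compressed sources are opened in text mode ('rt'/'wt') so the rest of the code keeps working with str rather than bytes. A minimal sketch of that stdlib-only round trip (hypothetical file path, not OnionPerf code):

    import lzma, os, tempfile

    xz_path = os.path.join(tempfile.gettempdir(), 'onionperf_example.txt.xz')  # hypothetical path

    with lzma.open(xz_path, mode='wt') as f:   # was: Popen("xz --threads=3 -") piped into dd
        f.write('measurement line\n')

    with lzma.open(xz_path, mode='rt') as f:   # was: Popen("xz --decompress --stdout ...")
        print(f.read(), end='')

    os.remove(xz_path)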
onionperf/visualization.py

@@ -46,9 +46,7 @@ pylab.rcParams.update({
 })
 '''

-class Visualization(object):
-    __metaclass__ = ABCMeta
+class Visualization(object, metaclass=ABCMeta):

     def __init__(self):
         self.datasets = []
@@ -349,7 +347,7 @@ class TGenVisualization(Visualization):
                     if client not in dls[bytes]: dls[bytes][client] = 0
                     for sec in d["time_to_last_byte"][b]: dls[bytes][client] += len(d["time_to_last_byte"][b][sec])
             for bytes in dls:
-                x, y = getcdf(dls[bytes].values(), shownpercentile=1.0)
+                x, y = getcdf(list(dls[bytes].values()), shownpercentile=1.0)
                 pylab.figure(figs[bytes].number)
                 pylab.plot(x, y, lineformat, label=label)
@@ -555,7 +553,7 @@ def getcdf(data, shownpercentile=0.99, maxpoints=10000.0):
     frac = cf(data)
     k = len(data) / maxpoints
     x, y, lasty = [], [], 0.0
-    for i in xrange(int(round(len(data) * shownpercentile))):
+    for i in range(int(round(len(data) * shownpercentile))):
         if i % k > 1.0: continue
         assert not numpy.isnan(data[i])
         x.append(data[i])
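visualization.py needs two small adjustments: dict.values() returns a view in Python 3, so it is wrapped in list() before being handed to the plotting code, and xrange() is gone in favor of the now-lazy range(). A minimal sketch with hypothetical data:

    dls = {51200: {'client-a': 3, 'client-b': 5}}   # hypothetical download counts

    values = list(dls[51200].values())   # materialize the view before passing it along
    print(values)

    for i in range(len(values)):         # xrange() no longer exists
        print(i, values[i])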
run_tests.sh

 #!/bin/sh
-PYTHONPATH=. python -m nose --with-coverage --cover-package=onionperf
+PYTHONPATH=. python3 -m nose --with-coverage --cover-package=onionperf
setup.py

-#!/usr/bin/env python
+#!/usr/bin/env python3

 from distutils.core import setup