run update

parent 11af4540c5
commit 6806bebb7c

607 changed files with 52543 additions and 31832 deletions
@@ -1,4 +1,4 @@
# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""
@@ -9,6 +9,8 @@ Package for parsing and processing descriptor data.
::

  parse_file - Parses the descriptors in a file.
  create - Creates a new custom descriptor.
  create_signing_key - Creates a signing key that can be used for creating descriptors.

  Descriptor - Common parent for all descriptor file types.
    |- get_path - location of the descriptor on disk if it came from a file
@@ -27,6 +29,24 @@ Package for parsing and processing descriptor data.
and upfront runtime. However, if read time and memory aren't a concern then
**DOCUMENT** can provide you with a fully populated document.

Handlers don't change the fact that most methods that provide
descriptors return an iterator. In the case of **DOCUMENT** and
**BARE_DOCUMENT** that iterator would have just a single item -
the document itself.

A simple way to handle this is to call **next()** to get the iterator's one and
only value...

::

  import stem.descriptor.remote
  from stem.descriptor import DocumentHandler

  consensus = next(stem.descriptor.remote.get_consensus(
    document_handler = DocumentHandler.BARE_DOCUMENT,
  ))

=================== ===========
DocumentHandler     Description
=================== ===========
@@ -36,6 +56,29 @@ Package for parsing and processing descriptor data.
=================== ===========
"""

import base64
import codecs
import collections
import copy
import hashlib
import os
import random
import re
import string
import tarfile

import stem.prereq
import stem.util
import stem.util.enum
import stem.util.str_tools
import stem.util.system

try:
  # added in python 2.7
  from collections import OrderedDict
except ImportError:
  from stem.util.ordereddict import OrderedDict

__all__ = [
  'export',
  'reader',
@@ -50,26 +93,12 @@ __all__ = [
  'Descriptor',
]

import base64
import codecs
import copy
import hashlib
import os
import re
import tarfile
UNSEEKABLE_MSG = """\
File object isn't seekable. Try wrapping it with a BytesIO instead...

import stem.prereq
import stem.util.enum
import stem.util.str_tools
import stem.util.system

from stem import str_type

try:
  # added in python 2.7
  from collections import OrderedDict
except ImportError:
  from stem.util.ordereddict import OrderedDict
  content = my_file.read()
  parsed_descriptors = stem.descriptor.parse_file(io.BytesIO(content))
"""

KEYWORD_CHAR = 'a-zA-Z0-9-'
WHITESPACE = ' \t'
@@ -77,6 +106,17 @@ KEYWORD_LINE = re.compile('^([%s]+)(?:[%s]+(.*))?$' % (KEYWORD_CHAR, WHITESPACE)
SPECIFIC_KEYWORD_LINE = '^(%%s)(?:[%s]+(.*))?$' % WHITESPACE
PGP_BLOCK_START = re.compile('^-----BEGIN ([%s%s]+)-----$' % (KEYWORD_CHAR, WHITESPACE))
PGP_BLOCK_END = '-----END %s-----'
EMPTY_COLLECTION = ([], {}, set())

DIGEST_TYPE_INFO = b'\x00\x01'
DIGEST_PADDING = b'\xFF'
DIGEST_SEPARATOR = b'\x00'

CRYPTO_BLOB = """
MIGJAoGBAJv5IIWQ+WDWYUdyA/0L8qbIkEVH/cwryZWoIaPAzINfrw1WfNZGtBmg
skFtXhOHHqTRN4GPPrZsAIUOQGzQtGb66IQgT4tO/pj+P6QmSCCdTfhvGfgTCsC+
WPi4Fl2qryzTb3QO5r5x7T8OsG2IBUET1bLQzmtbC560SYR49IvVAgMBAAE=
"""

DocumentHandler = stem.util.enum.UppercaseEnum(
  'ENTRIES',
@@ -85,7 +125,19 @@ DocumentHandler = stem.util.enum.UppercaseEnum(
)


def parse_file(descriptor_file, descriptor_type = None, validate = False, document_handler = DocumentHandler.ENTRIES, **kwargs):
class SigningKey(collections.namedtuple('SigningKey', ['private', 'public', 'public_digest'])):
  """
  Key used by relays to sign their server and extrainfo descriptors.

  .. versionadded:: 1.6.0

  :var cryptography.hazmat.backends.openssl.rsa._RSAPrivateKey private: private key
  :var cryptography.hazmat.backends.openssl.rsa._RSAPublicKey public: public key
  :var bytes public_digest: block that can be used for a server descriptor's 'signing-key' field
  """


def parse_file(descriptor_file, descriptor_type = None, validate = False, document_handler = DocumentHandler.ENTRIES, normalize_newlines = None, **kwargs):
  """
  Simple function to read the descriptor contents from a file, providing an
  iterator for its :class:`~stem.descriptor.__init__.Descriptor` contents.
@@ -94,7 +146,7 @@ def parse_file(descriptor_file, descriptor_type = None, validate = False, docume
  tries to determine the descriptor type based on the following...

  * The @type annotation on the first line. These are generally only found in
    the `CollecTor archives <https://collector.torproject.org/formats.html#relay-descriptors>`_.
    the `CollecTor archives <https://metrics.torproject.org/collector.html#relay-descriptors>`_.

  * The filename if it matches something from tor's data directory. For
    instance, tor's 'cached-descriptors' contains server descriptors.
@@ -138,11 +190,13 @@ def parse_file(descriptor_file, descriptor_type = None, validate = False, docume
    my_descriptor_file = open(descriptor_path, 'rb')

  :param str,file,tarfile descriptor_file: path or opened file with the descriptor contents
  :param str descriptor_type: `descriptor type <https://collector.torproject.org/formats.html>`_, this is guessed if not provided
  :param str descriptor_type: `descriptor type <https://metrics.torproject.org/collector.html#data-formats>`_, this is guessed if not provided
  :param bool validate: checks the validity of the descriptor's content if
    **True**, skips these checks otherwise
  :param stem.descriptor.__init__.DocumentHandler document_handler: method in
    which to parse the :class:`~stem.descriptor.networkstatus.NetworkStatusDocument`
  :param bool normalize_newlines: converts windows newlines (CRLF), this is the
    default when reading data directories on windows
  :param dict kwargs: additional arguments for the descriptor constructor

  :returns: iterator for :class:`~stem.descriptor.__init__.Descriptor` instances in the file
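For example, a minimal sketch of the path-based usage above (assuming a tor
data directory at /home/atagar/.tor, so the type is guessed from the filename)::

  import stem.descriptor

  for desc in stem.descriptor.parse_file('/home/atagar/.tor/cached-descriptors'):
    print('found relay %s (%s)' % (desc.nickname, desc.fingerprint))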
@@ -157,7 +211,7 @@ def parse_file(descriptor_file, descriptor_type = None, validate = False, docume

  handler = None

  if isinstance(descriptor_file, (bytes, str_type)):
  if stem.util._is_str(descriptor_file):
    if stem.util.system.is_tarfile(descriptor_file):
      handler = _parse_file_for_tar_path
    else:
@@ -171,6 +225,16 @@ def parse_file(descriptor_file, descriptor_type = None, validate = False, docume

    return

  # Not all files are seekable. If unseekable then advising the user.
  #
  # Python 3.x adds an io.seekable() method, but not an option with python 2.x
  # so using an experimental call to tell() to determine this.

  try:
    descriptor_file.tell()
  except IOError:
    raise IOError(UNSEEKABLE_MSG)

  # The tor descriptor specifications do not provide a reliable method for
  # identifying a descriptor file's type and version so we need to guess
  # based on its filename. Metrics descriptors, however, can be identified
@@ -186,47 +250,48 @@ def parse_file(descriptor_file, descriptor_type = None, validate = False, docume

  descriptor_path = getattr(descriptor_file, 'name', None)
  filename = '<undefined>' if descriptor_path is None else os.path.basename(descriptor_file.name)
  file_parser = None

  if descriptor_type is not None:
    descriptor_type_match = re.match('^(\S+) (\d+).(\d+)$', descriptor_type)
  def parse(descriptor_file):
    if normalize_newlines:
      descriptor_file = NewlineNormalizer(descriptor_file)

    if descriptor_type_match:
      desc_type, major_version, minor_version = descriptor_type_match.groups()
      file_parser = lambda f: _parse_metrics_file(desc_type, int(major_version), int(minor_version), f, validate, document_handler, **kwargs)
    if descriptor_type is not None:
      descriptor_type_match = re.match('^(\S+) (\d+).(\d+)$', descriptor_type)

      if descriptor_type_match:
        desc_type, major_version, minor_version = descriptor_type_match.groups()
        return _parse_metrics_file(desc_type, int(major_version), int(minor_version), descriptor_file, validate, document_handler, **kwargs)
      else:
        raise ValueError("The descriptor_type must be of the form '<type> <major_version>.<minor_version>'")
    elif metrics_header_match:
      # Metrics descriptor handling

      desc_type, major_version, minor_version = metrics_header_match.groups()
      return _parse_metrics_file(desc_type, int(major_version), int(minor_version), descriptor_file, validate, document_handler, **kwargs)
    else:
      raise ValueError("The descriptor_type must be of the form '<type> <major_version>.<minor_version>'")
  elif metrics_header_match:
    # Metrics descriptor handling
      # Cached descriptor handling. These contain multiple descriptors per file.

    desc_type, major_version, minor_version = metrics_header_match.groups()
    file_parser = lambda f: _parse_metrics_file(desc_type, int(major_version), int(minor_version), f, validate, document_handler, **kwargs)
  else:
    # Cached descriptor handling. These contain multiple descriptors per file.
      if normalize_newlines is None and stem.util.system.is_windows():
        descriptor_file = NewlineNormalizer(descriptor_file)

    if filename == 'cached-descriptors' or filename == 'cached-descriptors.new':
      file_parser = lambda f: stem.descriptor.server_descriptor._parse_file(f, validate = validate, **kwargs)
    elif filename == 'cached-extrainfo' or filename == 'cached-extrainfo.new':
      file_parser = lambda f: stem.descriptor.extrainfo_descriptor._parse_file(f, validate = validate, **kwargs)
    elif filename == 'cached-microdescs' or filename == 'cached-microdescs.new':
      file_parser = lambda f: stem.descriptor.microdescriptor._parse_file(f, validate = validate, **kwargs)
    elif filename == 'cached-consensus':
      file_parser = lambda f: stem.descriptor.networkstatus._parse_file(f, validate = validate, document_handler = document_handler, **kwargs)
    elif filename == 'cached-microdesc-consensus':
      file_parser = lambda f: stem.descriptor.networkstatus._parse_file(f, is_microdescriptor = True, validate = validate, document_handler = document_handler, **kwargs)
      if filename == 'cached-descriptors' or filename == 'cached-descriptors.new':
        return stem.descriptor.server_descriptor._parse_file(descriptor_file, validate = validate, **kwargs)
      elif filename == 'cached-extrainfo' or filename == 'cached-extrainfo.new':
        return stem.descriptor.extrainfo_descriptor._parse_file(descriptor_file, validate = validate, **kwargs)
      elif filename == 'cached-microdescs' or filename == 'cached-microdescs.new':
        return stem.descriptor.microdescriptor._parse_file(descriptor_file, validate = validate, **kwargs)
      elif filename == 'cached-consensus':
        return stem.descriptor.networkstatus._parse_file(descriptor_file, validate = validate, document_handler = document_handler, **kwargs)
      elif filename == 'cached-microdesc-consensus':
        return stem.descriptor.networkstatus._parse_file(descriptor_file, is_microdescriptor = True, validate = validate, document_handler = document_handler, **kwargs)
      else:
        raise TypeError("Unable to determine the descriptor's type. filename: '%s', first line: '%s'" % (filename, first_line))

  if file_parser:
    for desc in file_parser(descriptor_file):
      if descriptor_path is not None:
        desc._set_path(os.path.abspath(descriptor_path))
  for desc in parse(descriptor_file):
    if descriptor_path is not None:
      desc._set_path(os.path.abspath(descriptor_path))

      yield desc

    return

  # Not recognized as a descriptor file.

  raise TypeError("Unable to determine the descriptor's type. filename: '%s', first line: '%s'" % (filename, first_line))
    yield desc


def _parse_file_for_path(descriptor_file, *args, **kwargs):
@@ -253,6 +318,9 @@ def _parse_file_for_tarfile(descriptor_file, *args, **kwargs):
    if tar_entry.isfile():
      entry = descriptor_file.extractfile(tar_entry)

      if tar_entry.size == 0:
        continue

      try:
        for desc in parse_file(entry, *args, **kwargs):
          desc._set_archive_path(entry.name)
@@ -320,6 +388,78 @@ def _parse_metrics_file(descriptor_type, major_version, minor_version, descripto
  raise TypeError("Unrecognized metrics descriptor format. type: '%s', version: '%i.%i'" % (descriptor_type, major_version, minor_version))


def _descriptor_content(attr = None, exclude = (), header_template = (), footer_template = ()):
  """
  Constructs a minimal descriptor with the given attributes. The content we
  provide back is of the form...

  * header_template (with matching attr filled in)
  * unused attr entries
  * footer_template (with matching attr filled in)

  So for instance...

  ::

    _descriptor_content(
      attr = {'nickname': 'caerSidi', 'contact': 'atagar'},
      header_template = (
        ('nickname', 'foobar'),
        ('fingerprint', '12345'),
      ),
    )

  ... would result in...

  ::

    nickname caerSidi
    fingerprint 12345
    contact atagar

  :param dict attr: keyword/value mappings to be included in the descriptor
  :param list exclude: mandatory keywords to exclude from the descriptor
  :param tuple header_template: key/value pairs for mandatory fields before unrecognized content
  :param tuple footer_template: key/value pairs for mandatory fields after unrecognized content

  :returns: bytes with the requested descriptor content
  """

  header_content, footer_content = [], []
  attr = {} if attr is None else OrderedDict(attr)  # shallow copy since we're destructive

  for content, template in ((header_content, header_template),
                            (footer_content, footer_template)):
    for keyword, value in template:
      if keyword in exclude:
        continue

      value = stem.util.str_tools._to_unicode(attr.pop(keyword, value))

      if value is None:
        continue
      elif isinstance(value, (tuple, list)):
        for v in value:
          content.append('%s %s' % (keyword, v))
      elif value == '':
        content.append(keyword)
      elif value.startswith('\n'):
        # some values like crypto follow the line instead
        content.append('%s%s' % (keyword, value))
      else:
        content.append('%s %s' % (keyword, value))

  remainder = []

  for k, v in attr.items():
    if isinstance(v, (tuple, list)):
      remainder += ['%s %s' % (k, entry) for entry in v]
    else:
      remainder.append('%s %s' % (k, v))

  return stem.util.str_tools._to_bytes('\n'.join(header_content + remainder + footer_content))


def _value(line, entries):
  return entries[line][0][0]

@@ -328,13 +468,18 @@ def _values(line, entries):
  return [entry[0] for entry in entries[line]]


def _parse_simple_line(keyword, attribute):
def _parse_simple_line(keyword, attribute, func = None):
  def _parse(descriptor, entries):
    setattr(descriptor, attribute, _value(keyword, entries))
    value = _value(keyword, entries)
    setattr(descriptor, attribute, func(value) if func else value)

  return _parse


def _parse_if_present(keyword, attribute):
  return lambda descriptor, entries: setattr(descriptor, attribute, keyword in entries)


def _parse_bytes_line(keyword, attribute):
  def _parse(descriptor, entries):
    line_match = re.search(stem.util.str_tools._to_bytes('^(opt )?%s(?:[%s]+(.*))?$' % (keyword, WHITESPACE)), descriptor.get_bytes(), re.MULTILINE)
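The new func argument lets a parser post-process the value before it's
assigned. A sketch with a hypothetical numeric field::

  # builds a parser that sets descriptor.uptime = int(value)

  _parse_uptime_line = _parse_simple_line('uptime', 'uptime', func = int)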
@@ -377,6 +522,37 @@ def _parse_forty_character_hex(keyword, attribute):
  return _parse


def _parse_protocol_line(keyword, attribute):
  def _parse(descriptor, entries):
    # parses 'protocol' entries like: Cons=1-2 Desc=1-2 DirCache=1 HSDir=1

    value = _value(keyword, entries)
    protocols = OrderedDict()

    for k, v in _mappings_for(keyword, value):
      versions = []

      if not v:
        continue

      for entry in v.split(','):
        if '-' in entry:
          min_value, max_value = entry.split('-', 1)
        else:
          min_value = max_value = entry

        if not min_value.isdigit() or not max_value.isdigit():
          raise ValueError('Protocol values should be a number or number range, but was: %s %s' % (keyword, value))

        versions += range(int(min_value), int(max_value) + 1)

      protocols[k] = versions

    setattr(descriptor, attribute, protocols)

  return _parse


def _parse_key_block(keyword, attribute, expected_block_type, value_attribute = None):
  def _parse(descriptor, entries):
    value, block_type, block_contents = entries[keyword][0]
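To illustrate the range expansion above, a line such as 'proto Cons=1-2
Desc=1-2 DirCache=1 HSDir=1' would set the attribute to an OrderedDict along
the lines of::

  {'Cons': [1, 2], 'Desc': [1, 2], 'DirCache': [1], 'HSDir': [1]}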
@@ -392,6 +568,48 @@ def _parse_key_block(keyword, attribute, expected_block_type, value_attribute =
  return _parse


def _mappings_for(keyword, value, require_value = False, divider = ' '):
  """
  Parses an attribute as a series of 'key=value' mappings. Unlike _parse_*
  functions this is a helper, returning the attribute value rather than setting
  a descriptor field. This way parsers can perform additional validations.

  :param str keyword: descriptor field being parsed
  :param str value: 'attribute => values' mappings to parse
  :param str divider: separator between the key/value mappings
  :param bool require_value: validates that values are not empty

  :returns: **generator** with the key/value of the map attribute

  :raises: **ValueError** if descriptor content is invalid
  """

  if value is None:
    return  # no descriptor value to process
  elif value == '':
    return  # descriptor field was present, but blank

  for entry in value.split(divider):
    if '=' not in entry:
      raise ValueError("'%s' should be a series of 'key=value' pairs but was: %s" % (keyword, value))

    k, v = entry.split('=', 1)

    if require_value and not v:
      raise ValueError("'%s' line's %s mapping had a blank value: %s" % (keyword, k, value))

    yield k, v


def _copy(default):
  if default is None or isinstance(default, (bool, stem.exit_policy.ExitPolicy)):
    return default  # immutable
  elif default in EMPTY_COLLECTION:
    return type(default)()  # collection construction tad faster than copy
  else:
    return copy.copy(default)


class Descriptor(object):
  """
  Common parent for all types of descriptors.
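A quick sketch of this generator with illustrative values::

  >>> list(_mappings_for('bridge-ips', 'us=24 de=16'))
  [('us', '24'), ('de', '16')]

  >>> list(_mappings_for('dirreq-v3-resp', 'ok=12,not-found=0', divider = ','))
  [('ok', '12'), ('not-found', '0')]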
@@ -408,6 +626,55 @@ class Descriptor(object):
    self._entries = {}
    self._unrecognized_lines = []

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False):
    """
    Creates descriptor content with the given attributes. Mandatory fields are
    filled with dummy information unless data is supplied. This doesn't yet
    create a valid signature.

    .. versionadded:: 1.6.0

    :param dict attr: keyword/value mappings to be included in the descriptor
    :param list exclude: mandatory keywords to exclude from the descriptor, this
      results in an invalid descriptor
    :param bool sign: includes cryptographic signatures and digests if True

    :returns: **str** with the content of a descriptor

    :raises:
      * **ImportError** if cryptography is unavailable and sign is True
      * **NotImplementedError** if not implemented for this descriptor type
    """

    raise NotImplementedError("The create and content methods haven't been implemented for %s" % cls.__name__)

  @classmethod
  def create(cls, attr = None, exclude = (), validate = True, sign = False):
    """
    Creates a descriptor with the given attributes. Mandatory fields are filled
    with dummy information unless data is supplied. This doesn't yet create a
    valid signature.

    .. versionadded:: 1.6.0

    :param dict attr: keyword/value mappings to be included in the descriptor
    :param list exclude: mandatory keywords to exclude from the descriptor, this
      results in an invalid descriptor
    :param bool validate: checks the validity of the descriptor's content if
      **True**, skips these checks otherwise
    :param bool sign: includes cryptographic signatures and digests if True

    :returns: :class:`~stem.descriptor.Descriptor` subclass

    :raises:
      * **ValueError** if the contents is malformed and validate is True
      * **ImportError** if cryptography is unavailable and sign is True
      * **NotImplementedError** if not implemented for this descriptor type
    """

    return cls(cls.content(attr, exclude, sign), validate = validate)

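  # For instance, creating an unsigned descriptor with the classmethods above
  # is a one-liner (a sketch, using stem's server descriptor subclass)...
  #
  #   from stem.descriptor.server_descriptor import RelayDescriptor
  #
  #   desc = RelayDescriptor.create({'nickname': 'caerSidi'})
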
  def get_path(self):
    """
    Provides the absolute path that we loaded this descriptor from.
@@ -471,12 +738,6 @@ class Descriptor(object):
    if parser_for_line is None:
      parser_for_line = self.PARSER_FOR_LINE

    # set defaults

    for attr in self.ATTRIBUTES:
      if not hasattr(self, attr):
        setattr(self, attr, copy.copy(self.ATTRIBUTES[attr][0]))

    for keyword, values in list(entries.items()):
      try:
        if keyword in parser_for_line:
@@ -489,9 +750,9 @@ class Descriptor(object):
          line += '\n%s' % block_contents

        self._unrecognized_lines.append(line)
      except ValueError as exc:
      except ValueError:
        if validate:
          raise exc
          raise

  def _set_path(self, path):
    self._path = path
@@ -515,28 +776,25 @@ class Descriptor(object):
    """

    if not stem.prereq.is_crypto_available():
      raise ValueError('Generating the signed digest requires pycrypto')
      raise ValueError('Generating the signed digest requires the cryptography module')

    from Crypto.Util import asn1
    from Crypto.Util.number import bytes_to_long, long_to_bytes
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import load_der_public_key
    from cryptography.utils import int_to_bytes, int_from_bytes

    # get the ASN.1 sequence

    seq = asn1.DerSequence()
    seq.decode(_bytes_for_block(signing_key))
    modulus, public_exponent = seq[0], seq[1]
    key = load_der_public_key(_bytes_for_block(signing_key), default_backend())
    modulus = key.public_numbers().n
    public_exponent = key.public_numbers().e

    sig_as_bytes = _bytes_for_block(signature)
    sig_as_long = bytes_to_long(sig_as_bytes)  # convert signature to an int
    blocksize = 128  # block size will always be 128 for a 1024 bit key
    sig_as_long = int_from_bytes(sig_as_bytes, byteorder='big')  # convert signature to an int
    blocksize = len(sig_as_bytes)  # 256B for NetworkStatusDocuments, 128B for others

    # use the public exponent[e] & the modulus[n] to decrypt the int

    decrypted_int = pow(sig_as_long, public_exponent, modulus)

    # convert the int to a byte array

    decrypted_bytes = long_to_bytes(decrypted_int, blocksize)
    decrypted_bytes = int_to_bytes(decrypted_int, blocksize)

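    # As a toy illustration of the pow() step above (using the classic example
    # key n = 3233, e = 17 rather than a real 1024 bit key): signing 123 with
    # the matching private exponent yields 855, and verification recovers it...
    #
    #   assert pow(855, 17, 3233) == 123
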
    ############################################################################
    # The decrypted bytes should have a structure exactly along these lines.
@@ -551,7 +809,7 @@ class Descriptor(object):
    ############################################################################

    try:
      if decrypted_bytes.index(b'\x00\x01') != 0:
      if decrypted_bytes.index(DIGEST_TYPE_INFO) != 0:
        raise ValueError('Verification failed, identifier missing')
    except ValueError:
      raise ValueError('Verification failed, malformed data')
@@ -560,7 +818,7 @@ class Descriptor(object):
      identifier_offset = 2

      # find the separator
      seperator_index = decrypted_bytes.index(b'\x00', identifier_offset)
      seperator_index = decrypted_bytes.index(DIGEST_SEPARATOR, identifier_offset)
    except ValueError:
      raise ValueError('Verification failed, seperator not found')

@@ -594,19 +852,38 @@ class Descriptor(object):
    return stem.util.str_tools._to_unicode(digest_hash.hexdigest().upper())

  def __getattr__(self, name):
    # If attribute isn't already present we might be lazy loading it...
    # We can't use standard hasattr() since it calls this function, recursing.
    # Doing so works since it stops recursing after several dozen iterations
    # (not sure why), but horrible in terms of performance.

    if self._lazy_loading and name in self.ATTRIBUTES:
    def has_attr(attr):
      try:
        super(Descriptor, self).__getattribute__(attr)
        return True
      except:
        return False

    # If an attribute we should have isn't present it means either...
    #
    #   a. we still need to lazy load this
    #   b. we read the whole descriptor but it wasn't present, so needs the default

    if name in self.ATTRIBUTES and not has_attr(name):
      default, parsing_function = self.ATTRIBUTES[name]

      try:
        parsing_function(self, self._entries)
      except (ValueError, KeyError):
      if self._lazy_loading:
        try:
          # despite having a validation failure check to see if we set something
          return super(Descriptor, self).__getattribute__(name)
        except AttributeError:
          setattr(self, name, copy.copy(default))
          parsing_function(self, self._entries)
        except (ValueError, KeyError):
          # Set defaults for anything the parsing function should've covered.
          # Despite having a validation failure some attributes might be set in
          # which case we keep them.

          for attr_name, (attr_default, attr_parser) in self.ATTRIBUTES.items():
            if parsing_function == attr_parser and not has_attr(attr_name):
              setattr(self, attr_name, _copy(attr_default))
      else:
        setattr(self, name, _copy(default))

    return super(Descriptor, self).__getattribute__(name)

@@ -617,6 +894,31 @@ class Descriptor(object):
    return self._raw_contents


class NewlineNormalizer(object):
  """
  File wrapper that normalizes CRLF line endings.
  """

  def __init__(self, wrapped_file):
    self._wrapped_file = wrapped_file
    self.name = getattr(wrapped_file, 'name', None)

  def read(self, *args):
    return self._wrapped_file.read(*args).replace(b'\r\n', b'\n')

  def readline(self, *args):
    return self._wrapped_file.readline(*args).replace(b'\r\n', b'\n')

  def readlines(self, *args):
    return [line.rstrip(b'\r') for line in self._wrapped_file.readlines(*args)]

  def seek(self, *args):
    return self._wrapped_file.seek(*args)

  def tell(self, *args):
    return self._wrapped_file.tell(*args)

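# A usage sketch for the wrapper above with an in-memory file...
#
#   import io
#
#   wrapped = NewlineNormalizer(io.BytesIO(b'first\r\nsecond\r\n'))
#   wrapped.readline()  # b'first\n'
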
def _read_until_keywords(keywords, descriptor_file, inclusive = False, ignore_first = False, skip = False, end_position = None, include_ending_keyword = False):
  """
  Reads from the descriptor file until we get to one of the given keywords or reach the
@@ -636,23 +938,17 @@ def _read_until_keywords(keywords, descriptor_file, inclusive = False, ignore_fi
    **True**
  """

  if skip:
    content = None
    content_append = lambda x: None
  else:
    content = []
    content_append = content.append

  content = None if skip else []
  ending_keyword = None

  if isinstance(keywords, (bytes, str_type)):
  if stem.util._is_str(keywords):
    keywords = (keywords,)

  if ignore_first:
    first_line = descriptor_file.readline()

    if first_line:
      content_append(first_line)
    if first_line and content is not None:
      content.append(first_line)

  keyword_match = re.compile(SPECIFIC_KEYWORD_LINE % '|'.join(keywords))

@@ -674,12 +970,12 @@ def _read_until_keywords(keywords, descriptor_file, inclusive = False, ignore_fi

      if not inclusive:
        descriptor_file.seek(last_position)
      else:
        content_append(line)
      elif content is not None:
        content.append(line)

      break
    else:
      content_append(line)
    elif content is not None:
      content.append(line)

  if include_ending_keyword:
    return (content, ending_keyword)
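A sketch of how this helper is typically called (illustrative keyword and
file)::

  with open('cached-descriptors', 'rb') as desc_file:
    intro_lines = _read_until_keywords('router-signature', desc_file)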
@@ -741,7 +1037,109 @@ def _get_pseudo_pgp_block(remaining_contents):
  return None


def _get_descriptor_components(raw_contents, validate, extra_keywords = ()):
def create_signing_key(private_key = None):
  """
  Serializes a signing key if we have one. Otherwise this creates a new signing
  key we can use to create descriptors.

  .. versionadded:: 1.6.0

  :param cryptography.hazmat.backends.openssl.rsa._RSAPrivateKey private_key: private key

  :returns: :class:`~stem.descriptor.__init__.SigningKey` that can be used to
    create descriptors

  :raises: **ImportError** if the cryptography module is unavailable
  """

  if not stem.prereq.is_crypto_available():
    raise ImportError('Signing requires the cryptography module')

  from cryptography.hazmat.backends import default_backend
  from cryptography.hazmat.primitives import serialization
  from cryptography.hazmat.primitives.asymmetric import rsa

  if private_key is None:
    private_key = rsa.generate_private_key(
      public_exponent = 65537,
      key_size = 1024,
      backend = default_backend(),
    )

    # When signing the cryptography module includes a constant indicating
    # the hash algorithm used. Tor doesn't. This causes signature
    # validation failures and unfortunately cryptography has no nice way
    # of excluding these so we need to mock out part of their internals...
    #
    #   https://github.com/pyca/cryptography/issues/3713

    def no_op(*args, **kwargs):
      return 1

    private_key._backend._lib.EVP_PKEY_CTX_set_signature_md = no_op
    private_key._backend.openssl_assert = no_op

  public_key = private_key.public_key()
  public_digest = b'\n' + public_key.public_bytes(
    encoding = serialization.Encoding.PEM,
    format = serialization.PublicFormat.PKCS1,
  ).strip()

  return SigningKey(private_key, public_key, public_digest)


def _append_router_signature(content, private_key):
  """
  Appends a router signature to a server or extrainfo descriptor.

  :param bytes content: descriptor content up through 'router-signature\\n'
  :param cryptography.hazmat.backends.openssl.rsa._RSAPrivateKey private_key:
    private relay signing key

  :returns: **bytes** with the signed descriptor content
  """

  if not stem.prereq.is_crypto_available():
    raise ImportError('Signing requires the cryptography module')

  from cryptography.hazmat.primitives import hashes
  from cryptography.hazmat.primitives.asymmetric import padding

  signature = base64.b64encode(private_key.sign(content, padding.PKCS1v15(), hashes.SHA1()))
  return content + b'\n'.join([b'-----BEGIN SIGNATURE-----'] + stem.util.str_tools._split_by_length(signature, 64) + [b'-----END SIGNATURE-----\n'])


def _random_nickname():
  return ('Unnamed%i' % random.randint(0, 100000000000000))[:19]


def _random_fingerprint():
  return ('%040x' % random.randrange(16 ** 40)).upper()


def _random_ipv4_address():
  return '%i.%i.%i.%i' % (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255), random.randint(0, 255))


def _random_date():
  return '%i-%02i-%02i %02i:%02i:%02i' % (random.randint(2000, 2015), random.randint(1, 12), random.randint(1, 20), random.randint(0, 23), random.randint(0, 59), random.randint(0, 59))


def _random_crypto_blob(block_type = None):
  """
  Provides a random string that can be used for crypto blocks.
  """

  random_base64 = stem.util.str_tools._to_unicode(base64.b64encode(os.urandom(140)))
  crypto_blob = '\n'.join(stem.util.str_tools._split_by_length(random_base64, 64))

  if block_type:
    return '\n-----BEGIN %s-----\n%s\n-----END %s-----' % (block_type, crypto_blob, block_type)
  else:
    return crypto_blob


def _descriptor_components(raw_contents, validate, extra_keywords = (), non_ascii_fields = ()):
  """
  Initial breakup of the server descriptor contents to make parsing easier.

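A sketch of signing with the helpers above (assuming the cryptography module
is installed; the descriptor body here is a placeholder)::

  key = create_signing_key()
  signed = _append_router_signature(b'router test 127.0.0.1 9001 0 0\nrouter-signature\n', key.private)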
@@ -760,6 +1158,7 @@ def _get_descriptor_components(raw_contents, validate, extra_keywords = ()):
    True, skips these checks otherwise
  :param list extra_keywords: entity keywords to put into a separate listing
    with ordering intact
  :param list non_ascii_fields: fields containing non-ascii content

  :returns:
    **collections.OrderedDict** with the 'keyword => (value, pgp key)' entries
@@ -815,11 +1214,18 @@ def _get_descriptor_components(raw_contents, validate, extra_keywords = ()):
        block_type, block_contents = block_attr
      else:
        block_type, block_contents = None, None
    except ValueError as exc:
    except ValueError:
      if not validate:
        continue

      raise exc
      raise

    if validate and keyword not in non_ascii_fields:
      try:
        value.encode('ascii')
      except UnicodeError:
        replaced = ''.join([(char if char in string.printable else '?') for char in value])
        raise ValueError("'%s' line had non-ascii content: %s" % (keyword, replaced))

    if keyword in extra_keywords:
      extra_entries.append('%s %s' % (keyword, value))
@@ -831,6 +1237,7 @@ def _get_descriptor_components(raw_contents, validate, extra_keywords = ()):
  else:
    return entries


# importing at the end to avoid circular dependencies on our Descriptor class

import stem.descriptor.server_descriptor

@@ -0,0 +1,271 @@
# Copyright 2017-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""
Parsing for `Tor Ed25519 certificates
<https://gitweb.torproject.org/torspec.git/tree/cert-spec.txt>`_, which are
used to validate the key used to sign server descriptors.

.. versionadded:: 1.6.0

**Module Overview:**

::

  Ed25519Certificate - Ed25519 signing key certificate
    | +- Ed25519CertificateV1 - version 1 Ed25519 certificate
    | |- is_expired - checks if certificate is presently expired
    | +- validate - validates signature of a server descriptor
    |
    +- parse - reads base64 encoded certificate data

  Ed25519Extension - extension included within an Ed25519Certificate

.. data:: CertType (enum)

  Purpose of Ed25519 certificate. As new certificate versions are added this
  enumeration will expand.

  ============== ===========
  CertType       Description
  ============== ===========
  **SIGNING**    signing a signing key with an identity key
  **LINK_CERT**  TLS link certificate signed with ed25519 signing key
  **AUTH**       authentication key signed with ed25519 signing key
  ============== ===========

.. data:: ExtensionType (enum)

  Recognized extension types.

  ==================== ===========
  ExtensionType        Description
  ==================== ===========
  **HAS_SIGNING_KEY**  includes key used to sign the certificate
  ==================== ===========

.. data:: ExtensionFlag (enum)

  Flags that can be assigned to Ed25519 certificate extensions.

  ====================== ===========
  ExtensionFlag          Description
  ====================== ===========
  **AFFECTS_VALIDATION** extension affects whether the certificate is valid
  **UNKNOWN**            extension includes flags not yet recognized by stem
  ====================== ===========
"""

import base64
import binascii
import collections
import datetime
import hashlib

import stem.prereq
import stem.util.enum
import stem.util.str_tools

ED25519_HEADER_LENGTH = 40
ED25519_SIGNATURE_LENGTH = 64
ED25519_ROUTER_SIGNATURE_PREFIX = b'Tor router descriptor signature v1'

CertType = stem.util.enum.UppercaseEnum('SIGNING', 'LINK_CERT', 'AUTH')
ExtensionType = stem.util.enum.Enum(('HAS_SIGNING_KEY', 4),)
ExtensionFlag = stem.util.enum.UppercaseEnum('AFFECTS_VALIDATION', 'UNKNOWN')

class Ed25519Extension(collections.namedtuple('Ed25519Extension', ['type', 'flags', 'flag_int', 'data'])):
  """
  Extension within an Ed25519 certificate.

  :var int type: extension type
  :var list flags: extension attribute flags
  :var int flag_int: integer encoding of the extension attribute flags
  :var bytes data: data the extension concerns
  """


class Ed25519Certificate(object):
  """
  Base class for an Ed25519 certificate.

  :var int version: certificate format version
  :var str encoded: base64 encoded ed25519 certificate
  """

  def __init__(self, version, encoded):
    self.version = version
    self.encoded = encoded

  @staticmethod
  def parse(content):
    """
    Parses the given base64 encoded data as an Ed25519 certificate.

    :param str content: base64 encoded certificate

    :returns: :class:`~stem.descriptor.certificate.Ed25519Certificate` subclass
      for the given certificate

    :raises: **ValueError** if content is malformed
    """

    try:
      decoded = base64.b64decode(stem.util.str_tools._to_bytes(content))

      if not decoded:
        raise TypeError('empty')
    except (TypeError, binascii.Error) as exc:
      raise ValueError("Ed25519 certificate wasn't properly base64 encoded (%s):\n%s" % (exc, content))

    version = stem.util.str_tools._to_int(decoded[0:1])

    if version == 1:
      return Ed25519CertificateV1(version, content, decoded)
    else:
      raise ValueError('Ed25519 certificate is version %i. Parser presently only supports version 1.' % version)

class Ed25519CertificateV1(Ed25519Certificate):
  """
  Version 1 Ed25519 certificate, which is used for signing tor server
  descriptors.

  :var CertType type: certificate purpose
  :var datetime expiration: expiration of the certificate
  :var int key_type: format of the key
  :var bytes key: key content
  :var list extensions: :class:`~stem.descriptor.certificate.Ed25519Extension` in this certificate
  :var bytes signature: certificate signature
  """

  def __init__(self, version, encoded, decoded):
    super(Ed25519CertificateV1, self).__init__(version, encoded)

    if len(decoded) < ED25519_HEADER_LENGTH + ED25519_SIGNATURE_LENGTH:
      raise ValueError('Ed25519 certificate was %i bytes, but should be at least %i' % (len(decoded), ED25519_HEADER_LENGTH + ED25519_SIGNATURE_LENGTH))

    cert_type = stem.util.str_tools._to_int(decoded[1:2])

    if cert_type in (0, 1, 2, 3):
      raise ValueError('Ed25519 certificate cannot have a type of %i. This is reserved to avoid conflicts with tor CERTS cells.' % cert_type)
    elif cert_type == 4:
      self.type = CertType.SIGNING
    elif cert_type == 5:
      self.type = CertType.LINK_CERT
    elif cert_type == 6:
      self.type = CertType.AUTH
    elif cert_type == 7:
      raise ValueError('Ed25519 certificate cannot have a type of 7. This is reserved for RSA identity cross-certification.')
    else:
      raise ValueError("BUG: Ed25519 certificate type is decoded from one byte. It shouldn't be possible to have a value of %i." % cert_type)

    # expiration time is in hours since epoch
    try:
      self.expiration = datetime.datetime.utcfromtimestamp(stem.util.str_tools._to_int(decoded[2:6]) * 3600)
    except ValueError as exc:
      raise ValueError('Invalid expiration timestamp (%s): %s' % (exc, stem.util.str_tools._to_int(decoded[2:6]) * 3600))

    self.key_type = stem.util.str_tools._to_int(decoded[6:7])
    self.key = decoded[7:39]
    self.signature = decoded[-ED25519_SIGNATURE_LENGTH:]

    self.extensions = []
    extension_count = stem.util.str_tools._to_int(decoded[39:40])
    remaining_data = decoded[40:-ED25519_SIGNATURE_LENGTH]

    for i in range(extension_count):
      if len(remaining_data) < 4:
        raise ValueError('Ed25519 extension is missing header field data')

      extension_length = stem.util.str_tools._to_int(remaining_data[:2])
      extension_type = stem.util.str_tools._to_int(remaining_data[2:3])
      extension_flags = stem.util.str_tools._to_int(remaining_data[3:4])
      extension_data = remaining_data[4:4 + extension_length]

      if extension_length != len(extension_data):
        raise ValueError("Ed25519 extension is truncated. It should have %i bytes of data but there's only %i." % (extension_length, len(extension_data)))

      flags, remaining_flags = [], extension_flags

      if remaining_flags % 2 == 1:
        flags.append(ExtensionFlag.AFFECTS_VALIDATION)
        remaining_flags -= 1

      if remaining_flags:
        flags.append(ExtensionFlag.UNKNOWN)

      if extension_type == ExtensionType.HAS_SIGNING_KEY and len(extension_data) != 32:
        raise ValueError('Ed25519 HAS_SIGNING_KEY extension must be 32 bytes, but was %i.' % len(extension_data))

      self.extensions.append(Ed25519Extension(extension_type, flags, extension_flags, extension_data))
      remaining_data = remaining_data[4 + extension_length:]

    if remaining_data:
      raise ValueError('Ed25519 certificate had %i bytes of unused extension data' % len(remaining_data))

  def is_expired(self):
    """
    Checks if this certificate is presently expired or not.

    :returns: **True** if the certificate has expired, **False** otherwise
    """

    return datetime.datetime.now() > self.expiration

  def validate(self, server_descriptor):
    """
    Validates our signing key and that the given descriptor content matches its
    Ed25519 signature.

    :param stem.descriptor.server_descriptor.Ed25519 server_descriptor: relay
      server descriptor to validate

    :raises:
      * **ValueError** if signing key or descriptor are invalid
      * **ImportError** if pynacl module is unavailable
    """

    if not stem.prereq._is_pynacl_available():
      raise ImportError('Certificate validation requires the pynacl module')

    import nacl.signing
    import nacl.encoding
    from nacl.exceptions import BadSignatureError

    descriptor_content = server_descriptor.get_bytes()
    signing_key = None

    if server_descriptor.ed25519_master_key:
      signing_key = nacl.signing.VerifyKey(stem.util.str_tools._to_bytes(server_descriptor.ed25519_master_key) + b'=', encoder = nacl.encoding.Base64Encoder)
    else:
      for extension in self.extensions:
        if extension.type == ExtensionType.HAS_SIGNING_KEY:
          signing_key = nacl.signing.VerifyKey(extension.data)
          break

    if not signing_key:
      raise ValueError('Server descriptor missing an ed25519 signing key')

    try:
      signing_key.verify(base64.b64decode(stem.util.str_tools._to_bytes(self.encoded))[:-ED25519_SIGNATURE_LENGTH], self.signature)
    except BadSignatureError as exc:
      raise ValueError('Ed25519KeyCertificate signing key is invalid (%s)' % exc)

    # ed25519 signature validates descriptor content up until the signature itself

    if b'router-sig-ed25519 ' not in descriptor_content:
      raise ValueError("Descriptor doesn't have a router-sig-ed25519 entry.")

    signed_content = descriptor_content[:descriptor_content.index(b'router-sig-ed25519 ') + 19]
    descriptor_sha256_digest = hashlib.sha256(ED25519_ROUTER_SIGNATURE_PREFIX + signed_content).digest()

    missing_padding = len(server_descriptor.ed25519_signature) % 4
    signature_bytes = base64.b64decode(stem.util.str_tools._to_bytes(server_descriptor.ed25519_signature) + b'=' * missing_padding)

    try:
      verify_key = nacl.signing.VerifyKey(self.key)
      verify_key.verify(descriptor_sha256_digest, signature_bytes)
    except BadSignatureError as exc:
      raise ValueError('Descriptor Ed25519 certificate signature invalid (%s)' % exc)

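A sketch of reading and checking one of these certificates (assuming a base64
blob from a descriptor's 'identity-ed25519' block, and pynacl for validation)::

  from stem.descriptor.certificate import Ed25519Certificate

  cert = Ed25519Certificate.parse(cert_base64)

  if not cert.is_expired():
    cert.validate(server_descriptor)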
@@ -1,4 +1,4 @@
# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""
@@ -10,6 +10,11 @@ Toolkit for exporting descriptors to other formats.

  export_csv - Exports descriptors to a CSV
  export_csv_file - Writes exported CSV output to a file

.. deprecated:: 1.7.0

  This module will likely be removed in Stem 2.0 due to lack of usage. If you
  use this module please `let me know <https://www.atagar.com/contact/>`_.
"""

import csv
@@ -98,7 +103,7 @@ def export_csv_file(output_file, descriptors, included_fields = (), excluded_fie

  writer = csv.DictWriter(output_file, included_fields, dialect = _ExportDialect(), extrasaction='ignore')

  if header and stem.prereq.is_python_27():
  if header and not stem.prereq._is_python_26():
    writer.writeheader()

  for desc in descriptors:

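Despite the deprecation, usage remains a couple of lines (a sketch, assuming
server descriptors parsed elsewhere)::

  import stem.descriptor.export

  with open('descriptors.csv', 'w') as csv_file:
    stem.descriptor.export.export_csv_file(csv_file, descriptors)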
@@ -1,4 +1,4 @@
# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""
@@ -7,7 +7,7 @@ their server descriptor is published and have a similar format. However, unlike
server descriptors these don't contain information that Tor clients require to
function and as such aren't fetched by default.

Defined in section 2.2 of the `dir-spec
Defined in section 2.1.2 of the `dir-spec
<https://gitweb.torproject.org/torspec.git/tree/dir-spec.txt>`_,
extra-info descriptors contain interesting but non-vital information such as
usage statistics. Tor clients cannot request these documents for bridges.
@@ -19,8 +19,7 @@ Extra-info descriptors are available from a few sources...

* control port via 'GETINFO extra-info/digest/\*' queries
* the 'cached-extrainfo' file in tor's data directory

* Archived descriptors provided by CollecTor
  (https://collector.torproject.org/).
* Archived descriptors provided by `CollecTor <https://metrics.torproject.org/collector.html>`_.

* Directory authorities and mirrors via their DirPort.
@@ -72,6 +71,7 @@ import functools
import hashlib
import re

import stem.prereq
import stem.util.connection
import stem.util.enum
import stem.util.str_tools
@@ -79,19 +79,27 @@ import stem.util.str_tools
from stem.descriptor import (
  PGP_BLOCK_END,
  Descriptor,
  create_signing_key,
  _descriptor_content,
  _read_until_keywords,
  _get_descriptor_components,
  _descriptor_components,
  _value,
  _values,
  _parse_simple_line,
  _parse_timestamp_line,
  _parse_forty_character_hex,
  _parse_key_block,
  _mappings_for,
  _append_router_signature,
  _random_nickname,
  _random_fingerprint,
  _random_date,
  _random_crypto_blob,
)

try:
  # added in python 3.2
if stem.prereq._is_lru_cache_available():
  from functools import lru_cache
except ImportError:
else:
  from stem.util.lru_cache import lru_cache

# known statuses for dirreq-v2-resp and dirreq-v3-resp...
@@ -154,7 +162,6 @@ SINGLE_FIELDS = (
  'exit-streams-opened',
)


_timestamp_re = re.compile('^(.*) \(([0-9]+) s\)( .*)?$')
_locale_re = re.compile('^[a-zA-Z0-9\?]{2}$')

@@ -280,14 +287,15 @@ def _parse_transport_line(descriptor, entries):
      raise ValueError("Transport line's address:port entry is missing a colon: transport %s" % value)

    name = value_comp[0]
    address, port_str = value_comp[1].split(':', 1)
    address, port_str = value_comp[1].rsplit(':', 1)

    if not stem.util.connection.is_valid_ipv4_address(address) or \
       stem.util.connection.is_valid_ipv6_address(address):
       stem.util.connection.is_valid_ipv6_address(address, allow_brackets = True):
      raise ValueError('Transport line has a malformed address: transport %s' % value)
    elif not stem.util.connection.is_valid_port(port_str):
      raise ValueError('Transport line has a malformed port: transport %s' % value)

    address = address.lstrip('[').rstrip(']')
    port = int(port_str)
    args = value_comp[2:] if len(value_comp) >= 3 else []
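For instance, an illustrative bridge line and the values the code above
extracts from it::

  transport obfs4 198.51.100.5:443
  # name = 'obfs4', address = '198.51.100.5', port = 443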
@@ -309,6 +317,21 @@ def _parse_cell_circuits_per_decline_line(descriptor, entries):
  descriptor.cell_circuits_per_decile = int(value)


def _parse_padding_counts_line(descriptor, entries):
  # "padding-counts" YYYY-MM-DD HH:MM:SS (NSEC s) key=val key=val...

  value = _value('padding-counts', entries)
  timestamp, interval, remainder = _parse_timestamp_and_interval('padding-counts', value)
  counts = {}

  for k, v in _mappings_for('padding-counts', remainder, require_value = True):
    counts[k] = int(v) if v.isdigit() else v

  setattr(descriptor, 'padding_counts_end', timestamp)
  setattr(descriptor, 'padding_counts_interval', interval)
  setattr(descriptor, 'padding_counts', counts)


def _parse_dirreq_line(keyword, recognized_counts_attr, unrecognized_counts_attr, descriptor, entries):
  value = _value(keyword, entries)
@@ -319,22 +342,15 @@ def _parse_dirreq_line(keyword, recognized_counts_attr, unrecognized_counts_attr
  key_set = DirResponse if is_response_stats else DirStat

  key_type = 'STATUS' if is_response_stats else 'STAT'
  error_msg = '%s lines should contain %s=COUNT mappings: %s %s' % (keyword, key_type, keyword, value)

  if value:
    for entry in value.split(','):
      if '=' not in entry:
        raise ValueError(error_msg)
  for status, count in _mappings_for(keyword, value, divider = ','):
    if not count.isdigit():
      raise ValueError('%s lines should contain %s=COUNT mappings: %s %s' % (keyword, key_type, keyword, value))

      status, count = entry.split('=', 1)

      if count.isdigit():
        if status in key_set:
          recognized_counts[status] = int(count)
        else:
          unrecognized_counts[status] = int(count)
      else:
        raise ValueError(error_msg)
    if status in key_set:
      recognized_counts[status] = int(count)
    else:
      unrecognized_counts[status] = int(count)

  setattr(descriptor, recognized_counts_attr, recognized_counts)
  setattr(descriptor, unrecognized_counts_attr, unrecognized_counts)
@@ -423,22 +439,13 @@ def _parse_port_count_line(keyword, attribute, descriptor, entries):
  # "<keyword>" port=N,port=N,...

  value, port_mappings = _value(keyword, entries), {}
  error_msg = 'Entries in %s line should only be PORT=N entries: %s %s' % (keyword, keyword, value)

  if value:
    for entry in value.split(','):
      if '=' not in entry:
        raise ValueError(error_msg)
  for port, stat in _mappings_for(keyword, value, divider = ','):
    if (port != 'other' and not stem.util.connection.is_valid_port(port)) or not stat.isdigit():
      raise ValueError('Entries in %s line should only be PORT=N entries: %s %s' % (keyword, keyword, value))

      port, stat = entry.split('=', 1)

      if (port == 'other' or stem.util.connection.is_valid_port(port)) and stat.isdigit():
        if port != 'other':
          port = int(port)

        port_mappings[port] = int(stat)
      else:
        raise ValueError(error_msg)
    port = int(port) if port.isdigit() else port
    port_mappings[port] = int(stat)

  setattr(descriptor, attribute, port_mappings)
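For example, an extra-info line such as 'exit-kibibytes-written
80=5,443=907,other=50' would (per the mapping logic above) produce::

  {80: 5, 443: 907, 'other': 50}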
@@ -453,19 +460,12 @@ def _parse_geoip_to_count_line(keyword, attribute, descriptor, entries):
  #   ??,"Unknown"

  value, locale_usage = _value(keyword, entries), {}
  error_msg = 'Entries in %s line should only be CC=N entries: %s %s' % (keyword, keyword, value)

  if value:
    for entry in value.split(','):
      if '=' not in entry:
        raise ValueError(error_msg)
  for locale, count in _mappings_for(keyword, value, divider = ','):
    if not _locale_re.match(locale) or not count.isdigit():
      raise ValueError('Entries in %s line should only be CC=N entries: %s %s' % (keyword, keyword, value))

      locale, count = entry.split('=', 1)

      if _locale_re.match(locale) and count.isdigit():
        locale_usage[locale] = int(count)
      else:
        raise ValueError(error_msg)
    locale_usage[locale] = int(count)

  setattr(descriptor, attribute, locale_usage)
@@ -473,17 +473,11 @@ def _parse_geoip_to_count_line(keyword, attribute, descriptor, entries):
def _parse_bridge_ip_versions_line(descriptor, entries):
  value, ip_versions = _value('bridge-ip-versions', entries), {}

  if value:
    for entry in value.split(','):
      if '=' not in entry:
        raise stem.ProtocolError("The bridge-ip-versions should be a comma separated listing of '<protocol>=<count>' mappings: bridge-ip-versions %s" % value)
  for protocol, count in _mappings_for('bridge-ip-versions', value, divider = ','):
    if not count.isdigit():
      raise stem.ProtocolError('IP protocol count was non-numeric (%s): bridge-ip-versions %s' % (count, value))

      protocol, count = entry.split('=', 1)

      if not count.isdigit():
        raise stem.ProtocolError('IP protocol count was non-numeric (%s): bridge-ip-versions %s' % (count, value))

      ip_versions[protocol] = int(count)
    ip_versions[protocol] = int(count)

  descriptor.ip_versions = ip_versions
@@ -491,17 +485,11 @@ def _parse_bridge_ip_versions_line(descriptor, entries):
def _parse_bridge_ip_transports_line(descriptor, entries):
  value, ip_transports = _value('bridge-ip-transports', entries), {}

  if value:
    for entry in value.split(','):
      if '=' not in entry:
        raise stem.ProtocolError("The bridge-ip-transports should be a comma separated listing of '<protocol>=<count>' mappings: bridge-ip-transports %s" % value)
  for protocol, count in _mappings_for('bridge-ip-transports', value, divider = ','):
    if not count.isdigit():
      raise stem.ProtocolError('Transport count was non-numeric (%s): bridge-ip-transports %s' % (count, value))

      protocol, count = entry.split('=', 1)

      if not count.isdigit():
        raise stem.ProtocolError('Transport count was non-numeric (%s): bridge-ip-transports %s' % (count, value))

      ip_transports[protocol] = int(count)
    ip_transports[protocol] = int(count)

  descriptor.ip_transports = ip_transports
@@ -511,28 +499,30 @@ def _parse_hs_stats(keyword, stat_attribute, extra_attribute, descriptor, entrie

  value, stat, extra = _value(keyword, entries), None, {}

  if value is not None:
    value_comp = value.split()

    if not value_comp:
      raise ValueError("'%s' line was blank" % keyword)
  if value is None:
    pass  # not in the descriptor
  elif value == '':
    raise ValueError("'%s' line was blank" % keyword)
  else:
    if ' ' in value:
      stat_value, remainder = value.split(' ', 1)
    else:
      stat_value, remainder = value, None

    try:
      stat = int(value_comp[0])
      stat = int(stat_value)
    except ValueError:
      raise ValueError("'%s' stat was non-numeric (%s): %s %s" % (keyword, value_comp[0], keyword, value))
      raise ValueError("'%s' stat was non-numeric (%s): %s %s" % (keyword, stat_value, keyword, value))

    for entry in value_comp[1:]:
      if '=' not in entry:
        raise ValueError('Entries after the stat in %s lines should only be key=val entries: %s %s' % (keyword, keyword, value))

      key, val = entry.split('=', 1)
    for key, val in _mappings_for(keyword, remainder):
      extra[key] = val

  setattr(descriptor, stat_attribute, stat)
  setattr(descriptor, extra_attribute, extra)


_parse_identity_ed25519_line = _parse_key_block('identity-ed25519', 'ed25519_certificate', 'ED25519 CERT')
_parse_master_key_ed25519_line = _parse_simple_line('master-key-ed25519', 'ed25519_certificate_hash')
_parse_geoip_db_digest_line = _parse_forty_character_hex('geoip-db-digest', 'geoip_db_digest')
_parse_geoip6_db_digest_line = _parse_forty_character_hex('geoip6-db-digest', 'geoip6_db_digest')
_parse_dirreq_v2_resp_line = functools.partial(_parse_dirreq_line, 'dirreq-v2-resp', 'dir_v2_responses', 'dir_v2_responses_unknown')
@@ -570,6 +560,8 @@ _parse_dirreq_v3_reqs_line = functools.partial(_parse_geoip_to_count_line, 'dirr
_parse_geoip_client_origins_line = functools.partial(_parse_geoip_to_count_line, 'geoip-client-origins', 'geoip_client_origins')
_parse_entry_ips_line = functools.partial(_parse_geoip_to_count_line, 'entry-ips', 'entry_ips')
_parse_bridge_ips_line = functools.partial(_parse_geoip_to_count_line, 'bridge-ips', 'bridge_ips')
_parse_router_sig_ed25519_line = _parse_simple_line('router-sig-ed25519', 'ed25519_signature')
_parse_router_digest_sha256_line = _parse_simple_line('router-digest-sha256', 'router_digest_sha256')
_parse_router_digest_line = _parse_forty_character_hex('router-digest', '_digest')
_parse_router_signature_line = _parse_key_block('router-signature', 'signature', 'SIGNATURE')
@@ -673,6 +665,12 @@ class ExtraInfoDescriptor(Descriptor):
  :var int hs_dir_onions_seen: rounded count of the identities seen
  :var dict hs_dir_onions_seen_attr: **\*** attributes provided for the hs_dir_onions_seen

  **Padding Count Attributes:**

  :var dict padding_counts: **\*** padding parameters
  :var datetime padding_counts_end: end of the period when padding data is being collected
  :var int padding_counts_interval: length in seconds of the interval

  **Bridge Attributes:**

  :var datetime bridge_stats_end: end of the period when stats were gathered
@@ -689,6 +687,10 @@ class ExtraInfoDescriptor(Descriptor):
  .. versionchanged:: 1.4.0
    Added the hs_stats_end, hs_rend_cells, hs_rend_cells_attr,
    hs_dir_onions_seen, and hs_dir_onions_seen_attr attributes.

  .. versionchanged:: 1.6.0
    Added the padding_counts, padding_counts_end, and padding_counts_interval
    attributes.
  """

  ATTRIBUTES = {
@@ -766,6 +768,10 @@ class ExtraInfoDescriptor(Descriptor):
    'hs_dir_onions_seen': (None, _parse_hidden_service_dir_onions_seen_line),
    'hs_dir_onions_seen_attr': ({}, _parse_hidden_service_dir_onions_seen_line),

    'padding_counts': ({}, _parse_padding_counts_line),
    'padding_counts_end': (None, _parse_padding_counts_line),
    'padding_counts_interval': (None, _parse_padding_counts_line),

    'bridge_stats_end': (None, _parse_bridge_stats_end_line),
    'bridge_stats_interval': (None, _parse_bridge_stats_end_line),
    'bridge_ips': (None, _parse_bridge_ips_line),
@@ -811,6 +817,7 @@ class ExtraInfoDescriptor(Descriptor):
    'hidserv-stats-end': _parse_hidden_service_stats_end_line,
    'hidserv-rend-relayed-cells': _parse_hidden_service_rend_relayed_cells_line,
    'hidserv-dir-onions-seen': _parse_hidden_service_dir_onions_seen_line,
    'padding-counts': _parse_padding_counts_line,
    'dirreq-v2-ips': _parse_dirreq_v2_ips_line,
    'dirreq-v3-ips': _parse_dirreq_v3_ips_line,
    'dirreq-v2-reqs': _parse_dirreq_v2_reqs_line,
@@ -836,7 +843,7 @@ class ExtraInfoDescriptor(Descriptor):
    """

    super(ExtraInfoDescriptor, self).__init__(raw_contents, lazy_load = not validate)
    entries = _get_descriptor_components(raw_contents, validate)
    entries = _descriptor_components(raw_contents, validate)

    if validate:
      for keyword in self._required_fields():
@@ -886,19 +893,56 @@ class RelayExtraInfoDescriptor(ExtraInfoDescriptor):
  'GETINFO extra-info/digest/\*', cached descriptors, and metrics
  (`specification <https://gitweb.torproject.org/torspec.git/tree/dir-spec.txt>`_).

  :var ed25519_certificate str: base64 encoded ed25519 certificate
  :var ed25519_signature str: signature of this document using ed25519
  :var str signature: **\*** signature for this extrainfo descriptor

  **\*** attribute is required when we're parsed with validation

  .. versionchanged:: 1.5.0
    Added the ed25519_certificate and ed25519_signature attributes.
  """

  ATTRIBUTES = dict(ExtraInfoDescriptor.ATTRIBUTES, **{
    'ed25519_certificate': (None, _parse_identity_ed25519_line),
    'ed25519_signature': (None, _parse_router_sig_ed25519_line),
    'signature': (None, _parse_router_signature_line),
  })

  PARSER_FOR_LINE = dict(ExtraInfoDescriptor.PARSER_FOR_LINE, **{
    'identity-ed25519': _parse_identity_ed25519_line,
    'router-sig-ed25519': _parse_router_sig_ed25519_line,
    'router-signature': _parse_router_signature_line,
  })

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False, signing_key = None):
    base_header = (
      ('extra-info', '%s %s' % (_random_nickname(), _random_fingerprint())),
      ('published', _random_date()),
    )

    if signing_key:
      sign = True

    if sign:
      if attr and 'router-signature' in attr:
        raise ValueError('Cannot sign the descriptor if a router-signature has been provided')

      if signing_key is None:
        signing_key = create_signing_key()

      content = _descriptor_content(attr, exclude, base_header) + b'\nrouter-signature\n'
      return _append_router_signature(content, signing_key.private)
    else:
      return _descriptor_content(attr, exclude, base_header, (
        ('router-signature', _random_crypto_blob('SIGNATURE')),
      ))

  @classmethod
  def create(cls, attr = None, exclude = (), validate = True, sign = False, signing_key = None):
    return cls(cls.content(attr, exclude, sign, signing_key), validate = validate)

  @lru_cache()
  def digest(self):
    # our digest is calculated from everything except our signature
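The create() and content() pair added here can fabricate extra-info descriptors for tests. A hedged sketch of how that plausibly looks (the attribute value is a placeholder; unspecified fields are randomized)...

  import stem.descriptor
  from stem.descriptor.extrainfo_descriptor import RelayExtraInfoDescriptor

  # unsigned descriptor with a stubbed signature block
  desc = RelayExtraInfoDescriptor.create({'extra-info': 'demo ' + 'A' * 40})

  # signed variant: providing a signing key implies sign = True
  key = stem.descriptor.create_signing_key()
  signed = RelayExtraInfoDescriptor.create(signing_key = key)
  print(signed.digest())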
@@ -910,17 +954,39 @@ class RelayExtraInfoDescriptor(ExtraInfoDescriptor):
class BridgeExtraInfoDescriptor(ExtraInfoDescriptor):
  """
  Bridge extra-info descriptor (`bridge descriptor specification
  <https://collector.torproject.org/formats.html#bridge-descriptors>`_)
  <https://metrics.torproject.org/collector.html#bridge-descriptors>`_)

  :var str ed25519_certificate_hash: sha256 hash of the original identity-ed25519
  :var str router_digest_sha256: sha256 digest of this document

  .. versionchanged:: 1.5.0
    Added the ed25519_certificate_hash and router_digest_sha256 attributes.
  """

  ATTRIBUTES = dict(ExtraInfoDescriptor.ATTRIBUTES, **{
    'ed25519_certificate_hash': (None, _parse_master_key_ed25519_line),
    'router_digest_sha256': (None, _parse_router_digest_sha256_line),
    '_digest': (None, _parse_router_digest_line),
  })

  PARSER_FOR_LINE = dict(ExtraInfoDescriptor.PARSER_FOR_LINE, **{
    'master-key-ed25519': _parse_master_key_ed25519_line,
    'router-digest-sha256': _parse_router_digest_sha256_line,
    'router-digest': _parse_router_digest_line,
  })

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False):
    if sign:
      raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

    return _descriptor_content(attr, exclude, (
      ('extra-info', 'ec2bridgereaac65a3 %s' % _random_fingerprint()),
      ('published', _random_date()),
    ), (
      ('router-digest', _random_fingerprint()),
    ))

  def digest(self):
    return self._digest

@@ -1,4 +1,4 @@
# Copyright 2015, Damian Johnson and The Tor Project
# Copyright 2015-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""

@@ -9,6 +9,9 @@ Unlike other descriptor types these describe a hidden service rather than a
relay. They're created by the service, and can only be fetched via relays with
the HSDir flag.

These are only available through the Controller's
:func:`~stem.control.get_hidden_service_descriptor` method.
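A hedged usage sketch (assumes a local tor with ControlPort 9051; the address is DuckDuckGo's old v2 onion)...

  import stem.control

  with stem.control.Controller.from_port(port = 9051) as controller:
    controller.authenticate()

    # fetch the service's current descriptor from a HSDir
    desc = controller.get_hidden_service_descriptor('3g2upl4pq6kufc4m')

    for intro in desc.introduction_points():
      print('introduction point at %s:%i' % (intro.address, intro.port))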
**Module Overview:**

::
@@ -18,34 +21,34 @@ the HSDir flag.
.. versionadded:: 1.4.0
"""

# TODO: Add a description for how to retrieve them when tor supports that
# (#14847) and then update #15009.

import base64
import binascii
import collections
import hashlib
import io

import stem.prereq
import stem.util.connection
import stem.util.str_tools

from stem.descriptor import (
  PGP_BLOCK_END,
  Descriptor,
  _get_descriptor_components,
  _descriptor_content,
  _descriptor_components,
  _read_until_keywords,
  _bytes_for_block,
  _value,
  _parse_simple_line,
  _parse_timestamp_line,
  _parse_key_block,
  _random_date,
  _random_crypto_blob,
)

try:
  # added in python 3.2
if stem.prereq._is_lru_cache_available():
  from functools import lru_cache
except ImportError:
else:
  from stem.util.lru_cache import lru_cache

REQUIRED_FIELDS = (
@@ -80,7 +83,17 @@ SINGLE_INTRODUCTION_POINT_FIELDS = [
BASIC_AUTH = 1
STEALTH_AUTH = 2

IntroductionPoint = collections.namedtuple('IntroductionPoints', INTRODUCTION_POINTS_ATTR.keys())

class IntroductionPoints(collections.namedtuple('IntroductionPoints', INTRODUCTION_POINTS_ATTR.keys())):
  """
  :var str identifier: hash of this introduction point's identity key
  :var str address: address of this introduction point
  :var int port: port where this introduction point is listening
  :var str onion_key: public key for communicating with this introduction point
  :var str service_key: public key for communicating with this hidden service
  :var list intro_authentication: tuples of the form (auth_type, auth_data) for
    establishing a connection
  """


class DecryptionFailure(Exception):
@@ -153,25 +166,13 @@ def _parse_introduction_points_line(descriptor, entries):
    raise ValueError("'introduction-points' should be followed by a MESSAGE block, but was a %s" % block_type)

  descriptor.introduction_points_encoded = block_contents
  descriptor.introduction_points_auth = []  # field was never implemented in tor (#15190)

  try:
    decoded_field = _bytes_for_block(block_contents)
    descriptor.introduction_points_content = _bytes_for_block(block_contents)
  except TypeError:
    raise ValueError("'introduction-points' isn't base64 encoded content:\n%s" % block_contents)

  auth_types = []

  while decoded_field.startswith(b'service-authentication ') and b'\n' in decoded_field:
    auth_line, decoded_field = decoded_field.split(b'\n', 1)
    auth_line_comp = auth_line.split(b' ')

    if len(auth_line_comp) < 3:
      raise ValueError("Within introduction-points we expected 'service-authentication [auth_type] [auth_data]', but had '%s'" % auth_line)

    auth_types.append((auth_line_comp[1], auth_line_comp[2]))

  descriptor.introduction_points_auth = auth_types
  descriptor.introduction_points_content = decoded_field

_parse_rendezvous_service_descriptor_line = _parse_simple_line('rendezvous-service-descriptor', 'descriptor_id')
_parse_permanent_key_line = _parse_key_block('permanent-key', 'permanent_key', 'RSA PUBLIC KEY')
@@ -194,6 +195,7 @@ class HiddenServiceDescriptor(Descriptor):
  :var str introduction_points_encoded: raw introduction points blob
  :var list introduction_points_auth: **\*** tuples of the form
    (auth_method, auth_data) for our introduction_points_content
    (**deprecated**, always **[]**)
  :var bytes introduction_points_content: decoded introduction-points content
    without authentication data, if using cookie authentication this is
    encrypted
@@ -201,6 +203,14 @@ class HiddenServiceDescriptor(Descriptor):

  **\*** attribute is either required when we're parsed with validation or has
  a default value, others are left as **None** if undefined

  .. versionchanged:: 1.6.0
    Moved from the deprecated `pycrypto
    <https://www.dlitz.net/software/pycrypto/>`_ module to `cryptography
    <https://pypi.python.org/pypi/cryptography>`_ for validating signatures.

  .. versionchanged:: 1.6.0
    Added the **skip_crypto_validation** constructor argument.
  """

  ATTRIBUTES = {
@@ -227,9 +237,30 @@ class HiddenServiceDescriptor(Descriptor):
    'signature': _parse_signature_line,
  }

  def __init__(self, raw_contents, validate = False):
  @classmethod
  def content(cls, attr = None, exclude = (), sign = False):
    if sign:
      raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

    return _descriptor_content(attr, exclude, (
      ('rendezvous-service-descriptor', 'y3olqqblqw2gbh6phimfuiroechjjafa'),
      ('version', '2'),
      ('permanent-key', _random_crypto_blob('RSA PUBLIC KEY')),
      ('secret-id-part', 'e24kgecavwsznj7gpbktqsiwgvngsf4e'),
      ('publication-time', _random_date()),
      ('protocol-versions', '2,3'),
      ('introduction-points', '\n-----BEGIN MESSAGE-----\n-----END MESSAGE-----'),
    ), (
      ('signature', _random_crypto_blob('SIGNATURE')),
    ))

  @classmethod
  def create(cls, attr = None, exclude = (), validate = True, sign = False):
    return cls(cls.content(attr, exclude, sign), validate = validate, skip_crypto_validation = not sign)

  def __init__(self, raw_contents, validate = False, skip_crypto_validation = False):
    super(HiddenServiceDescriptor, self).__init__(raw_contents, lazy_load = not validate)
    entries = _get_descriptor_components(raw_contents, validate)
    entries = _descriptor_components(raw_contents, validate, non_ascii_fields = ('introduction-points'))

    if validate:
      for keyword in REQUIRED_FIELDS:
@@ -245,7 +276,7 @@ class HiddenServiceDescriptor(Descriptor):

    self._parse(entries, validate)

    if stem.prereq.is_crypto_available():
    if not skip_crypto_validation and stem.prereq.is_crypto_available():
      signed_digest = self._digest_for_signature(self.permanent_key, self.signature)
      content_digest = self._digest_for_content(b'rendezvous-service-descriptor ', b'\nsignature\n')
@@ -257,21 +288,9 @@ class HiddenServiceDescriptor(Descriptor):
  @lru_cache()
  def introduction_points(self, authentication_cookie = None):
    """
    Provided this service's introduction points. This provides a list of
    IntroductionPoint instances, which have the following attributes...
    Provides this service's introduction points.

    * **identifier** (str): hash of this introduction point's identity key
    * **address** (str): address of this introduction point
    * **port** (int): port where this introduction point is listening
    * **onion_key** (str): public key for communicating with this introduction point
    * **service_key** (str): public key for communicating with this hidden service
    * **intro_authentication** (list): tuples of the form (auth_type, auth_data)
      for establishing a connection

    :param str authentication_cookie: cookie to decrypt the introduction-points
      if it's encrypted

    :returns: **list** of IntroductionPoints instances
    :returns: **list** of :class:`~stem.descriptor.hidden_service_descriptor.IntroductionPoints`

    :raises:
      * **ValueError** if our introduction-points is malformed
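For instance, a hedged sketch of reading introduction points from a previously saved descriptor (the path is a placeholder; pass authentication_cookie if the service requires one)...

  import stem.descriptor

  for desc in stem.descriptor.parse_file('/tmp/hs_descriptor', 'hidden-service-descriptor 1.0'):
    for intro in desc.introduction_points():
      auth_types = [auth_type for auth_type, _ in intro.intro_authentication]
      print('%s:%i (auth types: %s)' % (intro.address, intro.port, auth_types))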
@@ -284,7 +303,7 @@ class HiddenServiceDescriptor(Descriptor):
      return []
    elif authentication_cookie:
      if not stem.prereq.is_crypto_available():
        raise DecryptionFailure('Decrypting introduction-points requires pycrypto')
        raise DecryptionFailure('Decrypting introduction-points requires the cryptography module')

      try:
        missing_padding = len(authentication_cookie) % 4
@@ -310,9 +329,8 @@ class HiddenServiceDescriptor(Descriptor):

  @staticmethod
  def _decrypt_basic_auth(content, authentication_cookie):
    from Crypto.Cipher import AES
    from Crypto.Util import Counter
    from Crypto.Util.number import bytes_to_long
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
    from cryptography.hazmat.backends import default_backend

    try:
      client_blocks = int(binascii.hexlify(content[1:2]), 16)
@@ -336,15 +354,15 @@ class HiddenServiceDescriptor(Descriptor):

      # try decrypting the session key

      counter = Counter.new(128, initial_value = 0)
      cipher = AES.new(authentication_cookie, AES.MODE_CTR, counter = counter)
      session_key = cipher.decrypt(encrypted_session_key)
      cipher = Cipher(algorithms.AES(authentication_cookie), modes.CTR(b'\x00' * len(iv)), default_backend())
      decryptor = cipher.decryptor()
      session_key = decryptor.update(encrypted_session_key) + decryptor.finalize()

      # attempt to decrypt the intro points with the session key

      counter = Counter.new(128, initial_value = bytes_to_long(iv))
      cipher = AES.new(session_key, AES.MODE_CTR, counter = counter)
      decrypted = cipher.decrypt(encrypted)
      cipher = Cipher(algorithms.AES(session_key), modes.CTR(iv), default_backend())
      decryptor = cipher.decryptor()
      decrypted = decryptor.update(encrypted) + decryptor.finalize()

      # check if the decryption looks correct
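The pycrypto-to-cryptography translation above follows one pattern throughout: pycrypto's 128-bit counter becomes an explicit 16-byte initial counter block for modes.CTR. A standalone sketch of that pattern (the helper name is ours, not stem's)...

  from cryptography.hazmat.backends import default_backend
  from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

  def aes_ctr_decrypt(key, counter_block, ciphertext):
    # counter_block is the 16-byte initial counter, equivalent to pycrypto's
    # Counter.new(128, initial_value = bytes_to_long(counter_block))
    decryptor = Cipher(algorithms.AES(key), modes.CTR(counter_block), default_backend()).decryptor()
    return decryptor.update(ciphertext) + decryptor.finalize()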
@@ -355,22 +373,20 @@ class HiddenServiceDescriptor(Descriptor):

  @staticmethod
  def _decrypt_stealth_auth(content, authentication_cookie):
    from Crypto.Cipher import AES
    from Crypto.Util import Counter
    from Crypto.Util.number import bytes_to_long
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
    from cryptography.hazmat.backends import default_backend

    # byte 1 = authentication type, 2-17 = input vector, 18 on = encrypted content

    iv, encrypted = content[1:17], content[17:]
    counter = Counter.new(128, initial_value = bytes_to_long(iv))
    cipher = AES.new(authentication_cookie, AES.MODE_CTR, counter = counter)
    cipher = Cipher(algorithms.AES(authentication_cookie), modes.CTR(iv), default_backend())
    decryptor = cipher.decryptor()

    return cipher.decrypt(encrypted)
    return decryptor.update(encrypted) + decryptor.finalize()

  @staticmethod
  def _parse_introduction_points(content):
    """
    Provides the parsed list of IntroductionPoint for the unencrypted content.
    Provides the parsed list of IntroductionPoints for the unencrypted content.
    """

    introduction_points = []
@@ -383,7 +399,7 @@ class HiddenServiceDescriptor(Descriptor):
        break  # reached the end

      attr = dict(INTRODUCTION_POINTS_ATTR)
      entries = _get_descriptor_components(content, False)
      entries = _descriptor_components(content, False)

      for keyword, values in list(entries.items()):
        value, block_type, block_contents = values[0]
@@ -417,6 +433,6 @@ class HiddenServiceDescriptor(Descriptor):
          auth_type, auth_data = auth_value.split(' ')[:2]
          auth_entries.append((auth_type, auth_data))

      introduction_points.append(IntroductionPoint(**attr))
      introduction_points.append(IntroductionPoints(**attr))

    return introduction_points

@@ -1,4 +1,4 @@
# Copyright 2013-2015, Damian Johnson and The Tor Project
# Copyright 2013-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""
@@ -19,7 +19,7 @@ corresponding router status entry. For added fun as of this writing the
controller doesn't even surface those router status entries
(:trac:`7953`).

For instance, here's an example that prints the nickname and fignerprints of
For instance, here's an example that prints the nickname and fingerprints of
the exit relays.

::
@@ -67,14 +67,18 @@ Doing the same is trivial with server descriptors...
import hashlib

import stem.exit_policy
import stem.prereq

from stem.descriptor import (
  Descriptor,
  _get_descriptor_components,
  _descriptor_content,
  _descriptor_components,
  _read_until_keywords,
  _value,
  _values,
  _parse_simple_line,
  _parse_protocol_line,
  _parse_key_block,
  _random_crypto_blob,
)

from stem.descriptor.router_status_entry import (
@@ -82,10 +86,9 @@ from stem.descriptor.router_status_entry import (
  _parse_p_line,
)

try:
  # added in python 3.2
if stem.prereq._is_lru_cache_available():
  from functools import lru_cache
except ImportError:
else:
  from stem.util.lru_cache import lru_cache

REQUIRED_FIELDS = (
@@ -98,6 +101,7 @@ SINGLE_FIELDS = (
  'family',
  'p',
  'p6',
  'pr',
)

@@ -159,21 +163,35 @@ def _parse_file(descriptor_file, validate = False, **kwargs):


def _parse_id_line(descriptor, entries):
  value = _value('id', entries)
  value_comp = value.split()
  identities = {}

  if len(value_comp) >= 2:
    descriptor.identifier_type = value_comp[0]
    descriptor.identifier = value_comp[1]
  else:
    raise ValueError("'id' lines should contain both the key type and digest: id %s" % value)
  for entry in _values('id', entries):
    entry_comp = entry.split()

    if len(entry_comp) >= 2:
      key_type, key_value = entry_comp[0], entry_comp[1]

      if key_type in identities:
        raise ValueError("There can only be one 'id' line per key type, but '%s' appeared multiple times" % key_type)

      descriptor.identifier_type = key_type
      descriptor.identifier = key_value
      identities[key_type] = key_value
    else:
      raise ValueError("'id' lines should contain both the key type and digest: id %s" % entry)

  descriptor.identifiers = identities


def _parse_digest(descriptor, entries):
  setattr(descriptor, 'digest', hashlib.sha256(descriptor.get_bytes()).hexdigest().upper())


_parse_digest = lambda descriptor, entries: setattr(descriptor, 'digest', hashlib.sha256(descriptor.get_bytes()).hexdigest().upper())
_parse_onion_key_line = _parse_key_block('onion-key', 'onion_key', 'RSA PUBLIC KEY')
_parse_ntor_onion_key_line = _parse_simple_line('ntor-onion-key', 'ntor_onion_key')
_parse_family_line = lambda descriptor, entries: setattr(descriptor, 'family', _value('family', entries).split(' '))
_parse_p6_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('p6', entries)))
_parse_family_line = _parse_simple_line('family', 'family', func = lambda v: v.split(' '))
_parse_p6_line = _parse_simple_line('p6', 'exit_policy_v6', func = lambda v: stem.exit_policy.MicroExitPolicy(v))
_parse_pr_line = _parse_protocol_line('pr', 'protocols')

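Since 'id' lines may now appear once per key type, the identifiers mapping supersedes the single identifier/identifier_type pair. A hedged sketch of the parsed result (the digest value is a made-up placeholder, and create() is assumed to be inherited from the Descriptor base class)...

  from stem.descriptor.microdescriptor import Microdescriptor

  desc = Microdescriptor.create({'id': 'rsa1024 AAAAAAAAAAAAAAAAAAAAAAAAAAA'})

  print(desc.identifiers)      # {'rsa1024': 'AAAAAAAAAAAAAAAAAAAAAAAAAAA'}
  print(desc.identifier_type)  # 'rsa1024', via the deprecated accessor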

class Microdescriptor(Descriptor):
@@ -192,13 +210,27 @@ class Microdescriptor(Descriptor):
  :var list family: **\*** nicknames or fingerprints of declared family
  :var stem.exit_policy.MicroExitPolicy exit_policy: **\*** relay's exit policy
  :var stem.exit_policy.MicroExitPolicy exit_policy_v6: **\*** exit policy for IPv6
  :var str identifier_type: identity digest key type
  :var str identifier: base64 encoded identity digest, this is only used for collision prevention (:trac:`11743`)
  :var dict identifiers: mapping of key types (like rsa1024 or ed25519) to
    their base64 encoded identity, this is only used for collision prevention
    (:trac:`11743`)
  :var dict protocols: mapping of protocols to their supported versions

  :var str identifier: base64 encoded identity digest (**deprecated**, use
    identifiers instead)
  :var str identifier_type: identity digest key type (**deprecated**, use
    identifiers instead)

  **\*** attribute is required when we're parsed with validation

  .. versionchanged:: 1.1.0
    Added the identifier and identifier_type attributes.

  .. versionchanged:: 1.5.0
    Added the identifiers attribute, and deprecated identifier and
    identifier_type since the field can now appear multiple times.

  .. versionchanged:: 1.6.0
    Added the protocols attribute.
  """

  ATTRIBUTES = {
@@ -208,8 +240,10 @@ class Microdescriptor(Descriptor):
    'family': ([], _parse_family_line),
    'exit_policy': (stem.exit_policy.MicroExitPolicy('reject 1-65535'), _parse_p_line),
    'exit_policy_v6': (None, _parse_p6_line),
    'identifier_type': (None, _parse_id_line),
    'identifier': (None, _parse_id_line),
    'identifier_type': (None, _parse_id_line),  # deprecated in favor of identifiers
    'identifier': (None, _parse_id_line),  # deprecated in favor of identifiers
    'identifiers': ({}, _parse_id_line),
    'protocols': ({}, _parse_pr_line),
    'digest': (None, _parse_digest),
  }

@@ -220,13 +254,23 @@ class Microdescriptor(Descriptor):
    'family': _parse_family_line,
    'p': _parse_p_line,
    'p6': _parse_p6_line,
    'pr': _parse_pr_line,
    'id': _parse_id_line,
  }

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False):
    if sign:
      raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

    return _descriptor_content(attr, exclude, (
      ('onion-key', _random_crypto_blob('RSA PUBLIC KEY')),
    ))

  def __init__(self, raw_contents, validate = False, annotations = None):
    super(Microdescriptor, self).__init__(raw_contents, lazy_load = not validate)
    self._annotation_lines = annotations if annotations else []
    entries = _get_descriptor_components(raw_contents, validate)
    entries = _descriptor_components(raw_contents, validate)

    if validate:
      self.digest = hashlib.sha256(self.get_bytes()).hexdigest().upper()
@@ -307,6 +351,9 @@ class Microdescriptor(Descriptor):
  def __eq__(self, other):
    return self._compare(other, lambda s, o: s == o)

  def __ne__(self, other):
    return not self == other

  def __lt__(self, other):
    return self._compare(other, lambda s, o: s < o)

@@ -1,14 +1,15 @@
# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""
Parsing for Tor network status documents. This supports both the v2 and v3
dir-spec. Documents can be obtained from a few sources...
`dir-spec <https://gitweb.torproject.org/torspec.git/tree/dir-spec.txt>`_.
Documents can be obtained from a few sources...

* The 'cached-consensus' file in Tor's data directory.

* Archived descriptors provided by CollecTor
  (https://collector.torproject.org/).
* Archived descriptors provided by `CollecTor
  <https://metrics.torproject.org/collector.html>`_.

* Directory authorities and mirrors via their DirPort.

@@ -19,6 +20,10 @@ dir-spec. Documents can be obtained from a few sources...
* list of :class:`stem.descriptor.router_status_entry.RouterStatusEntry`
* document footer

**For a great graphical overview see** `Jordan Wright's chart describing the
anatomy of the consensus
<https://jordan-wright.github.io/images/blog/how_tor_works/consensus.png>`_.

Of these, the router status entry section can be quite large (on the order of
hundreds of kilobytes). As such we provide a couple of methods for reading
network status documents through :func:`~stem.descriptor.__init__.parse_file`.
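For instance, a hedged sketch of reading a cached consensus entry by entry (the path assumes a default Linux tor install)...

  import stem.descriptor

  # iterate over the consensus' router status entries, one at a time
  with open('/var/lib/tor/cached-consensus', 'rb') as consensus_file:
    for router in stem.descriptor.parse_file(consensus_file, 'network-status-consensus-3 1.0'):
      print('%s (%s)' % (router.nickname, router.fingerprint))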
@@ -47,16 +52,6 @@ For more information see :func:`~stem.descriptor.__init__.DocumentHandler`...
  KeyCertificate - Certificate used to authenticate an authority
  DocumentSignature - Signature of a document by a directory authority
  DirectoryAuthority - Directory authority as defined in a v3 network status document


.. data:: PackageVersion

  Latest recommended version of a package that's available.

  :var str name: name of the package
  :var str version: latest recommended version
  :var str url: package's url
  :var dict digests: mapping of digest types to their value
"""

import collections
@@ -71,13 +66,22 @@ from stem.descriptor import (
  PGP_BLOCK_END,
  Descriptor,
  DocumentHandler,
  _get_descriptor_components,
  _descriptor_content,
  _descriptor_components,
  _read_until_keywords,
  _value,
  _parse_simple_line,
  _parse_if_present,
  _parse_timestamp_line,
  _parse_forty_character_hex,
  _parse_protocol_line,
  _parse_key_block,
  _mappings_for,
  _random_nickname,
  _random_fingerprint,
  _random_ipv4_address,
  _random_date,
  _random_crypto_blob,
)

from stem.descriptor.router_status_entry import (
@@ -86,13 +90,6 @@ from stem.descriptor.router_status_entry import (
  RouterStatusEntryMicroV3,
)

PackageVersion = collections.namedtuple('PackageVersion', [
  'name',
  'version',
  'url',
  'digests',
])

# Version 2 network status document fields, tuples of the form...
# (keyword, is_mandatory)

@@ -130,6 +127,15 @@ HEADER_STATUS_DOCUMENT_FIELDS = (
  ('package', True, True, False),
  ('known-flags', True, True, True),
  ('flag-thresholds', True, False, False),
  ('shared-rand-participate', True, False, False),
  ('shared-rand-commit', True, False, False),
  ('shared-rand-previous-value', True, True, False),
  ('shared-rand-current-value', True, True, False),
  ('bandwidth-file-headers', True, False, False),
  ('recommended-client-protocols', True, True, False),
  ('recommended-relay-protocols', True, True, False),
  ('required-client-protocols', True, True, False),
  ('required-relay-protocols', True, True, False),
  ('params', True, True, False),
)

@@ -139,9 +145,6 @@ FOOTER_STATUS_DOCUMENT_FIELDS = (
  ('directory-signature', True, True, True),
)

HEADER_FIELDS = [attr[0] for attr in HEADER_STATUS_DOCUMENT_FIELDS]
FOOTER_FIELDS = [attr[0] for attr in FOOTER_STATUS_DOCUMENT_FIELDS]

AUTH_START = 'dir-source'
ROUTERS_START = 'r'
FOOTER_START = 'directory-footer'
@@ -159,8 +162,17 @@ DEFAULT_PARAMS = {
  'cbttestfreq': 60,
  'cbtmintimeout': 2000,
  'cbtinitialtimeout': 60000,
  'cbtlearntimeout': 180,
  'cbtmaxopencircs': 10,
  'UseOptimisticData': 1,
  'Support022HiddenServices': 1,
  'usecreatefast': 1,
  'max-consensuses-age-to-cache-for-diff': 72,
  'try-diff-for-consensus-newer-than': 72,
  'onion-key-rotation-days': 28,
  'onion-key-grace-period-days': 7,
  'hs_service_max_rdv_failures': 2,
  'circ_max_cell_queue_size': 50000,
}

# KeyCertificate fields, tuple is of the form...
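These defaults back-fill a document's params when it doesn't vote on a value. A hedged sketch of the effect (assumes the create() helper this commit adds and the default_params constructor argument shown further below)...

  from stem.descriptor.networkstatus import NetworkStatusDocumentV3

  consensus = NetworkStatusDocumentV3.create()

  # params the consensus didn't specify fall back to DEFAULT_PARAMS
  print(consensus.params.get('onion-key-rotation-days'))  # 28, from the defaults

  # opt out of the back-fill to see only what the document itself said
  bare = NetworkStatusDocumentV3(consensus.get_bytes(), default_params = False)
  print(bare.params.get('onion-key-rotation-days'))  # None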
@@ -197,6 +209,8 @@ PARAM_RANGE = {
  'cbtclosequantile': (MIN_PARAM, 99),
  'cbttestfreq': (1, MAX_PARAM),
  'cbtmintimeout': (500, MAX_PARAM),
  'cbtlearntimeout': (10, 60000),
  'cbtmaxopencircs': (0, 14),
  'UseOptimisticData': (0, 1),
  'Support022HiddenServices': (0, 1),
  'usecreatefast': (0, 1),
@@ -207,9 +221,40 @@ PARAM_RANGE = {
  'GuardLifetime': (2592000, 157766400),  # min: 30 days, max: 1826 days
  'NumNTorsPerTAP': (1, 100000),
  'AllowNonearlyExtend': (0, 1),
  'AuthDirNumSRVAgreements': (1, MAX_PARAM),
  'max-consensuses-age-to-cache-for-diff': (0, 8192),
  'try-diff-for-consensus-newer-than': (0, 8192),
  'onion-key-rotation-days': (1, 90),
  'onion-key-grace-period-days': (1, 90),  # max is the highest onion-key-rotation-days
  'hs_service_max_rdv_failures': (1, 10),
  'circ_max_cell_queue_size': (1000, 4294967295),
}


class PackageVersion(collections.namedtuple('PackageVersion', ['name', 'version', 'url', 'digests'])):
  """
  Latest recommended version of a package that's available.

  :var str name: name of the package
  :var str version: latest recommended version
  :var str url: package's url
  :var dict digests: mapping of digest types to their value
  """


class SharedRandomnessCommitment(collections.namedtuple('SharedRandomnessCommitment', ['version', 'algorithm', 'identity', 'commit', 'reveal'])):
  """
  Directory authority's commitment for generating the next shared random value.

  :var int version: shared randomness protocol version
  :var str algorithm: hash algorithm used to make the commitment
  :var str identity: authority's sha1 identity fingerprint
  :var str commit: base64 encoded commitment hash to the shared random value
  :var str reveal: base64 encoded commitment to the shared random value,
    **None** if not provided
  """


def _parse_file(document_file, document_type = None, validate = False, is_microdescriptor = False, document_handler = DocumentHandler.ENTRIES, **kwargs):
  """
  Parses a network status and iterates over the RouterStatusEntry in it. The
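For reference, a hedged sketch of how a consensus 'package' line maps onto this namedtuple (the line itself is illustrative, not from a real consensus)...

  from stem.descriptor.networkstatus import PackageVersion

  # "package" lines have the form: PackageName Version URL Digests...
  line = 'tbb 7.5 https://www.torproject.org/download sha256=1234abcd'

  name, version, url, digest_blob = line.split(' ', 3)
  digests = dict(entry.split('=', 1) for entry in digest_blob.split(' '))

  pkg = PackageVersion(name, version, url, digests)
  print(pkg.digests['sha256'])  # 1234abcd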
@@ -361,10 +406,10 @@ _parse_network_status_version_line = _parse_version_line('network-status-version
_parse_fingerprint_line = _parse_forty_character_hex('fingerprint', 'fingerprint')
_parse_contact_line = _parse_simple_line('contact', 'contact')
_parse_dir_signing_key_line = _parse_key_block('dir-signing-key', 'signing_key', 'RSA PUBLIC KEY')
_parse_client_versions_line = lambda descriptor, entries: setattr(descriptor, 'client_versions', _value('client-versions', entries).split(','))
_parse_server_versions_line = lambda descriptor, entries: setattr(descriptor, 'server_versions', _value('server-versions', entries).split(','))
_parse_client_versions_line = _parse_simple_line('client-versions', 'client_versions', func = lambda v: v.split(','))
_parse_server_versions_line = _parse_simple_line('server-versions', 'server_versions', func = lambda v: v.split(','))
_parse_published_line = _parse_timestamp_line('published', 'published')
_parse_dir_options_line = lambda descriptor, entries: setattr(descriptor, 'options', _value('dir-options', entries).split())
_parse_dir_options_line = _parse_simple_line('dir-options', 'options', func = lambda v: v.split())
_parse_directory_signature_line = _parse_key_block('directory-signature', 'signature', 'SIGNATURE', value_attribute = 'signing_authority')

@@ -428,6 +473,22 @@ class NetworkStatusDocumentV2(NetworkStatusDocument):
    'directory-signature': _parse_directory_signature_line,
  }

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False):
    if sign:
      raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

    return _descriptor_content(attr, exclude, (
      ('network-status-version', '2'),
      ('dir-source', '%s %s 80' % (_random_ipv4_address(), _random_ipv4_address())),
      ('fingerprint', _random_fingerprint()),
      ('contact', 'arma at mit dot edu'),
      ('published', _random_date()),
      ('dir-signing-key', _random_crypto_blob('RSA PUBLIC KEY')),
    ), (
      ('directory-signature', 'moria2' + _random_crypto_blob('SIGNATURE')),
    ))

  def __init__(self, raw_content, validate = False):
    super(NetworkStatusDocumentV2, self).__init__(raw_content, lazy_load = not validate)

@@ -450,7 +511,7 @@ class NetworkStatusDocumentV2(NetworkStatusDocument):

    self.routers = dict((desc.fingerprint, desc) for desc in router_iter)

    entries = _get_descriptor_components(document_content + b'\n' + document_file.read(), validate)
    entries = _descriptor_components(document_content + b'\n' + document_file.read(), validate)

    if validate:
      self._check_constraints(entries)
@@ -582,26 +643,20 @@ def _parse_header_flag_thresholds_line(descriptor, entries):

  value, thresholds = _value('flag-thresholds', entries).strip(), {}

  if value:
    for entry in value.split(' '):
      if '=' not in entry:
        raise ValueError("Network status document's 'flag-thresholds' line is expected to be space separated key=value mappings, got: flag-thresholds %s" % value)
  for key, val in _mappings_for('flag-thresholds', value):
    try:
      if val.endswith('%'):
        # opting for string manipulation rather than just
        # 'float(entry_value) / 100' because floating point arithmetic
        # will lose precision

      entry_key, entry_value = entry.split('=', 1)

      try:
        if entry_value.endswith('%'):
          # opting for string manipulation rather than just
          # 'float(entry_value) / 100' because floating point arithmetic
          # will lose precision

          thresholds[entry_key] = float('0.' + entry_value[:-1].replace('.', '', 1))
        elif '.' in entry_value:
          thresholds[entry_key] = float(entry_value)
        else:
          thresholds[entry_key] = int(entry_value)
      except ValueError:
        raise ValueError("Network status document's 'flag-thresholds' line is expected to have float values, got: flag-thresholds %s" % value)
        thresholds[key] = float('0.' + val[:-1].replace('.', '', 1))
      elif '.' in val:
        thresholds[key] = float(val)
      else:
        thresholds[key] = int(val)
    except ValueError:
      raise ValueError("Network status document's 'flag-thresholds' line is expected to have float values, got: flag-thresholds %s" % value)

  descriptor.flag_thresholds = thresholds
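The precision comment above is about rounding once versus twice. A hedged sketch of the distinction (exact reprs depend on the platform's doubles, so the second result is not asserted)...

  val = '45.3%'

  one_rounding = float('0.' + val[:-1].replace('.', '', 1))  # float('0.453')
  two_roundings = float(val[:-1]) / 100                      # float('45.3'), then / 100

  # the string route performs a single decimal-to-double conversion, so it
  # always yields the double nearest to the written digits; the division
  # route rounds twice and can drift by an ulp for some inputs
  print(one_rounding == 0.453)  # True by construction
  print(two_roundings)          # may or may not equal 0.453, depending on the value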
@@ -617,11 +672,6 @@ def _parse_header_parameters_line(descriptor, entries):

  value = _value('params', entries)

  # should only appear in consensus-method 7 or later

  if not descriptor.meets_consensus_method(7):
    raise ValueError("A network status document's 'params' line should only appear in consensus-method 7 or later")

  if value != '':
    descriptor.params = _parse_int_mappings('params', value, True)
    descriptor._check_params_constraints()
@@ -661,7 +711,7 @@ def _parse_package_line(descriptor, entries):
  package_versions = []

  for value, _, _ in entries['package']:
    value_comp = value.split()
    value_comp = value.split(' ', 3)

    if len(value_comp) < 3:
      raise ValueError("'package' must at least have a 'PackageName Version URL': %s" % value)
@@ -669,33 +719,98 @@ def _parse_package_line(descriptor, entries):
    name, version, url = value_comp[:3]
    digests = {}

    for digest_entry in value_comp[3:]:
      if '=' not in digest_entry:
        raise ValueError("'package' digest entries should be 'key=value' pairs: %s" % value)

      key, value = digest_entry.split('=', 1)
      digests[key] = value
    if len(value_comp) == 4:
      for key, val in _mappings_for('package', value_comp[3]):
        digests[key] = val

    package_versions.append(PackageVersion(name, version, url, digests))

  descriptor.packages = package_versions


def _parsed_shared_rand_commit(descriptor, entries):
  # "shared-rand-commit" Version AlgName Identity Commit [Reveal]

  commitments = []

  for value, _, _ in entries['shared-rand-commit']:
    value_comp = value.split()

    if len(value_comp) < 4:
      raise ValueError("'shared-rand-commit' must at least have a 'Version AlgName Identity Commit': %s" % value)

    version, algorithm, identity, commit = value_comp[:4]
    reveal = value_comp[4] if len(value_comp) >= 5 else None

    if not version.isdigit():
      raise ValueError("The version on our 'shared-rand-commit' line wasn't an integer: %s" % value)

    commitments.append(SharedRandomnessCommitment(int(version), algorithm, identity, commit, reveal))

  descriptor.shared_randomness_commitments = commitments


def _parse_shared_rand_previous_value(descriptor, entries):
  # "shared-rand-previous-value" NumReveals Value

  value = _value('shared-rand-previous-value', entries)
  value_comp = value.split(' ')

  if len(value_comp) == 2 and value_comp[0].isdigit():
    descriptor.shared_randomness_previous_reveal_count = int(value_comp[0])
    descriptor.shared_randomness_previous_value = value_comp[1]
  else:
    raise ValueError("A network status document's 'shared-rand-previous-value' line must be a pair of values, the first an integer but was '%s'" % value)


def _parse_shared_rand_current_value(descriptor, entries):
  # "shared-rand-current-value" NumReveals Value

  value = _value('shared-rand-current-value', entries)
  value_comp = value.split(' ')

  if len(value_comp) == 2 and value_comp[0].isdigit():
    descriptor.shared_randomness_current_reveal_count = int(value_comp[0])
    descriptor.shared_randomness_current_value = value_comp[1]
  else:
    raise ValueError("A network status document's 'shared-rand-current-value' line must be a pair of values, the first an integer but was '%s'" % value)


def _parse_bandwidth_file_headers(descriptor, entries):
  # "bandwidth-file-headers" KeyValues
  # KeyValues ::= "" | KeyValue | KeyValues SP KeyValue
  # KeyValue ::= Keyword '=' Value
  # Value ::= ArgumentChar+

  value = _value('bandwidth-file-headers', entries)
  results = {}

  for key, val in _mappings_for('bandwidth-file-headers', value):
    results[key] = val

  descriptor.bandwidth_file_headers = results


_parse_header_valid_after_line = _parse_timestamp_line('valid-after', 'valid_after')
_parse_header_fresh_until_line = _parse_timestamp_line('fresh-until', 'fresh_until')
_parse_header_valid_until_line = _parse_timestamp_line('valid-until', 'valid_until')
_parse_header_client_versions_line = _parse_versions_line('client-versions', 'client_versions')
_parse_header_server_versions_line = _parse_versions_line('server-versions', 'server_versions')
_parse_header_known_flags_line = lambda descriptor, entries: setattr(descriptor, 'known_flags', [entry for entry in _value('known-flags', entries).split(' ') if entry])
_parse_footer_bandwidth_weights_line = lambda descriptor, entries: setattr(descriptor, 'bandwidth_weights', _parse_int_mappings('bandwidth-weights', _value('bandwidth-weights', entries), True))
_parse_header_known_flags_line = _parse_simple_line('known-flags', 'known_flags', func = lambda v: [entry for entry in v.split(' ') if entry])
_parse_footer_bandwidth_weights_line = _parse_simple_line('bandwidth-weights', 'bandwidth_weights', func = lambda v: _parse_int_mappings('bandwidth-weights', v, True))
_parse_shared_rand_participate_line = _parse_if_present('shared-rand-participate', 'is_shared_randomness_participate')
_parse_recommended_client_protocols_line = _parse_protocol_line('recommended-client-protocols', 'recommended_client_protocols')
_parse_recommended_relay_protocols_line = _parse_protocol_line('recommended-relay-protocols', 'recommended_relay_protocols')
_parse_required_client_protocols_line = _parse_protocol_line('required-client-protocols', 'required_client_protocols')
_parse_required_relay_protocols_line = _parse_protocol_line('required-relay-protocols', 'required_relay_protocols')

class NetworkStatusDocumentV3(NetworkStatusDocument):
  """
  Version 3 network status document. This could be either a vote or consensus.

  :var tuple routers: :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3`
    contained in the document
  :var dict routers: fingerprint to :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3`
    mapping for relays contained in the document

  :var int version: **\*** document version
  :var str version_flavor: **\*** flavor associated with the document (such as 'microdesc')
@@ -725,17 +840,59 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):
  :var int consensus_method: method version used to generate this consensus
  :var dict bandwidth_weights: dict of weight(str) => value(int) mappings

  :var int shared_randomness_current_reveal_count: number of commitments
    used to generate the current shared random value
  :var str shared_randomness_current_value: base64 encoded current shared
    random value

  :var int shared_randomness_previous_reveal_count: number of commitments
    used to generate the last shared random value
  :var str shared_randomness_previous_value: base64 encoded last shared random
    value

  **Vote Attributes:**

  :var list consensus_methods: list of ints for the supported method versions
  :var datetime published: time when the document was published
  :var dict flag_thresholds: **\*** mapping of internal performance thresholds used while making the vote, values are **ints** or **floats**

  :var dict recommended_client_protocols: recommended protocols for clients
  :var dict recommended_relay_protocols: recommended protocols for relays
  :var dict required_client_protocols: required protocols for clients
  :var dict required_relay_protocols: required protocols for relays
  :var dict bandwidth_file_headers: headers from the bandwidth authority that
    generated this vote

  **\*** attribute is either required when we're parsed with validation or has
  a default value, others are left as None if undefined

  .. versionchanged:: 1.4.0
    Added the packages attribute.

  .. versionchanged:: 1.5.0
    Added the is_shared_randomness_participate, shared_randomness_commitments,
    shared_randomness_previous_reveal_count,
    shared_randomness_previous_value,
    shared_randomness_current_reveal_count, and
    shared_randomness_current_value attributes.

  .. versionchanged:: 1.6.0
    Added the recommended_client_protocols, recommended_relay_protocols,
    required_client_protocols, and required_relay_protocols attributes.

  .. versionchanged:: 1.6.0
    The is_shared_randomness_participate and shared_randomness_commitments
    were misdocumented in the tor spec and as such never set. They're now an
    attribute of votes in the **directory_authorities**.

  .. versionchanged:: 1.7.0
    The shared_randomness_current_reveal_count and
    shared_randomness_previous_reveal_count attributes were undocumented and
    not provided properly if retrieved before their shared_randomness_*_value
    counterpart.

  .. versionchanged:: 1.7.0
    Added the bandwidth_file_headers attribute.
  """

  ATTRIBUTES = {
@@ -757,7 +914,16 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):
    'packages': ([], _parse_package_line),
    'known_flags': ([], _parse_header_known_flags_line),
    'flag_thresholds': ({}, _parse_header_flag_thresholds_line),
    'recommended_client_protocols': ({}, _parse_recommended_client_protocols_line),
    'recommended_relay_protocols': ({}, _parse_recommended_relay_protocols_line),
    'required_client_protocols': ({}, _parse_required_client_protocols_line),
    'required_relay_protocols': ({}, _parse_required_relay_protocols_line),
    'params': ({}, _parse_header_parameters_line),
    'shared_randomness_previous_reveal_count': (None, _parse_shared_rand_previous_value),
    'shared_randomness_previous_value': (None, _parse_shared_rand_previous_value),
    'shared_randomness_current_reveal_count': (None, _parse_shared_rand_current_value),
    'shared_randomness_current_value': (None, _parse_shared_rand_current_value),
    'bandwidth_file_headers': ({}, _parse_bandwidth_file_headers),

    'signatures': ([], _parse_footer_directory_signature_line),
    'bandwidth_weights': ({}, _parse_footer_bandwidth_weights_line),
@@ -778,7 +944,14 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):
    'package': _parse_package_line,
    'known-flags': _parse_header_known_flags_line,
    'flag-thresholds': _parse_header_flag_thresholds_line,
    'recommended-client-protocols': _parse_recommended_client_protocols_line,
    'recommended-relay-protocols': _parse_recommended_relay_protocols_line,
    'required-client-protocols': _parse_required_client_protocols_line,
    'required-relay-protocols': _parse_required_relay_protocols_line,
    'params': _parse_header_parameters_line,
    'shared-rand-previous-value': _parse_shared_rand_previous_value,
    'shared-rand-current-value': _parse_shared_rand_current_value,
    'bandwidth-file-headers': _parse_bandwidth_file_headers,
  }

  FOOTER_PARSER_FOR_LINE = {
@ -787,6 +960,85 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):
|
|||
'directory-signature': _parse_footer_directory_signature_line,
|
||||
}
|
||||
|
||||
@classmethod
def content(cls, attr = None, exclude = (), sign = False, authorities = None, routers = None):
if sign:
raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

attr = {} if attr is None else dict(attr)
is_vote = attr.get('vote-status') == 'vote'

if is_vote:
extra_defaults = {'consensus-methods': '1 9', 'published': _random_date()}
else:
extra_defaults = {'consensus-method': '9'}

if is_vote and authorities is None:
authorities = [DirectoryAuthority.create(is_vote = is_vote)]

for k, v in extra_defaults.items():
if exclude and k in exclude:
continue  # explicitly excluding this field
elif k not in attr:
attr[k] = v

desc_content = _descriptor_content(attr, exclude, (
('network-status-version', '3'),
('vote-status', 'consensus'),
('consensus-methods', None),
('consensus-method', None),
('published', None),
('valid-after', _random_date()),
('fresh-until', _random_date()),
('valid-until', _random_date()),
('voting-delay', '300 300'),
('client-versions', None),
('server-versions', None),
('package', None),
('known-flags', 'Authority BadExit Exit Fast Guard HSDir Named Running Stable Unnamed V2Dir Valid'),
('params', None),
), (
('directory-footer', ''),
('bandwidth-weights', None),
('directory-signature', '%s %s%s' % (_random_fingerprint(), _random_fingerprint(), _random_crypto_blob('SIGNATURE'))),
))

# inject the authorities and/or routers between the header and footer

if authorities:
if b'directory-footer' in desc_content:
footer_div = desc_content.find(b'\ndirectory-footer') + 1
elif b'directory-signature' in desc_content:
footer_div = desc_content.find(b'\ndirectory-signature') + 1
else:
if routers:
desc_content += b'\n'

footer_div = len(desc_content) + 1

authority_content = stem.util.str_tools._to_bytes('\n'.join([str(a) for a in authorities]) + '\n')
desc_content = desc_content[:footer_div] + authority_content + desc_content[footer_div:]

if routers:
if b'directory-footer' in desc_content:
footer_div = desc_content.find(b'\ndirectory-footer') + 1
elif b'directory-signature' in desc_content:
footer_div = desc_content.find(b'\ndirectory-signature') + 1
else:
if routers:
desc_content += b'\n'

footer_div = len(desc_content) + 1

router_content = stem.util.str_tools._to_bytes('\n'.join([str(r) for r in routers]) + '\n')
desc_content = desc_content[:footer_div] + router_content + desc_content[footer_div:]

return desc_content

@classmethod
def create(cls, attr = None, exclude = (), validate = True, sign = False, authorities = None, routers = None):
return cls(cls.content(attr, exclude, sign, authorities, routers), validate = validate)
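
A minimal sketch of how these factory helpers might be used, for instance to
fabricate a small consensus for a test fixture (the attribute values here are
randomized placeholders, not real network data)...

::

  from stem.descriptor.networkstatus import NetworkStatusDocumentV3
  from stem.descriptor.router_status_entry import RouterStatusEntryV3

  # build a consensus containing a single randomly generated relay
  consensus = NetworkStatusDocumentV3.create(routers = [RouterStatusEntryV3.create()])

  print(consensus.valid_after)
  print(list(consensus.routers.keys()))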
def __init__(self, raw_content, validate = False, default_params = True):
"""
Parse a v3 network status document.

@@ -802,6 +1054,13 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

super(NetworkStatusDocumentV3, self).__init__(raw_content, lazy_load = not validate)
document_file = io.BytesIO(raw_content)

# TODO: Tor misdocumented these as being in the header rather than the
# authority section. As such these have never been set but we need the
# attributes for stem 1.5 compatibility. Drop these in 2.0.

self.is_shared_randomness_participate = False
self.shared_randomness_commitments = []

self._default_params = default_params
self._header(document_file, validate)

@@ -829,6 +1088,39 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

self.routers = dict((desc.fingerprint, desc) for desc in router_iter)
self._footer(document_file, validate)
def validate_signatures(self, key_certs):
"""
Validates that we're properly signed by the signing certificates.

.. versionadded:: 1.6.0

:param list key_certs: :class:`~stem.descriptor.networkstatus.KeyCertificate`
entries to validate the consensus against

:raises: **ValueError** if an insufficient number of valid signatures are present.
"""

# sha1 hash of the body and header

local_digest = self._digest_for_content(b'network-status-version', b'directory-signature ')

valid_digests, total_digests = 0, 0
required_digests = len(self.signatures) / 2.0
signing_keys = dict([(cert.fingerprint, cert.signing_key) for cert in key_certs])

for sig in self.signatures:
if sig.identity not in signing_keys:
continue

signed_digest = self._digest_for_signature(signing_keys[sig.identity], sig.signature)
total_digests += 1

if signed_digest == local_digest:
valid_digests += 1

if valid_digests < required_digests:
raise ValueError('Network Status Document has %i valid signatures out of %i total, needed %i' % (valid_digests, total_digests, required_digests))
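
In practice this pairs with the key certificates the directory authorities
publish. A rough sketch, assuming a cached consensus on disk and that this
stem version provides stem.descriptor.remote.get_key_certificates()...

::

  import stem.descriptor
  import stem.descriptor.remote

  consensus = next(stem.descriptor.parse_file(
    '/var/lib/tor/cached-consensus',
    descriptor_type = 'network-status-consensus-3 1.0',
    document_handler = stem.descriptor.DocumentHandler.DOCUMENT,
  ))

  key_certs = list(stem.descriptor.remote.get_key_certificates())
  consensus.validate_signatures(key_certs)  # raises ValueError if undersigned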
def get_unrecognized_lines(self):
if self._lazy_loading:
self._parse(self._header_entries, False, parser_for_line = self.HEADER_PARSER_FOR_LINE)

@@ -863,13 +1155,14 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

def _header(self, document_file, validate):
content = bytes.join(b'', _read_until_keywords((AUTH_START, ROUTERS_START, FOOTER_START), document_file))
entries = _get_descriptor_components(content, validate)
entries = _descriptor_components(content, validate)
header_fields = [attr[0] for attr in HEADER_STATUS_DOCUMENT_FIELDS]

if validate:
# all known header fields can only appear once except...

for keyword, values in list(entries.items()):
if len(values) > 1 and keyword in HEADER_FIELDS and keyword != 'package':
if len(values) > 1 and keyword in header_fields and keyword != 'package' and keyword != 'shared-rand-commit':
raise ValueError("Network status documents can only have a single '%s' line, got %i" % (keyword, len(values)))

if self._default_params:
@@ -877,8 +1170,12 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

self._parse(entries, validate, parser_for_line = self.HEADER_PARSER_FOR_LINE)

# should only appear in consensus-method 7 or later

if not self.meets_consensus_method(7) and 'params' in list(entries.keys()):
raise ValueError("A network status document's 'params' line should only appear in consensus-method 7 or later")

_check_for_missing_and_disallowed_fields(self, entries, HEADER_STATUS_DOCUMENT_FIELDS)
_check_for_misordered_fields(entries, HEADER_FIELDS)

# default consensus_method and consensus_methods based on if we're a consensus or vote
@@ -891,14 +1188,15 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

self._entries.update(entries)

def _footer(self, document_file, validate):
entries = _get_descriptor_components(document_file.read(), validate)
entries = _descriptor_components(document_file.read(), validate)
footer_fields = [attr[0] for attr in FOOTER_STATUS_DOCUMENT_FIELDS]

if validate:
for keyword, values in list(entries.items()):
# all known footer fields can only appear once except...
# * 'directory-signature' in a consensus

if len(values) > 1 and keyword in FOOTER_FIELDS:
if len(values) > 1 and keyword in footer_fields:
if not (keyword == 'directory-signature' and self.is_consensus):
raise ValueError("Network status documents can only have a single '%s' line, got %i" % (keyword, len(values)))

@@ -917,7 +1215,6 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

raise ValueError("Network status document's footer should start with a 'directory-signature' line prior to consensus-method 9")

_check_for_missing_and_disallowed_fields(self, entries, FOOTER_STATUS_DOCUMENT_FIELDS)
_check_for_misordered_fields(entries, FOOTER_FIELDS)
else:
self._footer_entries = entries
self._entries.update(entries)
@@ -946,6 +1243,9 @@ class NetworkStatusDocumentV3(NetworkStatusDocument):

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)

@@ -986,71 +1286,32 @@ def _check_for_missing_and_disallowed_fields(document, entries, fields):

raise ValueError("Network status document has fields that shouldn't appear in this document type or version: %s" % ', '.join(disallowed_fields))
def _check_for_misordered_fields(entries, expected):
"""
To be valid a network status document's fields need to appear in a specific
order. Checks that known fields appear in that order (unrecognized fields
are ignored).

:param dict entries: ordered keyword/value mappings of the header or footer
:param list expected: ordered list of expected fields (either
**HEADER_FIELDS** or **FOOTER_FIELDS**)

:raises: **ValueError** if entries aren't properly ordered
"""

# Earlier validation has ensured that our fields either belong to our
# document type or are unknown. Remove the unknown fields since they
# reflect a spec change and can appear anywhere in the document.

actual = [field for field in entries.keys() if field in expected]

# Narrow the expected to just what we have. If the lists then match then the
# order's valid.

expected = [field for field in expected if field in actual]

if actual != expected:
actual_label = ', '.join(actual)
expected_label = ', '.join(expected)
raise ValueError("The fields in a section of the document are misordered. It should be '%s' but was '%s'" % (expected_label, actual_label))
def _parse_int_mappings(keyword, value, validate):
# Parse a series of 'key=value' entries, checking the following:
# - values are integers
# - keys are sorted in lexical order

results, seen_keys = {}, []
for entry in value.split(' '):
error_template = "Unable to parse network status document's '%s' line (%%s): %s'" % (keyword, value)

for key, val in _mappings_for(keyword, value):
if validate:
# parameters should be in ascending order by their key
for prior_key in seen_keys:
if prior_key > key:
raise ValueError(error_template % 'parameters must be sorted by their key')

try:
if '=' not in entry:
raise ValueError("must only have 'key=value' entries")
# the int() function accepts things like '+123', but we don't want to

entry_key, entry_value = entry.split('=', 1)
if val.startswith('+'):
raise ValueError()

try:
# the int() function accepts things like '+123', but we don't want to
if entry_value.startswith('+'):
raise ValueError()
results[key] = int(val)
except ValueError:
raise ValueError(error_template % ("'%s' is a non-numeric value" % val))

entry_value = int(entry_value)
except ValueError:
raise ValueError("'%s' is a non-numeric value" % entry_value)

if validate:
# parameters should be in ascending order by their key
for prior_key in seen_keys:
if prior_key > entry_key:
raise ValueError('parameters must be sorted by their key')

results[entry_key] = entry_value
seen_keys.append(entry_key)
except ValueError as exc:
if not validate:
continue

raise ValueError("Unable to parse network status document's '%s' line (%s): %s'" % (keyword, exc, value))
seen_keys.append(key)

return results
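
The hunk above interleaves the old inline parsing with the new
_mappings_for() based version; the rules themselves (integer values, no
leading '+', keys in ascending order) are unchanged. A standalone
illustration of those rules in plain Python, not stem's internal API...

::

  def parse_int_mappings(value):
    results, seen_keys = {}, []

    for entry in value.split(' '):
      if '=' not in entry:
        raise ValueError("must only have 'key=value' entries")

      key, val = entry.split('=', 1)

      if any(prior > key for prior in seen_keys):
        raise ValueError('parameters must be sorted by their key')

      try:
        if val.startswith('+'):
          raise ValueError()  # int() accepts '+123', the spec does not

        results[key] = int(val)
      except ValueError:
        raise ValueError("'%s' is a non-numeric value" % val)

      seen_keys.append(key)

    return results

  print(parse_int_mappings('circwindow=80 refuseunknownexits=1'))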
@@ -1120,11 +1381,31 @@ class DirectoryAuthority(Descriptor):

:var stem.descriptor.networkstatus.KeyCertificate key_certificate: **\***
authority's key certificate

:var bool is_shared_randomness_participate: **\*** **True** if this authority
participates in establishing a shared random value, **False** otherwise
:var list shared_randomness_commitments: **\*** list of
:data:`~stem.descriptor.networkstatus.SharedRandomnessCommitment` entries
:var int shared_randomness_previous_reveal_count: number of commitments
used to generate the last shared random value
:var str shared_randomness_previous_value: base64 encoded last shared random
value
:var int shared_randomness_current_reveal_count: number of commitments
used to generate the current shared random value
:var str shared_randomness_current_value: base64 encoded current shared
random value

**\*** mandatory attribute

.. versionchanged:: 1.4.0
Renamed our 'fingerprint' attribute to 'v3ident' (prior attribute exists
for backward compatibility, but is deprecated).

.. versionchanged:: 1.6.0
Added the is_shared_randomness_participate, shared_randomness_commitments,
shared_randomness_previous_reveal_count, shared_randomness_previous_value,
shared_randomness_current_reveal_count, and
shared_randomness_current_value attributes.
"""
ATTRIBUTES = {

@@ -1138,6 +1419,12 @@ class DirectoryAuthority(Descriptor):

'contact': (None, _parse_contact_line),
'vote_digest': (None, _parse_vote_digest_line),
'legacy_dir_key': (None, _parse_legacy_dir_key_line),
'is_shared_randomness_participate': (False, _parse_shared_rand_participate_line),
'shared_randomness_commitments': ([], _parsed_shared_rand_commit),
'shared_randomness_previous_reveal_count': (None, _parse_shared_rand_previous_value),
'shared_randomness_previous_value': (None, _parse_shared_rand_previous_value),
'shared_randomness_current_reveal_count': (None, _parse_shared_rand_current_value),
'shared_randomness_current_value': (None, _parse_shared_rand_current_value),
}

PARSER_FOR_LINE = {

@@ -1145,8 +1432,38 @@ class DirectoryAuthority(Descriptor):

'contact': _parse_contact_line,
'legacy-dir-key': _parse_legacy_dir_key_line,
'vote-digest': _parse_vote_digest_line,
'shared-rand-participate': _parse_shared_rand_participate_line,
'shared-rand-commit': _parsed_shared_rand_commit,
'shared-rand-previous-value': _parse_shared_rand_previous_value,
'shared-rand-current-value': _parse_shared_rand_current_value,
}

@classmethod
def content(cls, attr = None, exclude = (), sign = False, is_vote = False):
if sign:
raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

attr = {} if attr is None else dict(attr)

# include mandatory 'vote-digest' if a consensus

if not is_vote and not ('vote-digest' in attr or (exclude and 'vote-digest' in exclude)):
attr['vote-digest'] = _random_fingerprint()

content = _descriptor_content(attr, exclude, (
('dir-source', '%s %s no.place.com %s 9030 9090' % (_random_nickname(), _random_fingerprint(), _random_ipv4_address())),
('contact', 'Mike Perry <email>'),
))

if is_vote:
content += b'\n' + KeyCertificate.content()

return content

@classmethod
def create(cls, attr = None, exclude = (), validate = True, sign = False, is_vote = False):
return cls(cls.content(attr, exclude, sign, is_vote), validate = validate, is_vote = is_vote)
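
A sketch of fabricating an authority entry with these helpers, say for a unit
test (the dir-source values are randomized, and the contact shown is just a
placeholder)...

::

  from stem.descriptor.networkstatus import DirectoryAuthority

  # consensus flavored entry, so a 'vote-digest' line gets filled in for us
  authority = DirectoryAuthority.create({'contact': 'John Doe <john@example.com>'})

  print(authority.nickname)
  print(authority.vote_digest)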
def __init__(self, raw_content, validate = False, is_vote = False):
"""
Parse a directory authority entry in a v3 network status document.

@@ -1171,7 +1488,7 @@ class DirectoryAuthority(Descriptor):

else:
self.key_certificate = None

entries = _get_descriptor_components(content, validate)
entries = _descriptor_components(content, validate)

if validate and 'dir-source' != list(entries.keys())[0]:
raise ValueError("Authority entries are expected to start with a 'dir-source' line:\n%s" % (content))
@@ -1233,9 +1550,15 @@ class DirectoryAuthority(Descriptor):

return method(str(self).strip(), str(other).strip())

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)

@@ -1251,7 +1574,7 @@ def _parse_dir_address_line(descriptor, entries):

if ':' not in value:
raise ValueError("Key certificate's 'dir-address' is expected to be of the form ADDRESS:PORT: dir-address %s" % value)

address, dirport = value.split(':', 1)
address, dirport = value.rsplit(':', 1)

if not stem.util.connection.is_valid_ipv4_address(address):
raise ValueError("Key certificate's address isn't a valid IPv4 address: dir-address %s" % value)
@@ -1315,9 +1638,25 @@ class KeyCertificate(Descriptor):

'dir-key-certification': _parse_dir_key_certification_line,
}

@classmethod
def content(cls, attr = None, exclude = (), sign = False):
if sign:
raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

return _descriptor_content(attr, exclude, (
('dir-key-certificate-version', '3'),
('fingerprint', _random_fingerprint()),
('dir-key-published', _random_date()),
('dir-key-expires', _random_date()),
('dir-identity-key', _random_crypto_blob('RSA PUBLIC KEY')),
('dir-signing-key', _random_crypto_blob('RSA PUBLIC KEY')),
), (
('dir-key-certification', _random_crypto_blob('SIGNATURE')),
))

def __init__(self, raw_content, validate = False):
super(KeyCertificate, self).__init__(raw_content, lazy_load = not validate)
entries = _get_descriptor_components(raw_content, validate)
entries = _descriptor_components(raw_content, validate)

if validate:
if 'dir-key-certificate-version' != list(entries.keys())[0]:
@@ -1346,9 +1685,15 @@ class KeyCertificate(Descriptor):

return method(str(self).strip(), str(other).strip())

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)

@@ -1395,9 +1740,15 @@ class DocumentSignature(object):

return method(True, True)  # we're equal

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)

@@ -1410,8 +1761,8 @@ class BridgeNetworkStatusDocument(NetworkStatusDocument):

Network status document containing bridges. This is only available through
the metrics site.

:var tuple routers: :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV2`
contained in the document
:var dict routers: fingerprint to :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3`
mapping for relays contained in the document
:var datetime published: time when the document was published
"""
@@ -1,4 +1,4 @@

# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""

@@ -89,10 +89,10 @@ except ImportError:

import stem.descriptor
import stem.prereq
import stem.util
import stem.util.str_tools
import stem.util.system

from stem import str_type

# flag to indicate when the reader thread is out of descriptor files to read
FINISHED = 'DONE'

@@ -179,9 +179,9 @@ def load_processed_files(path):

processed_files = {}

with open(path) as input_file:
with open(path, 'rb') as input_file:
for line in input_file.readlines():
line = line.strip()
line = stem.util.str_tools._to_unicode(line.strip())

if not line:
continue  # skip blank lines
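
These two helpers round-trip: the file they manage maps descriptor paths to
last-modified unix timestamps. A small sketch (the paths are illustrative)...

::

  from stem.descriptor.reader import load_processed_files, save_processed_files

  processed = {'/home/atagar/descriptors/consensus.txt': 1518052800}
  save_processed_files('/tmp/processed_files', processed)

  print(load_processed_files('/tmp/processed_files'))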
@@ -218,6 +218,7 @@ def save_processed_files(path, processed_files):

"""

# makes the parent directory if it doesn't already exist

try:
path_dir = os.path.dirname(path)

@@ -264,10 +265,7 @@ class DescriptorReader(object):

"""

def __init__(self, target, validate = False, follow_links = False, buffer_size = 100, persistence_path = None, document_handler = stem.descriptor.DocumentHandler.ENTRIES, **kwargs):
if isinstance(target, (bytes, str_type)):
self._targets = [target]
else:
self._targets = target
self._targets = [target] if stem.util._is_str(target) else target

# expand any relative paths we got
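
Thanks to the string check above, either a single path or a list of paths
works as the target. Typical usage, per the reader's existing API (the
directory is illustrative)...

::

  from stem.descriptor.reader import DescriptorReader

  # reads every descriptor file under this directory
  with DescriptorReader(['/home/atagar/descriptor_archive']) as reader:
    for descriptor in reader:
      print(descriptor)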
@@ -388,7 +386,7 @@ class DescriptorReader(object):

raise ValueError('Already running, you need to call stop() first')
else:
self._is_stopped.clear()
self._reader_thread = threading.Thread(target = self._read_descriptor_files, name='Descriptor Reader')
self._reader_thread = threading.Thread(target = self._read_descriptor_files, name='Descriptor reader')
self._reader_thread.setDaemon(True)
self._reader_thread.start()

@@ -514,7 +512,7 @@ class DescriptorReader(object):

self._unreturned_descriptors.put(desc)
self._iter_notice.set()
except TypeError as exc:
except TypeError:
self._notify_skip_listeners(target, UnrecognizedType(mime_type))
except ValueError as exc:
self._notify_skip_listeners(target, ParsingFailure(exc))

File diff suppressed because it is too large
@@ -1,4 +1,4 @@

# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""

@@ -29,12 +29,19 @@ import stem.util.str_tools

from stem.descriptor import (
KEYWORD_LINE,
Descriptor,
_descriptor_content,
_value,
_values,
_get_descriptor_components,
_descriptor_components,
_parse_protocol_line,
_read_until_keywords,
_random_nickname,
_random_ipv4_address,
_random_date,
)

_parse_pr_line = _parse_protocol_line('pr', 'protocols')


def _parse_file(document_file, validate, entry_class, entry_keyword = 'r', start_position = None, end_position = None, section_end_keywords = (), extra_args = ()):
"""
@@ -166,17 +173,12 @@ def _parse_a_line(descriptor, entries):

raise ValueError("%s 'a' line must be of the form '[address]:[ports]': a %s" % (descriptor._name(), value))

address, port = value.rsplit(':', 1)
is_ipv6 = address.startswith('[') and address.endswith(']')

if is_ipv6:
address = address[1:-1]  # remove brackets

if not ((not is_ipv6 and stem.util.connection.is_valid_ipv4_address(address)) or
(is_ipv6 and stem.util.connection.is_valid_ipv6_address(address))):
if not stem.util.connection.is_valid_ipv4_address(address) and not stem.util.connection.is_valid_ipv6_address(address, allow_brackets = True):
raise ValueError("%s 'a' line must start with an IPv6 address: a %s" % (descriptor._name(), value))

if stem.util.connection.is_valid_port(port):
or_addresses.append((address, int(port), is_ipv6))
or_addresses.append((address.lstrip('[').rstrip(']'), int(port), stem.util.connection.is_valid_ipv6_address(address, allow_brackets = True)))
else:
raise ValueError("%s 'a' line had an invalid port (%s): a %s" % (descriptor._name(), port, value))
@@ -228,6 +230,11 @@ def _parse_w_line(descriptor, entries):

elif not w_comp[0].startswith('Bandwidth='):
raise ValueError("%s 'w' line needs to start with a 'Bandwidth=' entry: w %s" % (descriptor._name(), value))

bandwidth = None
measured = None
is_unmeasured = False
unrecognized_bandwidth_entries = []

for w_entry in w_comp:
if '=' in w_entry:
w_key, w_value = w_entry.split('=', 1)

@@ -238,25 +245,33 @@ def _parse_w_line(descriptor, entries):

if not (w_value and w_value.isdigit()):
raise ValueError("%s 'Bandwidth=' entry needs to have a numeric value: w %s" % (descriptor._name(), value))

descriptor.bandwidth = int(w_value)
bandwidth = int(w_value)
elif w_key == 'Measured':
if not (w_value and w_value.isdigit()):
raise ValueError("%s 'Measured=' entry needs to have a numeric value: w %s" % (descriptor._name(), value))

descriptor.measured = int(w_value)
measured = int(w_value)
elif w_key == 'Unmeasured':
if w_value != '1':
raise ValueError("%s 'Unmeasured=' should only have the value of '1': w %s" % (descriptor._name(), value))

descriptor.is_unmeasured = True
is_unmeasured = True
else:
descriptor.unrecognized_bandwidth_entries.append(w_entry)
unrecognized_bandwidth_entries.append(w_entry)

descriptor.bandwidth = bandwidth
descriptor.measured = measured
descriptor.is_unmeasured = is_unmeasured
descriptor.unrecognized_bandwidth_entries = unrecognized_bandwidth_entries


def _parse_p_line(descriptor, entries):
# "p" ("accept" / "reject") PortList
# p reject 1-65535
# example: p accept 80,110,143,443,993,995,6660-6669,6697,7000-7001
#
# examples:
#
# p accept 80,110,143,443,993,995,6660-6669,6697,7000-7001
# p reject 1-65535

value = _value('p', entries)
@@ -266,6 +281,29 @@ def _parse_p_line(descriptor, entries):

raise ValueError('%s exit policy is malformed (%s): p %s' % (descriptor._name(), exc, value))


def _parse_id_line(descriptor, entries):
# "id" "ed25519" ed25519-identity
#
# examples:
#
# id ed25519 none
# id ed25519 8RH34kO07Pp+XYwzdoATVyCibIvmbslUjRkAm7J4IA8

value = _value('id', entries)

if value:
if descriptor.document and not descriptor.document.is_vote:
raise ValueError("%s 'id' line should only appear in votes: id %s" % (descriptor._name(), value))

value_comp = value.split()

if len(value_comp) >= 2:
descriptor.identifier_type = value_comp[0]
descriptor.identifier = value_comp[1]
else:
raise ValueError("'id' lines should contain both the key type and digest: id %s" % value)


def _parse_m_line(descriptor, entries):
# "m" methods 1*(algorithm "=" digest)
# example: m 8,9,10,11,12 sha256=g1vx9si329muxV3tquWIXXySNOIwRGMeAESKs/v4DWs
@@ -333,7 +371,7 @@ def _base64_to_hex(identity, check_if_fingerprint = True):

except (TypeError, binascii.Error):
raise ValueError("Unable to decode identity string '%s'" % identity)

fingerprint = binascii.b2a_hex(identity_decoded).upper()
fingerprint = binascii.hexlify(identity_decoded).upper()

if stem.prereq.is_python_3():
fingerprint = stem.util.str_tools._to_unicode(fingerprint)

@@ -400,7 +438,7 @@ class RouterStatusEntry(Descriptor):

super(RouterStatusEntry, self).__init__(content, lazy_load = not validate)
self.document = document
entries = _get_descriptor_components(content, validate)
entries = _descriptor_components(content, validate)

if validate:
for keyword in self._required_fields():

@@ -445,9 +483,15 @@ class RouterStatusEntry(Descriptor):

return method(str(self).strip(), str(other).strip())

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
@@ -470,6 +514,15 @@ class RouterStatusEntryV2(RouterStatusEntry):

'digest': (None, _parse_r_line),
})

@classmethod
def content(cls, attr = None, exclude = (), sign = False):
if sign:
raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

return _descriptor_content(attr, exclude, (
('r', '%s p1aag7VwarGxqctS7/fS0y5FU+s oQZFLYe9e4A7bOkWKR7TaNxb0JE %s %s 9001 0' % (_random_nickname(), _random_date(), _random_ipv4_address())),
))

def _name(self, is_plural = False):
return 'Router status entries (v2)' if is_plural else 'Router status entry (v2)'

@@ -485,9 +538,15 @@ class RouterStatusEntryV2(RouterStatusEntry):

return method(str(self).strip(), str(other).strip())

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
@@ -502,18 +561,21 @@ class RouterStatusEntryV3(RouterStatusEntry):

:var list or_addresses: **\*** relay's OR addresses, this is a tuple listing
of the form (address (**str**), port (**int**), is_ipv6 (**bool**))
:var str identifier_type: identity digest key type
:var str identifier: base64 encoded identity digest
:var str digest: **\*** router's upper-case hex digest

:var int bandwidth: bandwidth claimed by the relay (in kb/s)
:var int measured: bandwidth measured to be available by the relay, this is a
:var int bandwidth: bandwidth measured to be available by the relay, this is a
unit-less heuristic generated by the Bandwidth authorities to weight relay
selection
:var bool is_unmeasured: bandwidth measurement isn't based on three or more
:var int measured: *bandwidth* vote provided by a bandwidth authority
:var bool is_unmeasured: *bandwidth* measurement isn't based on three or more
measurements
:var list unrecognized_bandwidth_entries: **\*** bandwidth weighting
information that isn't yet recognized

:var stem.exit_policy.MicroExitPolicy exit_policy: router's exit policy
:var dict protocols: mapping of protocols to their supported versions

:var list microdescriptor_hashes: **\*** tuples of two values, the list of
consensus methods for generating a set of digests and the 'algorithm =>

@@ -521,11 +583,19 @@ class RouterStatusEntryV3(RouterStatusEntry):

**\*** attribute is either required when we're parsed with validation or has
a default value, others are left as **None** if undefined

.. versionchanged:: 1.5.0
Added the identifier and identifier_type attributes.

.. versionchanged:: 1.6.0
Added the protocols attribute.
"""

ATTRIBUTES = dict(RouterStatusEntry.ATTRIBUTES, **{
'digest': (None, _parse_r_line),
'or_addresses': ([], _parse_a_line),
'identifier_type': (None, _parse_id_line),
'identifier': (None, _parse_id_line),

'bandwidth': (None, _parse_w_line),
'measured': (None, _parse_w_line),

@@ -533,6 +603,7 @@ class RouterStatusEntryV3(RouterStatusEntry):

'unrecognized_bandwidth_entries': ([], _parse_w_line),

'exit_policy': (None, _parse_p_line),
'protocols': ({}, _parse_pr_line),
'microdescriptor_hashes': ([], _parse_m_line),
})
@@ -540,9 +611,21 @@ class RouterStatusEntryV3(RouterStatusEntry):

'a': _parse_a_line,
'w': _parse_w_line,
'p': _parse_p_line,
'pr': _parse_pr_line,
'id': _parse_id_line,
'm': _parse_m_line,
})

@classmethod
def content(cls, attr = None, exclude = (), sign = False):
if sign:
raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

return _descriptor_content(attr, exclude, (
('r', '%s p1aag7VwarGxqctS7/fS0y5FU+s oQZFLYe9e4A7bOkWKR7TaNxb0JE %s %s 9001 0' % (_random_nickname(), _random_date(), _random_ipv4_address())),
('s', 'Fast Named Running Stable Valid'),
))

def _name(self, is_plural = False):
return 'Router status entries (v3)' if is_plural else 'Router status entry (v3)'
@@ -550,7 +633,7 @@ class RouterStatusEntryV3(RouterStatusEntry):

return ('r', 's')

def _single_fields(self):
return ('r', 's', 'v', 'w', 'p')
return ('r', 's', 'v', 'w', 'p', 'pr')

def _compare(self, other, method):
if not isinstance(other, RouterStatusEntryV3):

@@ -558,9 +641,15 @@ class RouterStatusEntryV3(RouterStatusEntry):

return method(str(self).strip(), str(other).strip())

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
@@ -573,33 +662,57 @@ class RouterStatusEntryMicroV3(RouterStatusEntry):

Information about an individual router stored within a microdescriptor
flavored network status document.

:var list or_addresses: **\*** relay's OR addresses, this is a tuple listing
of the form (address (**str**), port (**int**), is_ipv6 (**bool**))
:var int bandwidth: bandwidth claimed by the relay (in kb/s)
:var int measured: bandwidth measured to be available by the relay
:var bool is_unmeasured: bandwidth measurement isn't based on three or more
measurements
:var list unrecognized_bandwidth_entries: **\*** bandwidth weighting
information that isn't yet recognized
:var dict protocols: mapping of protocols to their supported versions

:var str digest: **\*** router's hex encoded digest of our corresponding microdescriptor

.. versionchanged:: 1.6.0
Added the protocols attribute.

.. versionchanged:: 1.7.0
Added the or_addresses attribute.

**\*** attribute is either required when we're parsed with validation or has
a default value, others are left as **None** if undefined
"""

ATTRIBUTES = dict(RouterStatusEntry.ATTRIBUTES, **{
'or_addresses': ([], _parse_a_line),
'bandwidth': (None, _parse_w_line),
'measured': (None, _parse_w_line),
'is_unmeasured': (False, _parse_w_line),
'unrecognized_bandwidth_entries': ([], _parse_w_line),
'protocols': ({}, _parse_pr_line),

'digest': (None, _parse_microdescriptor_m_line),
})

PARSER_FOR_LINE = dict(RouterStatusEntry.PARSER_FOR_LINE, **{
'a': _parse_a_line,
'w': _parse_w_line,
'm': _parse_microdescriptor_m_line,
'pr': _parse_pr_line,
})

@classmethod
def content(cls, attr = None, exclude = (), sign = False):
if sign:
raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

return _descriptor_content(attr, exclude, (
('r', '%s ARIJF2zbqirB9IwsW0mQznccWww %s %s 9001 9030' % (_random_nickname(), _random_date(), _random_ipv4_address())),
('m', 'aiUklwBrua82obG5AsTX+iEpkjQA2+AQHxZ7GwMfY70'),
('s', 'Fast Guard HSDir Named Running Stable V2Dir Valid'),
))

def _name(self, is_plural = False):
return 'Router status entries (micro v3)' if is_plural else 'Router status entry (micro v3)'
@@ -607,7 +720,7 @@ class RouterStatusEntryMicroV3(RouterStatusEntry):

return ('r', 's', 'm')

def _single_fields(self):
return ('r', 's', 'v', 'w', 'm')
return ('r', 's', 'v', 'w', 'm', 'pr')

def _compare(self, other, method):
if not isinstance(other, RouterStatusEntryMicroV3):

@@ -615,9 +728,15 @@ class RouterStatusEntryMicroV3(RouterStatusEntry):

return method(str(self).strip(), str(other).strip())

def __hash__(self):
return hash(str(self).strip())

def __eq__(self, other):
return self._compare(other, lambda s, o: s == o)

def __ne__(self, other):
return not self == other

def __lt__(self, other):
return self._compare(other, lambda s, o: s < o)
@@ -1,4 +1,4 @@

# Copyright 2012-2015, Damian Johnson and The Tor Project
# Copyright 2012-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""

@@ -10,8 +10,7 @@ etc). This information is provided from a few sources...

* The 'cached-descriptors' file in Tor's data directory.

* Archived descriptors provided by CollecTor
(https://collector.torproject.org/).
* Archived descriptors provided by `CollecTor <https://metrics.torproject.org/collector.html>`_.

* Directory authorities and mirrors via their DirPort.

@@ -21,6 +20,7 @@ etc). This information is provided from a few sources...

ServerDescriptor - Tor server descriptor.
|- RelayDescriptor - Server descriptor for a relay.
|  +- make_router_status_entry - Creates a router status entry for this descriptor.
|
|- BridgeDescriptor - Scrubbed server descriptor for a bridge.
|  |- is_scrubbed - checks if our content has been properly scrubbed
@@ -29,41 +29,69 @@ etc). This information is provided from a few sources...

|- digest - calculates the upper-case hex digest value for our content
|- get_annotations - dictionary of content prior to the descriptor entry
+- get_annotation_lines - lines that provided the annotations

.. data:: BridgeDistribution (enum)

Preferred method of distributing this relay if a bridge.

.. versionadded:: 1.6.0

===================== ===========
BridgeDistribution    Description
===================== ===========
**ANY**               No preference, BridgeDB will pick how the bridge is distributed.
**HTTPS**             Provided via the `web interface <https://bridges.torproject.org>`_.
**EMAIL**             Provided in response to emails to bridges@torproject.org.
**MOAT**              Provided in interactive menus within Tor Browser.
**HYPHAE**            Provided via a cryptographic invitation-based system.
===================== ===========
"""

import base64
import binascii
import functools
import hashlib
import re

import stem.descriptor.certificate
import stem.descriptor.extrainfo_descriptor
import stem.exit_policy
import stem.prereq
import stem.util.connection
import stem.util.enum
import stem.util.str_tools
import stem.util.tor_tools
import stem.version

from stem import str_type
from stem.descriptor.router_status_entry import RouterStatusEntryV3

from stem.descriptor import (
PGP_BLOCK_END,
Descriptor,
_get_descriptor_components,
create_signing_key,
_descriptor_content,
_descriptor_components,
_read_until_keywords,
_bytes_for_block,
_value,
_values,
_parse_simple_line,
_parse_if_present,
_parse_bytes_line,
_parse_timestamp_line,
_parse_forty_character_hex,
_parse_protocol_line,
_parse_key_block,
_append_router_signature,
_random_nickname,
_random_ipv4_address,
_random_date,
_random_crypto_blob,
)

try:
# added in python 3.2
if stem.prereq._is_lru_cache_available():
from functools import lru_cache
except ImportError:
else:
from stem.util.lru_cache import lru_cache

# relay descriptors must have exactly one of the following
@@ -78,6 +106,8 @@ REQUIRED_FIELDS = (

# optional entries that can appear at most once
SINGLE_FIELDS = (
'identity-ed25519',
'master-key-ed25519',
'platform',
'fingerprint',
'hibernating',

@@ -86,17 +116,36 @@ SINGLE_FIELDS = (

'read-history',
'write-history',
'eventdns',
'bridge-distribution-request',
'family',
'caches-extra-info',
'extra-info-digest',
'hidden-service-dir',
'protocols',
'allow-single-hop-exits',
'tunnelled-dir-server',
'proto',
'onion-key-crosscert',
'ntor-onion-key',
'ntor-onion-key-crosscert',
'router-sig-ed25519',
)

BridgeDistribution = stem.util.enum.Enum(
('ANY', 'any'),
('HTTPS', 'https'),
('EMAIL', 'email'),
('MOAT', 'moat'),
('HYPHAE', 'hyphae'),
)

DEFAULT_IPV6_EXIT_POLICY = stem.exit_policy.MicroExitPolicy('reject 1-65535')
REJECT_ALL_POLICY = stem.exit_policy.ExitPolicy('reject *:*')
DEFAULT_BRIDGE_DISTRIBUTION = 'any'


def _truncated_b64encode(content):
return stem.util.str_tools._to_unicode(base64.b64encode(content).rstrip(b'='))


def _parse_file(descriptor_file, is_bridge = False, validate = False, **kwargs):
@@ -265,6 +314,17 @@ def _parse_fingerprint_line(descriptor, entries):

descriptor.fingerprint = fingerprint


def _parse_extrainfo_digest_line(descriptor, entries):
value = _value('extra-info-digest', entries)
digest_comp = value.split(' ')

if not stem.util.tor_tools.is_hex_digits(digest_comp[0], 40):
raise ValueError('extra-info-digest should be 40 hex characters: %s' % digest_comp[0])

descriptor.extra_info_digest = digest_comp[0]
descriptor.extra_info_sha256_digest = digest_comp[1] if len(digest_comp) >= 2 else None


def _parse_hibernating_line(descriptor, entries):
# "hibernating" 0|1 (in practice only set if one)

@@ -276,15 +336,6 @@ def _parse_hibernating_line(descriptor, entries):

descriptor.hibernating = value == '1'


def _parse_hidden_service_dir_line(descriptor, entries):
value = _value('hidden-service-dir', entries)

if value:
descriptor.hidden_service_dir = value.split(' ')
else:
descriptor.hidden_service_dir = ['2']


def _parse_uptime_line(descriptor, entries):
# We need to be tolerant of negative uptimes to accommodate a past tor
# bug...
@@ -328,19 +379,14 @@ def _parse_or_address_line(descriptor, entries):

raise ValueError('or-address line missing a colon: %s' % line)

address, port = entry.rsplit(':', 1)
is_ipv6 = address.startswith('[') and address.endswith(']')

if is_ipv6:
address = address[1:-1]  # remove brackets

if not ((not is_ipv6 and stem.util.connection.is_valid_ipv4_address(address)) or
(is_ipv6 and stem.util.connection.is_valid_ipv6_address(address))):
if not stem.util.connection.is_valid_ipv4_address(address) and not stem.util.connection.is_valid_ipv6_address(address, allow_brackets = True):
raise ValueError('or-address line has a malformed address: %s' % line)

if not stem.util.connection.is_valid_port(port):
raise ValueError('or-address line has a malformed port: %s' % line)

or_addresses.append((address, int(port), is_ipv6))
or_addresses.append((address.lstrip('[').rstrip(']'), int(port), stem.util.connection.is_valid_ipv6_address(address, allow_brackets = True)))

descriptor.or_addresses = or_addresses

@@ -364,7 +410,7 @@ def _parse_history_line(keyword, history_end_attribute, history_interval_attribu

def _parse_exit_policy(descriptor, entries):
if hasattr(descriptor, '_unparsed_exit_policy'):
if descriptor._unparsed_exit_policy == [str_type('reject *:*')]:
if descriptor._unparsed_exit_policy and stem.util.str_tools._to_unicode(descriptor._unparsed_exit_policy[0]) == 'reject *:*':
descriptor.exit_policy = REJECT_ALL_POLICY
else:
descriptor.exit_policy = stem.exit_policy.ExitPolicy(*descriptor._unparsed_exit_policy)
@@ -372,20 +418,39 @@ def _parse_exit_policy(descriptor, entries):

del descriptor._unparsed_exit_policy


def _parse_identity_ed25519_line(descriptor, entries):
_parse_key_block('identity-ed25519', 'ed25519_certificate', 'ED25519 CERT')(descriptor, entries)

if descriptor.ed25519_certificate:
cert_lines = descriptor.ed25519_certificate.split('\n')

if cert_lines[0] == '-----BEGIN ED25519 CERT-----' and cert_lines[-1] == '-----END ED25519 CERT-----':
descriptor.certificate = stem.descriptor.certificate.Ed25519Certificate.parse(''.join(cert_lines[1:-1]))


_parse_master_key_ed25519_line = _parse_simple_line('master-key-ed25519', 'ed25519_master_key')
_parse_master_key_ed25519_for_hash_line = _parse_simple_line('master-key-ed25519', 'ed25519_certificate_hash')
_parse_contact_line = _parse_bytes_line('contact', 'contact')
_parse_published_line = _parse_timestamp_line('published', 'published')
_parse_extrainfo_digest_line = _parse_forty_character_hex('extra-info-digest', 'extra_info_digest')
_parse_read_history_line = functools.partial(_parse_history_line, 'read-history', 'read_history_end', 'read_history_interval', 'read_history_values')
_parse_write_history_line = functools.partial(_parse_history_line, 'write-history', 'write_history_end', 'write_history_interval', 'write_history_values')
_parse_ipv6_policy_line = lambda descriptor, entries: setattr(descriptor, 'exit_policy_v6', stem.exit_policy.MicroExitPolicy(_value('ipv6-policy', entries)))
_parse_allow_single_hop_exits_line = lambda descriptor, entries: setattr(descriptor, 'allow_single_hop_exits', 'allow_single_hop_exits' in entries)
_parse_caches_extra_info_line = lambda descriptor, entries: setattr(descriptor, 'extra_info_cache', 'extra_info_cache' in entries)
_parse_family_line = lambda descriptor, entries: setattr(descriptor, 'family', set(_value('family', entries).split(' ')))
_parse_eventdns_line = lambda descriptor, entries: setattr(descriptor, 'eventdns', _value('eventdns', entries) == '1')
_parse_ipv6_policy_line = _parse_simple_line('ipv6-policy', 'exit_policy_v6', func = lambda v: stem.exit_policy.MicroExitPolicy(v))
_parse_allow_single_hop_exits_line = _parse_if_present('allow-single-hop-exits', 'allow_single_hop_exits')
_parse_tunneled_dir_server_line = _parse_if_present('tunnelled-dir-server', 'allow_tunneled_dir_requests')
_parse_proto_line = _parse_protocol_line('proto', 'protocols')
_parse_hidden_service_dir_line = _parse_if_present('hidden-service-dir', 'is_hidden_service_dir')
_parse_caches_extra_info_line = _parse_if_present('caches-extra-info', 'extra_info_cache')
_parse_bridge_distribution_request_line = _parse_simple_line('bridge-distribution-request', 'bridge_distribution')
_parse_family_line = _parse_simple_line('family', 'family', func = lambda v: set(v.split(' ')))
_parse_eventdns_line = _parse_simple_line('eventdns', 'eventdns', func = lambda v: v == '1')
_parse_onion_key_line = _parse_key_block('onion-key', 'onion_key', 'RSA PUBLIC KEY')
_parse_onion_key_crosscert_line = _parse_key_block('onion-key-crosscert', 'onion_key_crosscert', 'CROSSCERT')
_parse_signing_key_line = _parse_key_block('signing-key', 'signing_key', 'RSA PUBLIC KEY')
_parse_router_signature_line = _parse_key_block('router-signature', 'signature', 'SIGNATURE')
_parse_ntor_onion_key_line = _parse_simple_line('ntor-onion-key', 'ntor_onion_key')
_parse_ntor_onion_key_crosscert_line = _parse_key_block('ntor-onion-key-crosscert', 'ntor_onion_key_crosscert', 'ED25519 CERT', 'ntor_onion_key_crosscert_sign')
_parse_router_sig_ed25519_line = _parse_simple_line('router-sig-ed25519', 'ed25519_signature')
_parse_router_digest_sha256_line = _parse_simple_line('router-digest-sha256', 'router_digest_sha256')
_parse_router_digest_line = _parse_forty_character_hex('router-digest', '_digest')
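
The hunk above swaps ad-hoc lambdas for parser factories such as
_parse_simple_line() and _parse_if_present(). Those are stem internals, but
the closure pattern is easy to show in isolation (this is a simplified
stand-in, not stem's actual helper, and the entries layout is assumed)...

::

  def parse_simple_line(keyword, attribute, func = None):
    # builds a parser that copies a keyword's value onto the descriptor,
    # optionally transformed by func

    def _parse(descriptor, entries):
      value = entries[keyword][0][0]  # assumes (value, block_type, block_contents) tuples
      setattr(descriptor, attribute, func(value) if func else value)

    return _parse

  parse_family_line = parse_simple_line('family', 'family', func = lambda v: set(v.split(' ')))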
@@ -399,7 +464,7 @@ class ServerDescriptor(Descriptor):

:var str address: **\*** IPv4 address of the relay
:var int or_port: **\*** port used for relaying
:var int socks_port: **\*** port used as client (deprecated, always **None**)
:var int socks_port: **\*** port used as client (**deprecated**, always **None**)
:var int dir_port: **\*** port used for descriptor mirroring

:var bytes platform: line with operating system and tor version

@@ -409,6 +474,8 @@ class ServerDescriptor(Descriptor):

:var bytes contact: contact information
:var stem.exit_policy.ExitPolicy exit_policy: **\*** stated exit policy
:var stem.exit_policy.MicroExitPolicy exit_policy_v6: **\*** exit policy for IPv6
:var BridgeDistribution bridge_distribution: **\*** preferred method of providing this relay's
address if a bridge
:var set family: **\*** nicknames or fingerprints of declared family

:var int average_bandwidth: **\*** average rate it's willing to relay in bytes/s

@@ -417,16 +484,23 @@ class ServerDescriptor(Descriptor):

:var list link_protocols: link protocols supported by the relay
:var list circuit_protocols: circuit protocols supported by the relay
:var bool is_hidden_service_dir: **\*** indicates if the relay serves hidden
service descriptors
:var bool hibernating: **\*** hibernating when published
:var bool allow_single_hop_exits: **\*** flag if single hop exiting is allowed
:var bool allow_tunneled_dir_requests: **\*** flag if tunneled directory
requests are accepted
:var bool extra_info_cache: **\*** flag if a mirror for extra-info documents
:var str extra_info_digest: upper-case hex encoded digest of our extra-info document
:var bool eventdns: flag for evdns backend (deprecated, always unset)
:var str extra_info_sha256_digest: base64 encoded sha256 digest of our extra-info document
:var bool eventdns: flag for evdns backend (**deprecated**, always unset)
:var str ntor_onion_key: base64 key used to encrypt EXTEND in the ntor protocol
:var list or_addresses: **\*** alternative for our address/or_port
attributes, each entry is a tuple of the form (address (**str**), port
(**int**), is_ipv6 (**bool**))
:var dict protocols: mapping of protocols to their supported versions

Deprecated, moved to extra-info descriptor...
**Deprecated**, moved to extra-info descriptor...

:var datetime read_history_end: end of the sampling interval
:var int read_history_interval: seconds per interval
@@ -438,6 +512,20 @@ class ServerDescriptor(Descriptor):

**\*** attribute is either required when we're parsed with validation or has
a default value, others are left as **None** if undefined

.. versionchanged:: 1.5.0
Added the allow_tunneled_dir_requests attribute.

.. versionchanged:: 1.6.0
Added the extra_info_sha256_digest, protocols, and bridge_distribution
attributes.

.. versionchanged:: 1.7.0
Added the is_hidden_service_dir attribute.

.. versionchanged:: 1.7.0
Deprecated the hidden_service_dir field, it's never been populated
(:spec:`43c2f78`). This field will be removed in Stem 2.0.
"""

ATTRIBUTES = {

@@ -457,6 +545,7 @@ class ServerDescriptor(Descriptor):

'operating_system': (None, _parse_platform_line),
'uptime': (None, _parse_uptime_line),
'exit_policy_v6': (DEFAULT_IPV6_EXIT_POLICY, _parse_ipv6_policy_line),
'bridge_distribution': (DEFAULT_BRIDGE_DISTRIBUTION, _parse_bridge_distribution_request_line),
'family': (set(), _parse_family_line),

'average_bandwidth': (None, _parse_bandwidth_line),

@@ -465,12 +554,16 @@ class ServerDescriptor(Descriptor):

'link_protocols': (None, _parse_protocols_line),
'circuit_protocols': (None, _parse_protocols_line),
'is_hidden_service_dir': (False, _parse_hidden_service_dir_line),
'hibernating': (False, _parse_hibernating_line),
'allow_single_hop_exits': (False, _parse_allow_single_hop_exits_line),
'allow_tunneled_dir_requests': (False, _parse_tunneled_dir_server_line),
'protocols': ({}, _parse_proto_line),
'extra_info_cache': (False, _parse_caches_extra_info_line),
'extra_info_digest': (None, _parse_extrainfo_digest_line),
'hidden_service_dir': (None, _parse_hidden_service_dir_line),
'extra_info_sha256_digest': (None, _parse_extrainfo_digest_line),
'eventdns': (None, _parse_eventdns_line),
'ntor_onion_key': (None, _parse_ntor_onion_key_line),
'or_addresses': ([], _parse_or_address_line),

'read_history_end': (None, _parse_read_history_line),
@@ -494,12 +587,16 @@ class ServerDescriptor(Descriptor):

'hidden-service-dir': _parse_hidden_service_dir_line,
'uptime': _parse_uptime_line,
'protocols': _parse_protocols_line,
'ntor-onion-key': _parse_ntor_onion_key_line,
'or-address': _parse_or_address_line,
'read-history': _parse_read_history_line,
'write-history': _parse_write_history_line,
'ipv6-policy': _parse_ipv6_policy_line,
'allow-single-hop-exits': _parse_allow_single_hop_exits_line,
'tunnelled-dir-server': _parse_tunneled_dir_server_line,
'proto': _parse_proto_line,
'caches-extra-info': _parse_caches_extra_info_line,
'bridge-distribution-request': _parse_bridge_distribution_request_line,
'family': _parse_family_line,
'eventdns': _parse_eventdns_line,
}

@@ -533,7 +630,13 @@ class ServerDescriptor(Descriptor):

# influences the resulting exit policy, but for everything else the order
# does not matter so breaking it into key / value pairs.

entries, self._unparsed_exit_policy = _get_descriptor_components(stem.util.str_tools._to_unicode(raw_contents), validate, ('accept', 'reject'))
entries, self._unparsed_exit_policy = _descriptor_components(stem.util.str_tools._to_unicode(raw_contents), validate, extra_keywords = ('accept', 'reject'), non_ascii_fields = ('contact', 'platform'))

# TODO: Remove the following field in Stem 2.0. It has never been populated...
#
# https://gitweb.torproject.org/torspec.git/commit/?id=43c2f78

self.hidden_service_dir = ['2']

if validate:
self._parse(entries, validate)

@@ -624,6 +727,12 @@ class ServerDescriptor(Descriptor):

if expected_last_keyword and expected_last_keyword != list(entries.keys())[-1]:
raise ValueError("Descriptor must end with a '%s' entry" % expected_last_keyword)

if 'identity-ed25519' in entries.keys():
if 'router-sig-ed25519' not in entries.keys():
raise ValueError('Descriptor must have router-sig-ed25519 entry to accompany identity-ed25519')
elif 'router-sig-ed25519' not in list(entries.keys())[-2:]:
raise ValueError("Descriptor must have 'router-sig-ed25519' as the next-to-last entry")

if not self.exit_policy:
raise ValueError("Descriptor must have at least one 'accept' or 'reject' entry")
@@ -648,29 +757,68 @@ class RelayDescriptor(ServerDescriptor):
  Server descriptor (`descriptor specification
  <https://gitweb.torproject.org/torspec.git/tree/dir-spec.txt>`_)

  :var stem.certificate.Ed25519Certificate certificate: ed25519 certificate
  :var str ed25519_certificate: base64 encoded ed25519 certificate
  :var str ed25519_master_key: base64 encoded master key for our ed25519 certificate
  :var str ed25519_signature: signature of this document using ed25519

  :var str onion_key: **\*** key used to encrypt EXTEND cells
  :var str ntor_onion_key: base64 key used to encrypt EXTEND in the ntor protocol
  :var str onion_key_crosscert: signature generated using the onion_key
  :var str ntor_onion_key_crosscert: signature generated using the ntor-onion-key
  :var str ntor_onion_key_crosscert_sign: sign of the corresponding ed25519 public key
  :var str signing_key: **\*** relay's long-term identity key
  :var str signature: **\*** signature for this descriptor

  **\*** attribute is required when we're parsed with validation

  .. versionchanged:: 1.5.0
     Added the ed25519_certificate, ed25519_master_key, ed25519_signature,
     onion_key_crosscert, ntor_onion_key_crosscert, and
     ntor_onion_key_crosscert_sign attributes.

  .. versionchanged:: 1.6.0
     Moved from the deprecated `pycrypto
     <https://www.dlitz.net/software/pycrypto/>`_ module to `cryptography
     <https://pypi.python.org/pypi/cryptography>`_ for validating signatures.

  .. versionchanged:: 1.6.0
     Added the certificate attribute.

  .. deprecated:: 1.6.0
     Our **ed25519_certificate** is deprecated in favor of our new
     **certificate** attribute. The base64 encoded certificate is available via
     the certificate's **encoded** attribute.

  .. versionchanged:: 1.6.0
     Added the **skip_crypto_validation** constructor argument.
  """

  ATTRIBUTES = dict(ServerDescriptor.ATTRIBUTES, **{
    'certificate': (None, _parse_identity_ed25519_line),
    'ed25519_certificate': (None, _parse_identity_ed25519_line),
    'ed25519_master_key': (None, _parse_master_key_ed25519_line),
    'ed25519_signature': (None, _parse_router_sig_ed25519_line),

    'onion_key': (None, _parse_onion_key_line),
    'ntor_onion_key': (None, _parse_ntor_onion_key_line),
    'onion_key_crosscert': (None, _parse_onion_key_crosscert_line),
    'ntor_onion_key_crosscert': (None, _parse_ntor_onion_key_crosscert_line),
    'ntor_onion_key_crosscert_sign': (None, _parse_ntor_onion_key_crosscert_line),
    'signing_key': (None, _parse_signing_key_line),
    'signature': (None, _parse_router_signature_line),
  })

  PARSER_FOR_LINE = dict(ServerDescriptor.PARSER_FOR_LINE, **{
    'identity-ed25519': _parse_identity_ed25519_line,
    'master-key-ed25519': _parse_master_key_ed25519_line,
    'router-sig-ed25519': _parse_router_sig_ed25519_line,
    'onion-key': _parse_onion_key_line,
    'ntor-onion-key': _parse_ntor_onion_key_line,
    'onion-key-crosscert': _parse_onion_key_crosscert_line,
    'ntor-onion-key-crosscert': _parse_ntor_onion_key_crosscert_line,
    'signing-key': _parse_signing_key_line,
    'router-signature': _parse_router_signature_line,
  })

  def __init__(self, raw_contents, validate = False, annotations = None):
  def __init__(self, raw_contents, validate = False, annotations = None, skip_crypto_validation = False):
    super(RelayDescriptor, self).__init__(raw_contents, validate, annotations)

    if validate:
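Since this change deprecates **ed25519_certificate** in favor of the new
**certificate** attribute, a minimal migration sketch (the descriptor path
here is hypothetical, not from the commit)...

::

  import stem.descriptor

  # 'my-server-descriptor' is a stand-in path to a cached descriptor file
  for desc in stem.descriptor.parse_file('my-server-descriptor', 'server-descriptor 1.0'):
    if desc.certificate:
      print(desc.ed25519_certificate)  # deprecated as of 1.6.0
      print(desc.certificate.encoded)  # preferred, same base64 content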
@@ -680,12 +828,65 @@ class RelayDescriptor(ServerDescriptor):
        if key_hash != self.fingerprint.lower():
          raise ValueError('Fingerprint does not match the hash of our signing key (fingerprint: %s, signing key hash: %s)' % (self.fingerprint.lower(), key_hash))

      if stem.prereq.is_crypto_available():
      if not skip_crypto_validation and stem.prereq.is_crypto_available():
        signed_digest = self._digest_for_signature(self.signing_key, self.signature)

        if signed_digest != self.digest():
          raise ValueError('Decrypted digest does not match local digest (calculated: %s, local: %s)' % (signed_digest, self.digest()))

      if self.onion_key_crosscert and stem.prereq.is_crypto_available():
        onion_key_crosscert_digest = self._digest_for_signature(self.onion_key, self.onion_key_crosscert)

        if onion_key_crosscert_digest != self._onion_key_crosscert_digest():
          raise ValueError('Decrypted onion-key-crosscert digest does not match local digest (calculated: %s, local: %s)' % (onion_key_crosscert_digest, self._onion_key_crosscert_digest()))

      if stem.prereq._is_pynacl_available() and self.certificate:
        self.certificate.validate(self)

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False, signing_key = None):
    if signing_key:
      sign = True

    if attr is None:
      attr = {}

    base_header = (
      ('router', '%s %s 9001 0 0' % (_random_nickname(), _random_ipv4_address())),
      ('published', _random_date()),
      ('bandwidth', '153600 256000 104590'),
      ('reject', '*:*'),
      ('onion-key', _random_crypto_blob('RSA PUBLIC KEY')),
      ('signing-key', _random_crypto_blob('RSA PUBLIC KEY')),
    )

    if sign:
      if attr and 'signing-key' in attr:
        raise ValueError('Cannot sign the descriptor if a signing-key has been provided')
      elif attr and 'router-signature' in attr:
        raise ValueError('Cannot sign the descriptor if a router-signature has been provided')

      if signing_key is None:
        signing_key = create_signing_key()

      if 'fingerprint' not in attr:
        fingerprint = hashlib.sha1(_bytes_for_block(stem.util.str_tools._to_unicode(signing_key.public_digest.strip()))).hexdigest().upper()
        attr['fingerprint'] = ' '.join(stem.util.str_tools._split_by_length(fingerprint, 4))

      attr['signing-key'] = signing_key.public_digest

      content = _descriptor_content(attr, exclude, base_header) + b'\nrouter-signature\n'
      return _append_router_signature(content, signing_key.private)
    else:
      return _descriptor_content(attr, exclude, base_header, (
        ('router-sig-ed25519', None),
        ('router-signature', _random_crypto_blob('SIGNATURE')),
      ))

  @classmethod
  def create(cls, attr = None, exclude = (), validate = True, sign = False, signing_key = None):
    return cls(cls.content(attr, exclude, sign, signing_key), validate = validate, skip_crypto_validation = not sign)
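The content() and create() methods above gain signing support with this
commit. A rough sketch of fabricating a signed descriptor, assuming the
create_signing_key() helper this commit adds to stem.descriptor (requires
the cryptography module)...

::

  from stem.descriptor import create_signing_key
  from stem.descriptor.server_descriptor import RelayDescriptor

  # providing a signing_key implies sign = True
  key = create_signing_key()
  desc = RelayDescriptor.create({'router': 'demo 71.35.133.197 9001 0 0'}, signing_key = key)

  print(desc.nickname)     # 'demo'
  print(desc.fingerprint)  # derived from the signing key when not supplied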
  @lru_cache()
  def digest(self):
    """
@@ -693,23 +894,88 @@ class RelayDescriptor(ServerDescriptor):

    :returns: the digest string encoded in uppercase hex

    :raises: ValueError if the digest canot be calculated
    :raises: ValueError if the digest cannot be calculated
    """

    return self._digest_for_content(b'router ', b'\nrouter-signature\n')

  def make_router_status_entry(self):
    """
    Provides a RouterStatusEntryV3 for this descriptor content.

    .. versionadded:: 1.6.0

    :returns: :class:`~stem.descriptor.router_status_entry.RouterStatusEntryV3`
      that would be in the consensus
    """

    if not self.fingerprint:
      raise ValueError('Server descriptor lacks a fingerprint. This is an optional field, but required to make a router status entry.')

    attr = {
      'r': ' '.join([
        self.nickname,
        _truncated_b64encode(binascii.unhexlify(stem.util.str_tools._to_bytes(self.fingerprint))),
        _truncated_b64encode(binascii.unhexlify(stem.util.str_tools._to_bytes(self.digest()))),
        self.published.strftime('%Y-%m-%d %H:%M:%S'),
        self.address,
        str(self.or_port),
        str(self.dir_port) if self.dir_port else '0',
      ]),
      'w': 'Bandwidth=%i' % self.average_bandwidth,
      'p': self.exit_policy.summary().replace(', ', ','),
    }

    if self.tor_version:
      attr['v'] = 'Tor %s' % self.tor_version

    if self.or_addresses:
      attr['a'] = ['%s:%s' % (addr, port) for addr, port, _ in self.or_addresses]

    if self.certificate:
      attr['id'] = 'ed25519 %s' % _truncated_b64encode(self.certificate.key)

    return RouterStatusEntryV3.create(attr)

  @lru_cache()
  def _onion_key_crosscert_digest(self):
    """
    Provides the digest of the onion-key-crosscert data. This consists of the
    RSA identity key sha1 and ed25519 identity key.

    :returns: **unicode** digest encoded in uppercase hex

    :raises: ValueError if the digest cannot be calculated
    """

    signing_key_digest = hashlib.sha1(_bytes_for_block(self.signing_key)).digest()
    data = signing_key_digest + base64.b64decode(stem.util.str_tools._to_bytes(self.ed25519_master_key) + b'=')
    return stem.util.str_tools._to_unicode(binascii.hexlify(data).upper())

  def _compare(self, other, method):
    if not isinstance(other, RelayDescriptor):
      return False

    return method(str(self).strip(), str(other).strip())

  def _check_constraints(self, entries):
    super(RelayDescriptor, self)._check_constraints(entries)

    if self.ed25519_certificate:
      if not self.onion_key_crosscert:
        raise ValueError("Descriptor must have a 'onion-key-crosscert' when identity-ed25519 is present")
      elif not self.ed25519_signature:
        raise ValueError("Descriptor must have a 'router-sig-ed25519' when identity-ed25519 is present")

  def __hash__(self):
    return hash(str(self).strip())

  def __eq__(self, other):
    return self._compare(other, lambda s, o: s == o)

  def __ne__(self, other):
    return not self == other

  def __lt__(self, other):
    return self._compare(other, lambda s, o: s < o)
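A sketch of deriving a consensus entry from a fabricated descriptor with the
new make_router_status_entry() helper; signing first so a matching
fingerprint is populated (requires the cryptography module)...

::

  from stem.descriptor.server_descriptor import RelayDescriptor

  desc = RelayDescriptor.create(sign = True)
  entry = desc.make_router_status_entry()

  print(entry.fingerprint)  # matches the descriptor's fingerprint
  print(entry.bandwidth)    # drawn from the descriptor's average bandwidth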
@@ -720,17 +986,42 @@ class RelayDescriptor(ServerDescriptor):
class BridgeDescriptor(ServerDescriptor):
  """
  Bridge descriptor (`bridge descriptor specification
  <https://collector.torproject.org/formats.html#bridge-descriptors>`_)
  <https://metrics.torproject.org/collector.html#bridge-descriptors>`_)

  :var str ed25519_certificate_hash: sha256 hash of the original identity-ed25519
  :var str router_digest_sha256: sha256 digest of this document

  .. versionchanged:: 1.5.0
     Added the ed25519_certificate_hash and router_digest_sha256 attributes.
     Also added ntor_onion_key (previously this only belonged to unsanitized
     descriptors).
  """

  ATTRIBUTES = dict(ServerDescriptor.ATTRIBUTES, **{
    'ed25519_certificate_hash': (None, _parse_master_key_ed25519_for_hash_line),
    'router_digest_sha256': (None, _parse_router_digest_sha256_line),
    '_digest': (None, _parse_router_digest_line),
  })

  PARSER_FOR_LINE = dict(ServerDescriptor.PARSER_FOR_LINE, **{
    'master-key-ed25519': _parse_master_key_ed25519_for_hash_line,
    'router-digest-sha256': _parse_router_digest_sha256_line,
    'router-digest': _parse_router_digest_line,
  })

  @classmethod
  def content(cls, attr = None, exclude = (), sign = False):
    if sign:
      raise NotImplementedError('Signing of %s not implemented' % cls.__name__)

    return _descriptor_content(attr, exclude, (
      ('router', '%s %s 9001 0 0' % (_random_nickname(), _random_ipv4_address())),
      ('router-digest', '006FD96BA35E7785A6A3B8B75FE2E2435A13BDB4'),
      ('published', _random_date()),
      ('bandwidth', '409600 819200 5120'),
      ('reject', '*:*'),
    ))

  def digest(self):
    return self._digest
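Bridge descriptors can't be signed, but fabricating scrubbed test content
works the same way as for relays. A quick sketch...

::

  from stem.descriptor.server_descriptor import BridgeDescriptor

  desc = BridgeDescriptor.create()

  print(desc.digest())     # '006FD96BA35E7785A6A3B8B75FE2E2435A13BDB4' unless overridden
  print(desc.exit_policy)  # reject *:*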
@@ -738,7 +1029,7 @@ class BridgeDescriptor(ServerDescriptor):
    """
    Checks if we've been properly scrubbed in accordance with the `bridge
    descriptor specification
    <https://collector.torproject.org/formats.html#bridge-descriptors>`_.
    <https://metrics.torproject.org/collector.html#bridge-descriptors>`_.
    Validation is a moving target so this may not be fully up to date.

    :returns: **True** if we're scrubbed, **False** otherwise
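A sketch of the scrubbing check, assuming an unsanitized contact line is
among the issues it flags...

::

  from stem.descriptor.server_descriptor import BridgeDescriptor

  # 'alice@example.com' is a made-up contact, not from the commit
  desc = BridgeDescriptor.create({'contact': 'alice@example.com'})

  if not desc.is_scrubbed():
    print('descriptor contains unsanitized content')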
@@ -815,6 +1106,9 @@ class BridgeDescriptor(ServerDescriptor):
  def __eq__(self, other):
    return self._compare(other, lambda s, o: s == o)

  def __ne__(self, other):
    return not self == other

  def __lt__(self, other):
    return self._compare(other, lambda s, o: s < o)
stem/descriptor/tordnsel.py

@@ -1,4 +1,4 @@
# Copyright 2013-2015, Damian Johnson and The Tor Project
# Copyright 2013-2018, Damian Johnson and The Tor Project
# See LICENSE for licensing information

"""

@@ -17,7 +17,7 @@ import stem.util.tor_tools
from stem.descriptor import (
  Descriptor,
  _read_until_keywords,
  _get_descriptor_components,
  _descriptor_components,
)

@@ -63,7 +63,7 @@ class TorDNSEL(Descriptor):
  def __init__(self, raw_contents, validate):
    super(TorDNSEL, self).__init__(raw_contents)
    raw_contents = stem.util.str_tools._to_unicode(raw_contents)
    entries = _get_descriptor_components(raw_contents, validate)
    entries = _descriptor_components(raw_contents, validate)

    self.fingerprint = None
    self.published = None
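The switch to _descriptor_components() is internal, so parsing TorDNSEL exit
lists is unchanged from a caller's perspective. A sketch, assuming the
'tordnsel 1.0' descriptor type and a stand-in path...

::

  import stem.descriptor

  # 'exit-list' is a hypothetical file of TorDNSEL exit list entries
  for desc in stem.descriptor.parse_file('exit-list', 'tordnsel 1.0'):
    print('%s published %s' % (desc.fingerprint, desc.published))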