add Linux_i686

j 2014-05-17 18:11:40 +00:00 committed by Ubuntu
commit 95cd9b11f2
1644 changed files with 564260 additions and 0 deletions


@@ -0,0 +1,13 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted Python: Utilities and Enhancements for Python.
"""


@@ -0,0 +1,66 @@
/*****************************************************************************
Copyright (c) 2002 Zope Corporation and Contributors. All Rights Reserved.
This software is subject to the provisions of the Zope Public License,
Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
FOR A PARTICULAR PURPOSE
****************************************************************************/
/*
* This has been reported for inclusion in Python here: http://bugs.python.org/issue7333
* Hopefully we may be able to remove this file in some years.
*/
#include "Python.h"
#if defined(__unix__) || defined(unix) || defined(__NetBSD__) || defined(__MACH__) /* Mac OS X */
#include <grp.h>
#include <sys/types.h>
#include <unistd.h>
static PyObject *
initgroups_initgroups(PyObject *self, PyObject *args)
{
char *username;
unsigned int igid;
gid_t gid;
if (!PyArg_ParseTuple(args, "sI:initgroups", &username, &igid))
return NULL;
gid = igid;
if (initgroups(username, gid) == -1)
return PyErr_SetFromErrno(PyExc_OSError);
Py_INCREF(Py_None);
return Py_None;
}
static PyMethodDef InitgroupsMethods[] = {
{"initgroups", initgroups_initgroups, METH_VARARGS},
{NULL, NULL}
};
#else
/* This module is empty on non-UNIX systems. */
static PyMethodDef InitgroupsMethods[] = {
{NULL, NULL}
};
#endif /* defined(__unix__) || defined(unix) */
void
init_initgroups(void)
{
Py_InitModule("_initgroups", InitgroupsMethods);
}
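An illustrative usage sketch (not from the file above): once this extension is compiled, it is used from Python as shown below. The module name "_initgroups" follows from the Py_InitModule call; the user name and group id are made-up example values, and the call normally requires root privileges.

import _initgroups

# Reset the supplementary group list of the current process for "nobody".
_initgroups.initgroups("nobody", 65534)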


@@ -0,0 +1,101 @@
# -*- test-case-name: twisted.internet.test.test_inotify -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Very low-level ctypes-based interface to Linux inotify(7).
ctypes and a version of libc which supports inotify system calls are
required.
"""
import ctypes
import ctypes.util
class INotifyError(Exception):
"""
Unify all the possible exceptions that can be raised by the INotify API.
"""
def init():
"""
Create an inotify instance and return the associated file descriptor.
"""
fd = libc.inotify_init()
if fd < 0:
raise INotifyError("INotify initialization error.")
return fd
def add(fd, path, mask):
"""
Add a watch for the given path to the inotify file descriptor, and return
the watch descriptor.
"""
wd = libc.inotify_add_watch(fd, path, mask)
if wd < 0:
raise INotifyError("Failed to add watch on '%r' - (%r)" % (path, wd))
return wd
def remove(fd, wd):
"""
Remove the given watch descriptor from the inotify file descriptor.
"""
# When inotify_rm_watch returns -1 there's an error:
# The errno for this call can be either one of the following:
# EBADF: fd is not a valid file descriptor.
# EINVAL: The watch descriptor wd is not valid; or fd is
# not an inotify file descriptor.
#
# If we can't access the errno here, we cannot even raise an
# exception, so we have to ignore the problem. One of the most
# common cases is removing a directory from the filesystem while
# that directory is being observed: when inotify_rm_watch is then
# called for the now non-existent directory, either of the two
# errors may come up, because the files inside it may have
# generated events well before they were handled.
# Unfortunately only ctypes in Python 2.6 supports accessing errno:
# http://bugs.python.org/issue1798 and in order to solve
# the problem for previous versions we need to introduce
# code that is quite complex:
# http://stackoverflow.com/questions/661017/access-to-errno-from-python
#
# See #4310 for future resolution of this issue.
libc.inotify_rm_watch(fd, wd)
def initializeModule(libc):
"""
Initialize the module, checking if the expected APIs exist and setting the
argtypes and restype for C{inotify_init}, C{inotify_add_watch}, and
C{inotify_rm_watch}.
"""
for function in ("inotify_add_watch", "inotify_init", "inotify_rm_watch"):
if getattr(libc, function, None) is None:
raise ImportError("libc6 2.4 or higher needed")
libc.inotify_init.argtypes = []
libc.inotify_init.restype = ctypes.c_int
libc.inotify_rm_watch.argtypes = [
ctypes.c_int, ctypes.c_int]
libc.inotify_rm_watch.restype = ctypes.c_int
libc.inotify_add_watch.argtypes = [
ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32]
libc.inotify_add_watch.restype = ctypes.c_int
name = ctypes.util.find_library('c')
if not name:
raise ImportError("Can't find C library.")
libc = ctypes.cdll.LoadLibrary(name)
initializeModule(libc)
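A minimal usage sketch of the functions above (illustrative only, assuming they are in scope): 0x100 is the IN_CREATE mask value and /tmp is an arbitrary path; real event handling in Twisted is done by twisted.internet.inotify rather than by raw os.read calls like this.

import os, struct

fd = init()
wd = add(fd, b'/tmp', 0x00000100)   # 0x100 == IN_CREATE
data = os.read(fd, 4096)            # blocks until an event arrives
# struct inotify_event: int wd; uint32 mask; uint32 cookie; uint32 len; char name[]
evwd, mask, cookie, namelen = struct.unpack('iIII', data[:16])
name = data[16:16 + namelen].rstrip(b'\0')
remove(fd, wd)
os.close(fd)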

File diff suppressed because it is too large


@@ -0,0 +1,668 @@
# -*- test-case-name: twisted.python.test.test_shellcomp -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
No public APIs are provided by this module. Internal use only.
This module implements dynamic tab-completion for any command that uses
twisted.python.usage. Currently, only zsh is supported. Bash support may
be added in the future.
Maintainer: Eric P. Mangold - twisted AT teratorn DOT org
In order for zsh completion to take place the shell must be able to find an
appropriate "stub" file ("completion function") that invokes this code and
displays the results to the user.
The stub used for Twisted commands is in the file C{twisted-completion.zsh},
which is also included in the official Zsh distribution at
C{Completion/Unix/Command/_twisted}. Use this file as a basis for completion
functions for your own commands. You should only need to change the first line
to something like C{#compdef mycommand}.
The main public documentation exists in the L{twisted.python.usage.Options}
docstring, the L{twisted.python.usage.Completions} docstring, and the
Options howto.
"""
import itertools, getopt, inspect
from twisted.python import reflect, util, usage
def shellComplete(config, cmdName, words, shellCompFile):
"""
Perform shell completion.
A completion function (shell script) is generated for the requested
shell and written to C{shellCompFile}, typically C{stdout}. The result
is then eval'd by the shell to produce the desired completions.
@type config: L{twisted.python.usage.Options}
@param config: The L{twisted.python.usage.Options} instance to generate
completions for.
@type cmdName: C{str}
@param cmdName: The name of the command we're generating completions for.
In the case of zsh, this is used to print an appropriate
"#compdef $CMD" line at the top of the output. This is
not necessary for the functionality of the system, but it
helps in debugging, since the output we produce is properly
formed and may be saved in a file and used as a stand-alone
completion function.
@type words: C{list} of C{str}
@param words: The raw command-line words passed to us by the shell
stub function. argv[0] has already been stripped off.
@type shellCompFile: C{file}
@param shellCompFile: The file to write completion data to.
"""
# shellName is provided for forward-compatibility. It is not used,
# since we currently only support zsh.
shellName, position = words[-1].split(":")
position = int(position)
# zsh gives the completion position ($CURRENT) as a 1-based index,
# and argv[0] has already been stripped off, so we subtract 2 to
# get the real 0-based index.
position -= 2
cWord = words[position]
# since the user may hit TAB at any time, we may have been called with an
# incomplete command-line that would generate getopt errors if parsed
# verbatim. However, we must do *some* parsing in order to determine if
# there is a specific subcommand that we need to provide completion for.
# So, to make the command-line more sane we work backwards from the
# current completion position and strip off all words until we find one
# that "looks" like a subcommand. It may in fact be the argument to a
# normal command-line option, but that won't matter for our purposes.
while position >= 1:
if words[position - 1].startswith("-"):
position -= 1
else:
break
words = words[:position]
subCommands = getattr(config, 'subCommands', None)
if subCommands:
# OK, this command supports sub-commands, so lets see if we have been
# given one.
# If the command-line arguments are not valid then we won't be able to
# sanely detect the sub-command, so just generate completions as if no
# sub-command was found.
args = None
try:
opts, args = getopt.getopt(words,
config.shortOpt, config.longOpt)
except getopt.error:
pass
if args:
# yes, we have a subcommand. Try to find it.
for (cmd, short, parser, doc) in config.subCommands:
if args[0] == cmd or args[0] == short:
subOptions = parser()
subOptions.parent = config
gen = ZshSubcommandBuilder(subOptions, config, cmdName,
shellCompFile)
gen.write()
return
# sub-command not given, or did not match any known sub-command names
genSubs = True
if cWord.startswith("-"):
# optimization: if the current word being completed starts
# with a hyphen then it can't be a sub-command, so skip
# the expensive generation of the sub-command list
genSubs = False
gen = ZshBuilder(config, cmdName, shellCompFile)
gen.write(genSubs=genSubs)
else:
gen = ZshBuilder(config, cmdName, shellCompFile)
gen.write()
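For illustration of the expected inputs (the HelloOptions class and the command name are made up; this module is internal, so the call is shown only to clarify the data flow): the words list is exactly what the zsh stub passes, with argv[0] already stripped and a final "shellName:$CURRENT" element.

import sys
from twisted.python import usage

class HelloOptions(usage.Options):
    optFlags = [['verbose', 'v', 'Be chatty.']]

# Complete "hello --v<TAB>"; zsh counts the cursor as being on word 2.
shellComplete(HelloOptions(), 'hello', ['--v', 'zsh:2'], sys.stdout)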
class SubcommandAction(usage.Completer):
def _shellCode(self, optName, shellType):
if shellType == usage._ZSH:
return '*::subcmd:->subcmd'
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class ZshBuilder(object):
"""
Constructs zsh code that will complete options for a given usage.Options
instance, possibly including a list of subcommand names.
Completions for options to subcommands won't be generated because this
class will never be used if the user is completing options for a specific
subcommand. (See L{ZshSubcommandBuilder} below)
@type options: L{twisted.python.usage.Options}
@ivar options: The L{twisted.python.usage.Options} instance defined for this
command.
@type cmdName: C{str}
@ivar cmdName: The name of the command we're generating completions for.
@type file: C{file}
@ivar file: The C{file} to write the completion function to.
"""
def __init__(self, options, cmdName, file):
self.options = options
self.cmdName = cmdName
self.file = file
def write(self, genSubs=True):
"""
Generate the completion function and write it to the output file
@return: C{None}
@type genSubs: C{bool}
@param genSubs: Flag indicating whether or not completions for the list
of subcommands should be generated. Only has an effect
if the C{subCommands} attribute has been defined on the
L{twisted.python.usage.Options} instance.
"""
if genSubs and getattr(self.options, 'subCommands', None) is not None:
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
gen.extraActions.insert(0, SubcommandAction())
gen.write()
self.file.write('local _zsh_subcmds_array\n_zsh_subcmds_array=(\n')
for (cmd, short, parser, desc) in self.options.subCommands:
self.file.write('"%s:%s"\n' % (cmd, desc))
self.file.write(")\n\n")
self.file.write('_describe "sub-command" _zsh_subcmds_array\n')
else:
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
gen.write()
class ZshSubcommandBuilder(ZshBuilder):
"""
Constructs zsh code that will complete options for a given usage.Options
instance, and also for a single sub-command. This will only be used in
the case where the user is completing options for a specific subcommand.
@type subOptions: L{twisted.python.usage.Options}
@ivar subOptions: The L{twisted.python.usage.Options} instance defined for
the sub command.
"""
def __init__(self, subOptions, *args):
self.subOptions = subOptions
ZshBuilder.__init__(self, *args)
def write(self):
"""
Generate the completion function and write it to the output file
@return: C{None}
"""
gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
gen.extraActions.insert(0, SubcommandAction())
gen.write()
gen = ZshArgumentsGenerator(self.subOptions, self.cmdName, self.file)
gen.write()
class ZshArgumentsGenerator(object):
"""
Generate a call to the zsh _arguments completion function
based on data in a usage.Options instance
@type options: L{twisted.python.usage.Options}
@ivar options: The L{twisted.python.usage.Options} instance to generate for
@type cmdName: C{str}
@ivar cmdName: The name of the command we're generating completions for.
@type file: C{file}
@ivar file: The C{file} to write the completion function to
The following non-constructor variables are populated by this class
with data gathered from the C{Options} instance passed in, and its
base classes.
@type descriptions: C{dict}
@ivar descriptions: A dict mapping long option names to alternate
descriptions. When this variable is defined, the descriptions
contained here will override those descriptions provided in the
optFlags and optParameters variables.
@type multiUse: C{list}
@ivar multiUse: An iterable containing those long option names which may
appear on the command line more than once. By default, options will
only be completed one time.
@type mutuallyExclusive: C{list} of C{tuple}
@ivar mutuallyExclusive: A sequence of sequences, with each sub-sequence
containing those long option names that are mutually exclusive. That is,
those options that cannot appear on the command line together.
@type optActions: C{dict}
@ivar optActions: A dict mapping long option names to shell "actions".
These actions define what may be completed as the argument to the
given option, and should be given as instances of
L{twisted.python.usage.Completer}.
Callables may instead be given for the values in this dict. The
callable should accept no arguments, and return a C{Completer}
instance used as the action.
@type extraActions: C{list} of C{twisted.python.usage.Completer}
@ivar extraActions: Extra arguments are those arguments typically
appearing at the end of the command-line, which are not associated
with any particular named option. That is, the arguments that are
given to the parseArgs() method of your usage.Options subclass.
"""
def __init__(self, options, cmdName, file):
self.options = options
self.cmdName = cmdName
self.file = file
self.descriptions = {}
self.multiUse = set()
self.mutuallyExclusive = []
self.optActions = {}
self.extraActions = []
for cls in reversed(inspect.getmro(options.__class__)):
data = getattr(cls, 'compData', None)
if data:
self.descriptions.update(data.descriptions)
self.optActions.update(data.optActions)
self.multiUse.update(data.multiUse)
self.mutuallyExclusive.extend(data.mutuallyExclusive)
# I don't see any sane way to aggregate extraActions, so just
# take the one at the top of the MRO (nearest the `options'
# instance).
if data.extraActions:
self.extraActions = data.extraActions
aCL = reflect.accumulateClassList
aCD = reflect.accumulateClassDict
optFlags = []
optParams = []
aCL(options.__class__, 'optFlags', optFlags)
aCL(options.__class__, 'optParameters', optParams)
for i, optList in enumerate(optFlags):
if len(optList) != 3:
optFlags[i] = util.padTo(3, optList)
for i, optList in enumerate(optParams):
if len(optList) != 5:
optParams[i] = util.padTo(5, optList)
self.optFlags = optFlags
self.optParams = optParams
paramNameToDefinition = {}
for optList in optParams:
paramNameToDefinition[optList[0]] = optList[1:]
self.paramNameToDefinition = paramNameToDefinition
flagNameToDefinition = {}
for optList in optFlags:
flagNameToDefinition[optList[0]] = optList[1:]
self.flagNameToDefinition = flagNameToDefinition
allOptionsNameToDefinition = {}
allOptionsNameToDefinition.update(paramNameToDefinition)
allOptionsNameToDefinition.update(flagNameToDefinition)
self.allOptionsNameToDefinition = allOptionsNameToDefinition
self.addAdditionalOptions()
# makes sure none of the Completions metadata references
# option names that don't exist. (great for catching typos)
self.verifyZshNames()
self.excludes = self.makeExcludesDict()
def write(self):
"""
Write the zsh completion code to the file given to __init__
@return: C{None}
"""
self.writeHeader()
self.writeExtras()
self.writeOptions()
self.writeFooter()
def writeHeader(self):
"""
This is the start of the code that calls _arguments
@return: C{None}
"""
self.file.write('#compdef %s\n\n'
'_arguments -s -A "-*" \\\n' % (self.cmdName,))
def writeOptions(self):
"""
Write out zsh code for each option in this command
@return: C{None}
"""
optNames = self.allOptionsNameToDefinition.keys()
optNames.sort()
for longname in optNames:
self.writeOpt(longname)
def writeExtras(self):
"""
Write out completion information for extra arguments appearing on the
command-line. These are extra positional arguments not associated
with a named option. That is, the stuff that gets passed to
Options.parseArgs().
@return: C{None}
@raise ValueError: if C{Completer} with C{repeat=True} is found and
is not the last item in the C{extraActions} list.
"""
for i, action in enumerate(self.extraActions):
descr = ""
if action._descr:
descr = action._descr
# a repeatable action must be the last action in the list
if action._repeat and i != len(self.extraActions) - 1:
raise ValueError("Completer with repeat=True must be "
"last item in Options.extraActions")
self.file.write(escape(action._shellCode('', usage._ZSH)))
self.file.write(' \\\n')
def writeFooter(self):
"""
Write the last bit of code that finishes the call to _arguments
@return: C{None}
"""
self.file.write('&& return 0\n')
def verifyZshNames(self):
"""
Ensure that none of the option names given in the metadata are typoed
@return: C{None}
@raise ValueError: Raised if unknown option names have been found.
"""
def err(name):
raise ValueError("Unknown option name \"%s\" found while\n"
"examining Completions instances on %s" % (
name, self.options))
for name in itertools.chain(self.descriptions, self.optActions,
self.multiUse):
if name not in self.allOptionsNameToDefinition:
err(name)
for seq in self.mutuallyExclusive:
for name in seq:
if name not in self.allOptionsNameToDefinition:
err(name)
def excludeStr(self, longname, buildShort=False):
"""
Generate an "exclusion string" for the given option
@type longname: C{str}
@param longname: The long option name (e.g. "verbose" instead of "v")
@type buildShort: C{bool}
@param buildShort: May be True to indicate we're building an excludes
string for the short option that corresponds to the given long opt.
@return: The generated C{str}
"""
if longname in self.excludes:
exclusions = self.excludes[longname].copy()
else:
exclusions = set()
# if longname isn't a multiUse option (can't appear on the cmd line more
# than once), then we have to exclude the short option if we're
# building for the long option, and vice versa.
if longname not in self.multiUse:
if buildShort is False:
short = self.getShortOption(longname)
if short is not None:
exclusions.add(short)
else:
exclusions.add(longname)
if not exclusions:
return ''
strings = []
for optName in exclusions:
if len(optName) == 1:
# short option
strings.append("-" + optName)
else:
strings.append("--" + optName)
strings.sort() # need deterministic order for reliable unit-tests
return "(%s)" % " ".join(strings)
def makeExcludesDict(self):
"""
@return: A C{dict} that maps each option name appearing in
self.mutuallyExclusive to a set of those option names that it is
mutually exclusive with (can't appear on the cmd line with).
"""
#create a mapping of long option name -> single character name
longToShort = {}
for optList in itertools.chain(self.optParams, self.optFlags):
if optList[1] != None:
longToShort[optList[0]] = optList[1]
excludes = {}
for lst in self.mutuallyExclusive:
for i, longname in enumerate(lst):
tmp = set(lst[:i] + lst[i+1:])
for name in tmp.copy():
if name in longToShort:
tmp.add(longToShort[name])
if longname in excludes:
excludes[longname] = excludes[longname].union(tmp)
else:
excludes[longname] = tmp
return excludes
def writeOpt(self, longname):
"""
Write out the zsh code for the given argument. This is just part of the
one big call to _arguments
@type longname: C{str}
@param longname: The long option name (e.g. "verbose" instead of "v")
@return: C{None}
"""
if longname in self.flagNameToDefinition:
# It's a flag option. Not one that takes a parameter.
longField = "--%s" % longname
else:
longField = "--%s=" % longname
short = self.getShortOption(longname)
if short != None:
shortField = "-" + short
else:
shortField = ''
descr = self.getDescription(longname)
descriptionField = descr.replace("[", "\[")
descriptionField = descriptionField.replace("]", "\]")
descriptionField = '[%s]' % descriptionField
actionField = self.getAction(longname)
if longname in self.multiUse:
multiField = '*'
else:
multiField = ''
longExclusionsField = self.excludeStr(longname)
if short:
#we have to write an extra line for the short option if we have one
shortExclusionsField = self.excludeStr(longname, buildShort=True)
self.file.write(escape('%s%s%s%s%s' % (shortExclusionsField,
multiField, shortField, descriptionField, actionField)))
self.file.write(' \\\n')
self.file.write(escape('%s%s%s%s%s' % (longExclusionsField,
multiField, longField, descriptionField, actionField)))
self.file.write(' \\\n')
def getAction(self, longname):
"""
Return a zsh "action" string for the given argument
@return: C{str}
"""
if longname in self.optActions:
if callable(self.optActions[longname]):
action = self.optActions[longname]()
else:
action = self.optActions[longname]
return action._shellCode(longname, usage._ZSH)
if longname in self.paramNameToDefinition:
return ':%s:_files' % (longname,)
return ''
def getDescription(self, longname):
"""
Return the description to be used for this argument
@return: C{str}
"""
#check if we have an alternate descr for this arg, and if so use it
if longname in self.descriptions:
return self.descriptions[longname]
#otherwise we have to get it from the optFlags or optParams
try:
descr = self.flagNameToDefinition[longname][1]
except KeyError:
try:
descr = self.paramNameToDefinition[longname][2]
except KeyError:
descr = None
if descr is not None:
return descr
# let's try to get it from the opt_foo method doc string if there is one
longMangled = longname.replace('-', '_') # this is what t.p.usage does
obj = getattr(self.options, 'opt_%s' % longMangled, None)
if obj is not None:
descr = descrFromDoc(obj)
if descr is not None:
return descr
return longname # we really ought to have a good description to use
def getShortOption(self, longname):
"""
Return the short option letter or None
@return: C{str} or C{None}
"""
optList = self.allOptionsNameToDefinition[longname]
return optList[0] or None
def addAdditionalOptions(self):
"""
Add additional options to the optFlags and optParams lists.
These will be defined by 'opt_foo' methods of the Options subclass
@return: C{None}
"""
methodsDict = {}
reflect.accumulateMethods(self.options, methodsDict, 'opt_')
methodToShort = {}
for name in methodsDict.copy():
if len(name) == 1:
methodToShort[methodsDict[name]] = name
del methodsDict[name]
for methodName, methodObj in methodsDict.items():
longname = methodName.replace('_', '-') # t.p.usage does this
# if this option is already defined by the optFlags or
# optParameters then we don't want to override that data
if longname in self.allOptionsNameToDefinition:
continue
descr = self.getDescription(longname)
short = None
if methodObj in methodToShort:
short = methodToShort[methodObj]
reqArgs = methodObj.im_func.func_code.co_argcount
if reqArgs == 2:
self.optParams.append([longname, short, None, descr])
self.paramNameToDefinition[longname] = [short, None, descr]
self.allOptionsNameToDefinition[longname] = [short, None, descr]
else:
# reqArgs must equal 1. self.options would have failed
# to instantiate if it had opt_ methods with bad signatures.
self.optFlags.append([longname, short, descr])
self.flagNameToDefinition[longname] = [short, descr]
self.allOptionsNameToDefinition[longname] = [short, None, descr]
def descrFromDoc(obj):
"""
Generate an appropriate description from docstring of the given object
"""
if obj.__doc__ is None or obj.__doc__.isspace():
return None
lines = [x.strip() for x in obj.__doc__.split("\n")
if x and not x.isspace()]
return " ".join(lines)
def escape(x):
"""
Shell escape the given string
Implementation borrowed from now-deprecated commands.mkarg() in the stdlib
"""
if '\'' not in x:
return '\'' + x + '\''
s = '"'
for c in x:
if c in '\\$"`':
s = s + '\\'
s = s + c
s = s + '"'
return s
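For reference, the metadata consumed above is declared on an Options subclass through usage.Completions; the keyword arguments map directly onto the descriptions, multiUse, mutuallyExclusive, optActions and extraActions attributes documented on ZshArgumentsGenerator. The command and option names below are invented for the example.

from twisted.python import usage

class FrobOptions(usage.Options):
    optFlags = [['quiet', 'q', 'Suppress output.']]
    optParameters = [['output', 'o', None, 'Output file.']]
    compData = usage.Completions(
        descriptions={'output': 'file to write the result to'},
        multiUse=['quiet'],
        mutuallyExclusive=[('quiet', 'output')],
        optActions={'output': usage.CompleteFiles('*.txt')},
        extraActions=[usage.CompleteFiles(descr='input file')])

Calling ZshArgumentsGenerator(FrobOptions(), 'frob', sys.stdout).write() would then emit the corresponding _arguments call.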


@@ -0,0 +1,316 @@
# -*- test-case-name: twisted.python.test.test_textattributes -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module provides some common functionality for the manipulation of
formatting states.
Defining the mechanism by which text containing character attributes is
constructed begins by subclassing L{CharacterAttributesMixin}.
Defining how a single formatting state is to be serialized begins by
subclassing L{_FormattingStateMixin}.
Serializing a formatting structure is done with L{flatten}.
@see: L{twisted.conch.insults.helper._FormattingState}
@see: L{twisted.conch.insults.text._CharacterAttributes}
@see: L{twisted.words.protocols.irc._FormattingState}
@see: L{twisted.words.protocols.irc._CharacterAttributes}
"""
from twisted.python.util import FancyEqMixin
class _Attribute(object, FancyEqMixin):
"""
A text attribute.
Indexing a text attribute with a C{str} or another text attribute adds that
object as a child, indexing with a C{list} or C{tuple} adds the elements as
children; in either case C{self} is returned.
@type children: C{list}
@ivar children: Child attributes.
"""
compareAttributes = ('children',)
def __init__(self):
self.children = []
def __repr__(self):
return '<%s %r>' % (type(self).__name__, vars(self))
def __getitem__(self, item):
assert isinstance(item, (list, tuple, _Attribute, str))
if isinstance(item, (list, tuple)):
self.children.extend(item)
else:
self.children.append(item)
return self
def serialize(self, write, attrs=None, attributeRenderer='toVT102'):
"""
Serialize the text attribute and its children.
@param write: C{callable}, taking one C{str} argument, called to output
a single text attribute at a time.
@param attrs: A formatting state instance used to determine how to
serialize the attribute children.
@type attributeRenderer: C{str}
@param attributeRenderer: Name of the method on L{attrs} that should be
called to render the attributes during serialization. Defaults to
C{'toVT102'}.
"""
if attrs is None:
attrs = DefaultFormattingState()
for ch in self.children:
if isinstance(ch, _Attribute):
ch.serialize(write, attrs.copy(), attributeRenderer)
else:
renderMeth = getattr(attrs, attributeRenderer)
write(renderMeth())
write(ch)
class _NormalAttr(_Attribute):
"""
A text attribute for normal text.
"""
def serialize(self, write, attrs, attributeRenderer):
attrs.__init__()
_Attribute.serialize(self, write, attrs, attributeRenderer)
class _OtherAttr(_Attribute):
"""
A text attribute for text with formatting attributes.
The unary minus operator returns the inverse of this attribute, where that
makes sense.
@type attrname: C{str}
@ivar attrname: Text attribute name.
@ivar attrvalue: Text attribute value.
"""
compareAttributes = ('attrname', 'attrvalue', 'children')
def __init__(self, attrname, attrvalue):
_Attribute.__init__(self)
self.attrname = attrname
self.attrvalue = attrvalue
def __neg__(self):
result = _OtherAttr(self.attrname, not self.attrvalue)
result.children.extend(self.children)
return result
def serialize(self, write, attrs, attributeRenderer):
attrs = attrs._withAttribute(self.attrname, self.attrvalue)
_Attribute.serialize(self, write, attrs, attributeRenderer)
class _ColorAttr(_Attribute):
"""
Generic color attribute.
@param color: Color value.
@param ground: Foreground or background attribute name.
"""
compareAttributes = ('color', 'ground', 'children')
def __init__(self, color, ground):
_Attribute.__init__(self)
self.color = color
self.ground = ground
def serialize(self, write, attrs, attributeRenderer):
attrs = attrs._withAttribute(self.ground, self.color)
_Attribute.serialize(self, write, attrs, attributeRenderer)
class _ForegroundColorAttr(_ColorAttr):
"""
Foreground color attribute.
"""
def __init__(self, color):
_ColorAttr.__init__(self, color, 'foreground')
class _BackgroundColorAttr(_ColorAttr):
"""
Background color attribute.
"""
def __init__(self, color):
_ColorAttr.__init__(self, color, 'background')
class _ColorAttribute(object):
"""
A color text attribute.
Attribute access results in a color value lookup, by name, in L{attrs}.
@type ground: L{_ColorAttr}
@param ground: Foreground or background color attribute to look color names
up from.
@param attrs: Mapping of color names to color values.
"""
def __init__(self, ground, attrs):
self.ground = ground
self.attrs = attrs
def __getattr__(self, name):
try:
return self.ground(self.attrs[name])
except KeyError:
raise AttributeError(name)
class CharacterAttributesMixin(object):
"""
Mixin for character attributes that implements a C{__getattr__} method
returning a new C{_NormalAttr} instance when attempting to access
a C{'normal'} attribute; otherwise a new C{_OtherAttr} instance is returned
for names that appear in the C{'attrs'} attribute.
"""
def __getattr__(self, name):
if name == 'normal':
return _NormalAttr()
if name in self.attrs:
return _OtherAttr(name, True)
raise AttributeError(name)
class DefaultFormattingState(object, FancyEqMixin):
"""
A character attribute that does nothing, thus applying no attributes to
text.
"""
compareAttributes = ('_dummy',)
_dummy = 0
def copy(self):
"""
Make a copy of this formatting state.
@return: A formatting state instance.
"""
return type(self)()
def _withAttribute(self, name, value):
"""
Add a character attribute to a copy of this formatting state.
@param name: Attribute name to be added to formatting state.
@param value: Attribute value.
@return: A formatting state instance with the new attribute.
"""
return self.copy()
def toVT102(self):
"""
Emit a VT102 control sequence that will set up all the attributes this
formatting state has set.
@return: A string containing VT102 control sequences that mimic this
formatting state.
"""
return ''
class _FormattingStateMixin(DefaultFormattingState):
"""
Mixin for the formatting state/attributes of a single character.
"""
def copy(self):
c = DefaultFormattingState.copy(self)
c.__dict__.update(vars(self))
return c
def _withAttribute(self, name, value):
if getattr(self, name) != value:
attr = self.copy()
attr._subtracting = not value
setattr(attr, name, value)
return attr
else:
return self.copy()
def flatten(output, attrs, attributeRenderer='toVT102'):
"""
Serialize a sequence of characters with attribute information
The resulting string can be interpreted by compatible software so that the
contained characters are displayed and, for those attributes which are
supported by the software, the attributes expressed. The exact result of
the serialization depends on the behavior of the method specified by
L{attributeRenderer}.
For example, if your terminal is VT102 compatible, you might run
this for a colorful variation on the \"hello world\" theme::
from twisted.conch.insults.text import flatten, attributes as A
from twisted.conch.insults.helper import CharacterAttribute
print flatten(
A.normal[A.bold[A.fg.red['He'], A.fg.green['ll'], A.fg.magenta['o'], ' ',
A.fg.yellow['Wo'], A.fg.blue['rl'], A.fg.cyan['d!']]],
CharacterAttribute())
@param output: Object returned by accessing attributes of the
module-level attributes object.
@param attrs: A formatting state instance used to determine how to
serialize C{output}.
@type attributeRenderer: C{str}
@param attributeRenderer: Name of the method on L{attrs} that should be
called to render the attributes during serialization. Defaults to
C{'toVT102'}.
@return: A string expressing the text and display attributes specified by
L{output}.
"""
flattened = []
output.serialize(flattened.append, attrs, attributeRenderer)
return ''.join(flattened)
__all__ = [
'flatten', 'DefaultFormattingState', 'CharacterAttributesMixin']
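A small self-contained sketch of how these pieces combine (illustrative only; the _State and _Attrs classes and the '<bold>' rendering are invented here, whereas real renderers such as twisted.conch.insults.helper emit VT102 sequences):

class _State(_FormattingStateMixin):
    bold = False
    def toVT102(self):
        return '<bold>' if self.bold else ''

class _Attrs(CharacterAttributesMixin):
    attrs = {'bold': True}

A = _Attrs()
print(flatten(A.normal['plain ', A.bold['loud']], _State()))
# -> plain <bold>loud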


@@ -0,0 +1,446 @@
# -*- test-case-name: twisted.test.test_compat -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Compatibility module to provide backwards compatibility for useful Python
features.
This is mainly for use by internal Twisted code. We encourage you to use
the latest version of Python directly from your code, if possible.
@var unicode: The type of Unicode strings, C{unicode} on Python 2 and C{str}
on Python 3.
@var NativeStringIO: An in-memory file-like object that operates on the native
string type (bytes in Python 2, unicode in Python 3).
"""
from __future__ import division
import sys, string, socket, struct
if sys.version_info < (3, 0):
_PY3 = False
else:
_PY3 = True
def inet_pton(af, addr):
if af == socket.AF_INET:
return socket.inet_aton(addr)
elif af == getattr(socket, 'AF_INET6', 'AF_INET6'):
if [x for x in addr if x not in string.hexdigits + ':.']:
raise ValueError("Illegal characters: %r" % (''.join(x),))
parts = addr.split(':')
elided = parts.count('')
ipv4Component = '.' in parts[-1]
if len(parts) > (8 - ipv4Component) or elided > 3:
raise ValueError("Syntactically invalid address")
if elided == 3:
return '\x00' * 16
if elided:
zeros = ['0'] * (8 - len(parts) - ipv4Component + elided)
if addr.startswith('::'):
parts[:2] = zeros
elif addr.endswith('::'):
parts[-2:] = zeros
else:
idx = parts.index('')
parts[idx:idx+1] = zeros
if len(parts) != 8 - ipv4Component:
raise ValueError("Syntactically invalid address")
else:
if len(parts) != (8 - ipv4Component):
raise ValueError("Syntactically invalid address")
if ipv4Component:
if parts[-1].count('.') != 3:
raise ValueError("Syntactically invalid address")
rawipv4 = socket.inet_aton(parts[-1])
unpackedipv4 = struct.unpack('!HH', rawipv4)
parts[-1:] = [hex(x)[2:] for x in unpackedipv4]
parts = [int(x, 16) for x in parts]
return struct.pack('!8H', *parts)
else:
raise socket.error(97, 'Address family not supported by protocol')
def inet_ntop(af, addr):
if af == socket.AF_INET:
return socket.inet_ntoa(addr)
elif af == socket.AF_INET6:
if len(addr) != 16:
raise ValueError("address length incorrect")
parts = struct.unpack('!8H', addr)
curBase = bestBase = None
for i in range(8):
if not parts[i]:
if curBase is None:
curBase = i
curLen = 0
curLen += 1
else:
if curBase is not None:
if bestBase is None or curLen > bestLen:
bestBase = curBase
bestLen = curLen
curBase = None
if curBase is not None and (bestBase is None or curLen > bestLen):
bestBase = curBase
bestLen = curLen
parts = [hex(x)[2:] for x in parts]
if bestBase is not None:
parts[bestBase:bestBase + bestLen] = ['']
if parts[0] == '':
parts.insert(0, '')
if parts[-1] == '':
parts.insert(len(parts) - 1, '')
return ':'.join(parts)
else:
raise socket.error(97, 'Address family not supported by protocol')
try:
socket.AF_INET6
except AttributeError:
socket.AF_INET6 = 'AF_INET6'
try:
socket.inet_pton(socket.AF_INET6, "::")
except (AttributeError, NameError, socket.error):
socket.inet_pton = inet_pton
socket.inet_ntop = inet_ntop
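For example, a quick round trip (the address is arbitrary; the same result is produced whether the platform functions or the fallbacks above are in use):

packed = socket.inet_pton(socket.AF_INET6, '2001:db8::1')
socket.inet_ntop(socket.AF_INET6, packed)   # -> '2001:db8::1'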
adict = dict
if _PY3:
# These are actually useless in Python 2 as well, but we need to go
# through deprecation process there (ticket #5895):
del adict, inet_pton, inet_ntop
set = set
frozenset = frozenset
try:
from functools import reduce
except ImportError:
reduce = reduce
def execfile(filename, globals, locals=None):
"""
Execute a Python script in the given namespaces.
Similar to the execfile builtin, but a namespace is mandatory, partly
because that's a sensible thing to require, and because otherwise we'd
have to do some frame hacking.
This is a compatibility implementation for Python 3 porting, to avoid the
use of the deprecated builtin C{execfile} function.
"""
if locals is None:
locals = globals
fin = open(filename, "rbU")
try:
source = fin.read()
finally:
fin.close()
code = compile(source, filename, "exec")
exec(code, globals, locals)
try:
cmp = cmp
except NameError:
def cmp(a, b):
"""
Compare two objects.
Returns a negative number if C{a < b}, zero if they are equal, and a
positive number if C{a > b}.
"""
if a < b:
return -1
elif a == b:
return 0
else:
return 1
def comparable(klass):
"""
Class decorator that ensures support for the special C{__cmp__} method.
On Python 2 this does nothing.
On Python 3, C{__eq__}, C{__lt__}, etc. methods are added to the class,
relying on C{__cmp__} to implement their comparisons.
"""
# On Python 2, __cmp__ will just work, so no need to add extra methods:
if not _PY3:
return klass
def __eq__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c == 0
def __ne__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c != 0
def __lt__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c < 0
def __le__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c <= 0
def __gt__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c > 0
def __ge__(self, other):
c = self.__cmp__(other)
if c is NotImplemented:
return c
return c >= 0
klass.__lt__ = __lt__
klass.__gt__ = __gt__
klass.__le__ = __le__
klass.__ge__ = __ge__
klass.__eq__ = __eq__
klass.__ne__ = __ne__
return klass
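An illustrative use of the decorator (the Version class is invented): define __cmp__ once and @comparable supplies the rich comparison methods needed on Python 3.

from twisted.python.compat import cmp, comparable

@comparable
class Version(object):
    def __init__(self, major):
        self.major = major
    def __cmp__(self, other):
        if not isinstance(other, Version):
            return NotImplemented
        return cmp(self.major, other.major)

Version(1) < Version(2)   # True on both Python 2 and Python 3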
if _PY3:
unicode = str
else:
unicode = unicode
def nativeString(s):
"""
Convert C{bytes} or C{unicode} to the native C{str} type, using ASCII
encoding if conversion is necessary.
@raise UnicodeError: The input string is not ASCII encodable/decodable.
@raise TypeError: The input is neither C{bytes} nor C{unicode}.
"""
if not isinstance(s, (bytes, unicode)):
raise TypeError("%r is neither bytes nor unicode" % s)
if _PY3:
if isinstance(s, bytes):
return s.decode("ascii")
else:
# Ensure we're limited to ASCII subset:
s.encode("ascii")
else:
if isinstance(s, unicode):
return s.encode("ascii")
else:
# Ensure we're limited to ASCII subset:
s.decode("ascii")
return s
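For example, byte and text inputs both normalise to the native str type:

nativeString(b'route')   # -> 'route' (str) on Python 2 and Python 3
nativeString(u'route')   # -> 'route' (str) on Python 2 and Python 3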
if _PY3:
def reraise(exception, traceback):
raise exception.with_traceback(traceback)
else:
exec("""def reraise(exception, traceback):
raise exception.__class__, exception, traceback""")
reraise.__doc__ = """
Re-raise an exception, with an optional traceback, in a way that is compatible
with both Python 2 and Python 3.
Note that on Python 3, re-raised exceptions will be mutated, with their
C{__traceback__} attribute being set.
@param exception: The exception instance.
@param traceback: The traceback to use, or C{None} indicating a new traceback.
"""
if _PY3:
from io import StringIO as NativeStringIO
else:
from io import BytesIO as NativeStringIO
# Functions for dealing with Python 3's bytes type, which is somewhat
# different than Python 2's:
if _PY3:
def iterbytes(originalBytes):
for i in range(len(originalBytes)):
yield originalBytes[i:i+1]
def intToBytes(i):
return ("%d" % i).encode("ascii")
# Ideally we would use memoryview, but it has a number of differences from
# the Python 2 buffer() that make that impractical
# (http://bugs.python.org/issue15945, incompatibility with pyOpenSSL due to
# PyArg_ParseTuple differences.)
def lazyByteSlice(object, offset=0, size=None):
"""
Return a copy of the given bytes-like object.
If an offset is given, the copy starts at that offset. If a size is
given, the copy will only be of that length.
@param object: C{bytes} to be copied.
@param offset: C{int}, starting index of copy.
@param size: Optional, if an C{int} is given limit the length of copy
to this size.
"""
if size is None:
return object[offset:]
else:
return object[offset:(offset + size)]
def networkString(s):
if not isinstance(s, unicode):
raise TypeError("Can only convert text to bytes on Python 3")
return s.encode('ascii')
else:
def iterbytes(originalBytes):
return originalBytes
def intToBytes(i):
return b"%d" % i
lazyByteSlice = buffer
def networkString(s):
if not isinstance(s, str):
raise TypeError("Can only pass-through bytes on Python 2")
# Ensure we're limited to ASCII subset:
s.decode('ascii')
return s
iterbytes.__doc__ = """
Return an iterable wrapper for a C{bytes} object that provides the behavior of
iterating over C{bytes} on Python 2.
In particular, the results of iteration are the individual bytes (rather than
integers as on Python 3).
@param originalBytes: A C{bytes} object that will be wrapped.
"""
intToBytes.__doc__ = """
Convert the given integer into C{bytes}, as an ASCII-encoded Arabic numeral.
In other words, this is equivalent to calling C{bytes} in Python 2 on an
integer.
@param i: The C{int} to convert to C{bytes}.
@rtype: C{bytes}
"""
networkString.__doc__ = """
Convert the native string type to C{bytes} if it is not already C{bytes}, using
ASCII encoding if conversion is necessary.
This is useful for sending text-like bytes that are constructed using string
interpolation. For example, this is safe on Python 2 and Python 3:
networkString("Hello %d" % (n,))
@param s: A native string to convert to bytes if necessary.
@type s: C{str}
@raise UnicodeError: The input string is not ASCII encodable/decodable.
@raise TypeError: The input is neither C{bytes} nor C{unicode}.
@rtype: C{bytes}
"""
try:
StringType = basestring
except NameError:
# Python 3+
StringType = str
try:
from types import InstanceType
except ImportError:
# Python 3+
InstanceType = object
try:
from types import FileType
except ImportError:
from io import IOBase
# Python 3+
FileType = IOBase
__all__ = [
"reraise",
"execfile",
"frozenset",
"reduce",
"set",
"cmp",
"comparable",
"nativeString",
"NativeStringIO",
"networkString",
"unicode",
"iterbytes",
"intToBytes",
"lazyByteSlice",
"StringType",
"InstanceType",
"FileType",
]


@@ -0,0 +1,443 @@
# -*- test-case-name: twisted.python.test.test_components -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Component architecture for Twisted, based on Zope3 components.
Using the Zope3 API directly is strongly recommended. Everything
you need is in the top-level of the zope.interface package, e.g.::
from zope.interface import Interface, implements
class IFoo(Interface):
pass
class Foo:
implements(IFoo)
print IFoo.implementedBy(Foo) # True
print IFoo.providedBy(Foo()) # True
L{twisted.python.components.registerAdapter} from this module may be used to
add to Twisted's global adapter registry.
L{twisted.python.components.proxyForInterface} is a factory for classes
which allow access to only the parts of another class defined by a specified
interface.
"""
from __future__ import division, absolute_import
# zope3 imports
from zope.interface import interface, declarations
from zope.interface.adapter import AdapterRegistry
# twisted imports
from twisted.python.compat import NativeStringIO
from twisted.python import reflect
# Twisted's global adapter registry
globalRegistry = AdapterRegistry()
# Attribute that registerAdapter looks at. Is this supposed to be public?
ALLOW_DUPLICATES = 0
# Define a function to find the registered adapter factory, using either a
# version of Zope Interface which has the `registered' method or an older
# version which does not.
if getattr(AdapterRegistry, 'registered', None) is None:
def _registered(registry, required, provided):
"""
Return the adapter factory for the given parameters in the given
registry, or None if there is not one.
"""
return registry.get(required).selfImplied.get(provided, {}).get('')
else:
def _registered(registry, required, provided):
"""
Return the adapter factory for the given parameters in the given
registry, or None if there is not one.
"""
return registry.registered([required], provided)
def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
"""Register an adapter class.
An adapter class is expected to implement the given interface, by
adapting instances implementing 'origInterface'. An adapter class's
__init__ method should accept one parameter, an instance implementing
'origInterface'.
"""
self = globalRegistry
assert interfaceClasses, "You need to pass an Interface"
global ALLOW_DUPLICATES
# deal with class->interface adapters:
if not isinstance(origInterface, interface.InterfaceClass):
origInterface = declarations.implementedBy(origInterface)
for interfaceClass in interfaceClasses:
factory = _registered(self, origInterface, interfaceClass)
if factory is not None and not ALLOW_DUPLICATES:
raise ValueError("an adapter (%s) was already registered." % (factory, ))
for interfaceClass in interfaceClasses:
self.register([origInterface], interfaceClass, '', adapterFactory)
def getAdapterFactory(fromInterface, toInterface, default):
"""Return registered adapter for a given class and interface.
Note that this is tied to the *Twisted* global registry, and will
thus not find adapters registered elsewhere.
"""
self = globalRegistry
if not isinstance(fromInterface, interface.InterfaceClass):
fromInterface = declarations.implementedBy(fromInterface)
factory = self.lookup1(fromInterface, toInterface)
if factory is None:
factory = default
return factory
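A short sketch of registering an adapter in the global registry and letting the adapter hook apply it (the IName interface, Thing class and ThingName adapter are invented for the example):

from zope.interface import Attribute, Interface, implementer
from twisted.python.components import registerAdapter

class IName(Interface):
    name = Attribute("A display name.")

class Thing(object):
    pass

@implementer(IName)
class ThingName(object):
    def __init__(self, original):
        self.name = original.__class__.__name__

registerAdapter(ThingName, Thing, IName)
IName(Thing()).name   # -> 'Thing', adapted via the global adapter hook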
def _addHook(registry):
"""
Add an adapter hook which will attempt to look up adapters in the given
registry.
@type registry: L{zope.interface.adapter.AdapterRegistry}
@return: The hook which was added, for later use with L{_removeHook}.
"""
lookup = registry.lookup1
def _hook(iface, ob):
factory = lookup(declarations.providedBy(ob), iface)
if factory is None:
return None
else:
return factory(ob)
interface.adapter_hooks.append(_hook)
return _hook
def _removeHook(hook):
"""
Remove a previously added adapter hook.
@param hook: An object previously returned by a call to L{_addHook}. This
will be removed from the list of adapter hooks.
"""
interface.adapter_hooks.remove(hook)
# add global adapter lookup hook for our newly created registry
_addHook(globalRegistry)
def getRegistry():
"""Returns the Twisted global
C{zope.interface.adapter.AdapterRegistry} instance.
"""
return globalRegistry
# FIXME: deprecate attribute somehow?
CannotAdapt = TypeError
class Adapter:
"""I am the default implementation of an Adapter for some interface.
This docstring contains a limerick, by popular demand::
Subclassing made Zope and TR
much harder to work with by far.
So before you inherit,
be sure to declare it
Adapter, not PyObject*
@cvar temporaryAdapter: If this is True, the adapter will not be
persisted on the Componentized.
@cvar multiComponent: If this adapter is persistent, whether it should be
automatically registered for all appropriate interfaces.
"""
# These attributes are used with Componentized.
temporaryAdapter = 0
multiComponent = 1
def __init__(self, original):
"""Set my 'original' attribute to be the object I am adapting.
"""
self.original = original
def __conform__(self, interface):
"""
I forward __conform__ to self.original if it has it, otherwise I
simply return None.
"""
if hasattr(self.original, "__conform__"):
return self.original.__conform__(interface)
return None
def isuper(self, iface, adapter):
"""
Forward isuper to self.original
"""
return self.original.isuper(iface, adapter)
class Componentized:
"""I am a mixin to allow you to be adapted in various ways persistently.
I define a list of persistent adapters. This is to allow adapter classes
to store system-specific state, and to be initialized on demand. The
getComponent method implements this. You must also register adapters for
this class for the interfaces that you wish to pass to getComponent.
Many other classes and utilities listed here are present in Zope3; this one
is specific to Twisted.
"""
persistenceVersion = 1
def __init__(self):
self._adapterCache = {}
def locateAdapterClass(self, klass, interfaceClass, default):
return getAdapterFactory(klass, interfaceClass, default)
def setAdapter(self, interfaceClass, adapterClass):
"""
Cache a provider for the given interface, by adapting C{self} using
the given adapter class.
"""
self.setComponent(interfaceClass, adapterClass(self))
def addAdapter(self, adapterClass, ignoreClass=0):
"""Utility method that calls addComponent. I take an adapter class and
instantiate it with myself as the first argument.
@return: The adapter instantiated.
"""
adapt = adapterClass(self)
self.addComponent(adapt, ignoreClass)
return adapt
def setComponent(self, interfaceClass, component):
"""
Cache a provider of the given interface.
"""
self._adapterCache[reflect.qual(interfaceClass)] = component
def addComponent(self, component, ignoreClass=0):
"""
Add a component to me, for all appropriate interfaces.
In order to determine which interfaces are appropriate, the component's
provided interfaces will be scanned.
If the argument 'ignoreClass' is True, then all interfaces are
considered appropriate.
Otherwise, an 'appropriate' interface is one for which its class has
been registered as an adapter for my class according to the rules of
getComponent.
@return: the list of appropriate interfaces
"""
for iface in declarations.providedBy(component):
if (ignoreClass or
(self.locateAdapterClass(self.__class__, iface, None)
== component.__class__)):
self._adapterCache[reflect.qual(iface)] = component
def unsetComponent(self, interfaceClass):
"""Remove my component specified by the given interface class."""
del self._adapterCache[reflect.qual(interfaceClass)]
def removeComponent(self, component):
"""
Remove the given component from me entirely, for all interfaces for which
it has been registered.
@return: a list of the interfaces that were removed.
"""
l = []
for k, v in list(self._adapterCache.items()):
if v is component:
del self._adapterCache[k]
l.append(reflect.namedObject(k))
return l
def getComponent(self, interface, default=None):
"""Create or retrieve an adapter for the given interface.
If such an adapter has already been created, retrieve it from the cache
that this instance keeps of all its adapters. Adapters created through
this mechanism may safely store system-specific state.
If you want to register an adapter that will be created through
getComponent, but you don't require (or don't want) your adapter to be
cached and kept alive for the lifetime of this Componentized object,
set the attribute 'temporaryAdapter' to True on your adapter class.
If you want to automatically register an adapter for all appropriate
interfaces (with addComponent), set the attribute 'multiComponent' to
True on your adapter class.
"""
k = reflect.qual(interface)
if k in self._adapterCache:
return self._adapterCache[k]
else:
adapter = interface.__adapt__(self)
if adapter is not None and not (
hasattr(adapter, "temporaryAdapter") and
adapter.temporaryAdapter):
self._adapterCache[k] = adapter
if (hasattr(adapter, "multiComponent") and
adapter.multiComponent):
self.addComponent(adapter)
if adapter is None:
return default
return adapter
def __conform__(self, interface):
return self.getComponent(interface)
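An illustrative Componentized session (the IGreeter interface and the Kitchen and Greeter classes are invented): the first getComponent call creates the adapter through the registry and caches it, so the second call returns the same instance.

from zope.interface import Interface, implementer
from twisted.python.components import Componentized, registerAdapter

class IGreeter(Interface):
    pass

class Kitchen(Componentized):
    pass

@implementer(IGreeter)
class Greeter(object):
    def __init__(self, original):
        self.original = original

registerAdapter(Greeter, Kitchen, IGreeter)
kitchen = Kitchen()
kitchen.getComponent(IGreeter) is kitchen.getComponent(IGreeter)   # True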
class ReprableComponentized(Componentized):
def __init__(self):
Componentized.__init__(self)
def __repr__(self):
from pprint import pprint
sio = NativeStringIO()
pprint(self._adapterCache, sio)
return sio.getvalue()
def proxyForInterface(iface, originalAttribute='original'):
"""
Create a class which proxies all method calls which adhere to an interface
to another provider of that interface.
This function is intended for creating specialized proxies. The typical way
to use it is by subclassing the result::
class MySpecializedProxy(proxyForInterface(IFoo)):
def someInterfaceMethod(self, arg):
if arg == 3:
return 3
return self.original.someInterfaceMethod(arg)
@param iface: The Interface to which the resulting object will conform, and
which the wrapped object must provide.
@param originalAttribute: name of the attribute used to save the original
object in the resulting class. Defaults to C{original}.
@type originalAttribute: C{str}
@return: A class whose constructor takes the original object as its only
argument. Constructing the class creates the proxy.
"""
def __init__(self, original):
setattr(self, originalAttribute, original)
contents = {"__init__": __init__}
for name in iface:
contents[name] = _ProxyDescriptor(name, originalAttribute)
proxy = type("(Proxy for %s)"
% (reflect.qual(iface),), (object,), contents)
declarations.classImplements(proxy, iface)
return proxy
class _ProxiedClassMethod(object):
"""
A proxied class method.
@ivar methodName: the name of the method which this should invoke when
called.
@type methodName: C{str}
@ivar originalAttribute: name of the attribute of the proxy where the
original object is stored.
@type originalAttribute: C{str}
"""
def __init__(self, methodName, originalAttribute):
self.methodName = methodName
self.originalAttribute = originalAttribute
def __call__(self, oself, *args, **kw):
"""
Invoke the specified L{methodName} method of the C{original} attribute
for proxyForInterface.
@param oself: an instance of a L{proxyForInterface} object.
@return: the result of the underlying method.
"""
original = getattr(oself, self.originalAttribute)
actualMethod = getattr(original, self.methodName)
return actualMethod(*args, **kw)
class _ProxyDescriptor(object):
"""
A descriptor which will proxy attribute access, mutation, and
deletion to the L{original} attribute of the object it is being accessed
from.
@ivar attributeName: the name of the attribute which this descriptor will
retrieve from instances' C{original} attribute.
@type attributeName: C{str}
@ivar originalAttribute: name of the attribute of the proxy where the
original object is stored.
@type originalAttribute: C{str}
"""
def __init__(self, attributeName, originalAttribute):
self.attributeName = attributeName
self.originalAttribute = originalAttribute
def __get__(self, oself, type=None):
"""
Retrieve the C{self.attributeName} property from L{oself}.
"""
if oself is None:
return _ProxiedClassMethod(self.attributeName,
self.originalAttribute)
original = getattr(oself, self.originalAttribute)
return getattr(original, self.attributeName)
def __set__(self, oself, value):
"""
Set the C{self.attributeName} property of L{oself}.
"""
original = getattr(oself, self.originalAttribute)
setattr(original, self.attributeName, value)
def __delete__(self, oself):
"""
Delete the C{self.attributeName} property of L{oself}.
"""
original = getattr(oself, self.originalAttribute)
delattr(original, self.attributeName)
__all__ = [
"registerAdapter", "getAdapterFactory",
"Adapter", "Componentized", "ReprableComponentized", "getRegistry",
"proxyForInterface",
]


@@ -0,0 +1,500 @@
# -*- test-case-name: twisted.python.test.test_constants -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Symbolic constant support, including collections and constants with text,
numeric, and bit flag values.
"""
from __future__ import division, absolute_import
__all__ = [
'NamedConstant', 'ValueConstant', 'FlagConstant',
'Names', 'Values', 'Flags']
from functools import partial
from itertools import count
from operator import and_, or_, xor
_unspecified = object()
_constantOrder = partial(next, count())
class _Constant(object):
"""
@ivar _index: A C{int} allocated from a shared counter in order to keep
track of the order in which L{_Constant}s are instantiated.
@ivar name: A C{str} giving the name of this constant; only set once the
constant is initialized by L{_ConstantsContainer}.
@ivar _container: The L{_ConstantsContainer} subclass this constant belongs
to; C{None} until the constant is initialized by that subclass.
"""
def __init__(self):
self._container = None
self._index = _constantOrder()
def __repr__(self):
"""
Return text identifying both which constant this is and which
collection it belongs to.
"""
return "<%s=%s>" % (self._container.__name__, self.name)
def __lt__(self, other):
"""
Implements C{<}. Order is defined by instantiation order.
@param other: An object.
@return: C{NotImplemented} if C{other} is not a constant belonging to
the same container as this constant, C{True} if this constant is
defined before C{other}, otherwise C{False}.
"""
if (
not isinstance(other, self.__class__) or
not self._container == other._container
):
return NotImplemented
return self._index < other._index
def __le__(self, other):
"""
Implements C{<=}. Order is defined by instantiation order.
@param other: An object.
@return: C{NotImplemented} if C{other} is not a constant belonging to
the same container as this constant, C{True} if this constant is
defined before or equal to C{other}, otherwise C{False}.
"""
if (
not isinstance(other, self.__class__) or
not self._container == other._container
):
return NotImplemented
return self is other or self._index < other._index
def __gt__(self, other):
"""
Implements C{>}. Order is defined by instantiation order.
@param other: An object.
@return: C{NotImplemented} if C{other} is not a constant belonging to
the same container as this constant, C{True} if this constant is
defined after C{other}, otherwise C{False}.
"""
if (
not isinstance(other, self.__class__) or
not self._container == other._container
):
return NotImplemented
return self._index > other._index
def __ge__(self, other):
"""
Implements C{>=}. Order is defined by instantiation order.
@param other: An object.
@return: C{NotImplemented} if C{other} is not a constant belonging to
the same container as this constant, C{True} if this constant is
defined after or equal to C{other}, otherwise C{False}.
"""
if (
not isinstance(other, self.__class__) or
not self._container == other._container
):
return NotImplemented
return self is other or self._index > other._index
def _realize(self, container, name, value):
"""
Complete the initialization of this L{_Constant}.
@param container: The L{_ConstantsContainer} subclass this constant is
part of.
@param name: The name of this constant in its container.
@param value: The value of this constant; not used, as named constants
have no value apart from their identity.
"""
self._container = container
self.name = name
class _ConstantsContainerType(type):
"""
L{_ConstantsContainerType} is a metaclass for creating constants container
classes.
"""
def __new__(self, name, bases, attributes):
"""
Create a new constants container class.
If C{attributes} includes a value of C{None} for the C{"_constantType"}
key, the new class will not be initialized as a constants container and
it will behave as a normal class.
@param name: The name of the container class.
@type name: L{str}
@param bases: A tuple of the base classes for the new container class.
@type bases: L{tuple} of L{_ConstantsContainerType} instances
@param attributes: The attributes of the new container class, including
any constants it is to contain.
@type attributes: L{dict}
"""
cls = super(_ConstantsContainerType, self).__new__(
self, name, bases, attributes)
# Only realize constants in concrete _ConstantsContainer subclasses.
# Ignore intermediate base classes.
constantType = getattr(cls, '_constantType', None)
if constantType is None:
return cls
constants = []
for (name, descriptor) in attributes.items():
if isinstance(descriptor, cls._constantType):
if descriptor._container is not None:
raise ValueError(
"Cannot use %s as the value of an attribute on %s" % (
descriptor, cls.__name__))
constants.append((descriptor._index, name, descriptor))
enumerants = {}
for (index, enumerant, descriptor) in sorted(constants):
value = cls._constantFactory(enumerant, descriptor)
descriptor._realize(cls, enumerant, value)
enumerants[enumerant] = descriptor
# Save the dictionary which contains *only* constants (distinct from
# any other attributes the application may have given the container)
# where the class can use it later (eg for lookupByName).
cls._enumerants = enumerants
return cls
# In Python3 metaclasses are defined using a C{metaclass} keyword argument in
# the class definition. This would cause a syntax error in Python2.
# So we use L{type} to introduce an intermediate base class with the desired
# metaclass.
# See:
# * http://docs.python.org/2/library/functions.html#type
# * http://docs.python.org/3/reference/datamodel.html#customizing-class-creation
class _ConstantsContainer(_ConstantsContainerType('', (object,), {})):
"""
L{_ConstantsContainer} is a class with attributes used as symbolic
constants. It is up to subclasses to specify what kind of constants are
allowed.
@cvar _constantType: Specified by a L{_ConstantsContainer} subclass to
specify the type of constants allowed by that subclass.
@cvar _enumerants: A C{dict} mapping the names of constants (eg
L{NamedConstant} instances) found in the class definition to those
instances.
"""
_constantType = None
def __new__(cls):
"""
Classes representing constants containers are not intended to be
instantiated.
The class object itself is used directly.
"""
raise TypeError("%s may not be instantiated." % (cls.__name__,))
@classmethod
def _constantFactory(cls, name, descriptor):
"""
Construct the value for a new constant to add to this container.
@param name: The name of the constant to create.
@param descriptor: An instance of a L{_Constant} subclass (eg
L{NamedConstant}) which is assigned to C{name}.
@return: L{NamedConstant} instances have no value apart from identity,
so return a meaningless dummy value.
"""
return _unspecified
@classmethod
def lookupByName(cls, name):
"""
Retrieve a constant by its name or raise a C{ValueError} if there is no
constant associated with that name.
@param name: A C{str} giving the name of one of the constants defined
by C{cls}.
@raise ValueError: If C{name} is not the name of one of the constants
defined by C{cls}.
@return: The L{NamedConstant} associated with C{name}.
"""
if name in cls._enumerants:
return getattr(cls, name)
raise ValueError(name)
@classmethod
def iterconstants(cls):
"""
Iteration over a L{Names} subclass results in all of the constants it
contains.
@return: an iterator the elements of which are the L{NamedConstant}
instances defined in the body of this L{Names} subclass.
"""
constants = cls._enumerants.values()
return iter(
sorted(constants, key=lambda descriptor: descriptor._index))
class NamedConstant(_Constant):
"""
L{NamedConstant} defines an attribute to be a named constant within a
collection defined by a L{Names} subclass.
L{NamedConstant} is only for use in the definition of L{Names}
subclasses. Do not instantiate L{NamedConstant} elsewhere and do not
subclass it.
"""
class Names(_ConstantsContainer):
"""
A L{Names} subclass contains constants which differ only in their names and
identities.
"""
_constantType = NamedConstant
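# Illustrative sketch, not part of the original module: defining and using a
# L{Names} collection.  The class and constant names below are hypothetical.
def _namesExample():
    class Method(Names):
        GET = NamedConstant()
        PUT = NamedConstant()
    # lookupByName returns the very same constant object.
    assert Method.lookupByName("GET") is Method.GET
    # Iteration yields the constants in definition order: [GET, PUT].
    return list(Method.iterconstants())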
class ValueConstant(_Constant):
"""
L{ValueConstant} defines an attribute to be a named constant within a
collection defined by a L{Values} subclass.
L{ValueConstant} is only for use in the definition of L{Values} subclasses.
Do not instantiate L{ValueConstant} elsewhere and do not subclass it.
"""
def __init__(self, value):
_Constant.__init__(self)
self.value = value
class Values(_ConstantsContainer):
"""
A L{Values} subclass contains constants which are associated with arbitrary
values.
"""
_constantType = ValueConstant
@classmethod
def lookupByValue(cls, value):
"""
Retrieve a constant by its value or raise a C{ValueError} if there is
no constant associated with that value.
@param value: The value of one of the constants defined by C{cls}.
@raise ValueError: If C{value} is not the value of one of the constants
defined by C{cls}.
@return: The L{ValueConstant} associated with C{value}.
"""
for constant in cls.iterconstants():
if constant.value == value:
return constant
raise ValueError(value)
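# Illustrative sketch, not part of the original module: a L{Values} collection
# with explicit values.  The names and numbers below are hypothetical.
def _valuesExample():
    class Status(Values):
        OK = ValueConstant(200)
        NOT_FOUND = ValueConstant(404)
    # lookupByValue searches the constants for one with a matching value.
    assert Status.lookupByValue(404) is Status.NOT_FOUND
    return Status.OK.value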
def _flagOp(op, left, right):
"""
Implement a binary operator for a L{FlagConstant} instance.
@param op: A two-argument callable implementing the binary operation. For
example, C{operator.or_}.
@param left: The left-hand L{FlagConstant} instance.
@param right: The right-hand L{FlagConstant} instance.
@return: A new L{FlagConstant} instance representing the result of the
operation.
"""
value = op(left.value, right.value)
names = op(left.names, right.names)
result = FlagConstant()
result._realize(left._container, names, value)
return result
class FlagConstant(_Constant):
"""
L{FlagConstant} defines an attribute to be a flag constant within a
collection defined by a L{Flags} subclass.
L{FlagConstant} is only for use in the definition of L{Flags} subclasses.
Do not instantiate L{FlagConstant} elsewhere and do not subclass it.
"""
def __init__(self, value=_unspecified):
_Constant.__init__(self)
self.value = value
def _realize(self, container, names, value):
"""
Complete the initialization of this L{FlagConstant}.
This implementation differs from other C{_realize} implementations in
that a L{FlagConstant} may have several names which apply to it, due to
flags being combined with various operators.
@param container: The L{Flags} subclass this constant is part of.
@param names: When a single-flag value is being initialized, a C{str}
giving the name of that flag. This is the case which happens when
a L{Flags} subclass is being initialized and L{FlagConstant}
instances from its body are being realized. Otherwise, a C{set} of
C{str} giving names of all the flags set on this L{FlagConstant}
instance. This is the case when two flags are combined using C{|},
for example.
"""
if isinstance(names, str):
name = names
names = set([names])
elif len(names) == 1:
(name,) = names
else:
name = "{" + ",".join(sorted(names)) + "}"
_Constant._realize(self, container, name, value)
self.value = value
self.names = names
def __or__(self, other):
"""
Define C{|} on two L{FlagConstant} instances to create a new
L{FlagConstant} instance with all flags set in either instance set.
"""
return _flagOp(or_, self, other)
def __and__(self, other):
"""
Define C{&} on two L{FlagConstant} instances to create a new
L{FlagConstant} instance with only flags set in both instances set.
"""
return _flagOp(and_, self, other)
def __xor__(self, other):
"""
Define C{^} on two L{FlagConstant} instances to create a new
L{FlagConstant} instance with only flags set on exactly one instance
set.
"""
return _flagOp(xor, self, other)
def __invert__(self):
"""
Define C{~} on a L{FlagConstant} instance to create a new
L{FlagConstant} instance with all flags not set on this instance set.
"""
result = FlagConstant()
result._realize(self._container, set(), 0)
for flag in self._container.iterconstants():
if flag.value & self.value == 0:
result |= flag
return result
def __iter__(self):
"""
@return: An iterator of flags set on this instance set.
"""
return (self._container.lookupByName(name) for name in self.names)
def __contains__(self, flag):
"""
@param flag: The flag to test for membership in this instance
set.
@return: C{True} if C{flag} is in this instance set, else
C{False}.
"""
# Optimization for testing membership without iteration.
return bool(flag & self)
def __nonzero__(self):
"""
@return: C{False} if this flag's value is 0, else C{True}.
"""
return bool(self.value)
__bool__ = __nonzero__
class Flags(Values):
"""
A L{Flags} subclass contains constants which can be combined using the
common bitwise operators (C{|}, C{&}, etc) similar to a I{bitvector} from a
language like C.
"""
_constantType = FlagConstant
_value = 1
@classmethod
def _constantFactory(cls, name, descriptor):
"""
For L{FlagConstant} instances with no explicitly defined value, assign
the next power of two as its value.
@param name: The name of the constant to create.
@param descriptor: An instance of a L{FlagConstant} which is assigned
to C{name}.
@return: Either the value passed to the C{descriptor} constructor, or
the next power of 2 value which will be assigned to C{descriptor},
relative to the value of the last defined L{FlagConstant}.
"""
if descriptor.value is _unspecified:
value = cls._value
cls._value <<= 1
else:
value = descriptor.value
cls._value = value << 1
return value
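# Illustrative sketch, not part of the original module: a L{Flags} collection
# whose members are automatically assigned power-of-two values and can be
# combined with the bitwise operators.  The names below are hypothetical.
def _flagsExample():
    class Permission(Flags):
        READ = FlagConstant()     # value 1
        WRITE = FlagConstant()    # value 2
        EXECUTE = FlagConstant()  # value 4
    combined = Permission.READ | Permission.WRITE
    # Membership testing uses the bitwise values above; 1 | 2 == 3.
    assert Permission.READ in combined
    assert Permission.EXECUTE not in combined
    return combined.value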

View file

@@ -0,0 +1,133 @@
# -*- test-case-name: twisted.test.test_context -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Dynamic pseudo-scoping for Python.
Call functions with context.call({key: value}, func); func and
functions that it calls will be able to use 'context.get(key)' to
retrieve 'value'.
This is thread-safe.
"""
from __future__ import division, absolute_import
from threading import local
defaultContextDict = {}
setDefault = defaultContextDict.__setitem__
class ContextTracker:
"""
A L{ContextTracker} provides a way to pass arbitrary key/value data up and
down a call stack without passing them as parameters to the functions on
that call stack.
This can be useful when functions on the top and bottom of the call stack
need to cooperate but the functions in between them do not allow passing the
necessary state. For example::
from twisted.python.context import call, get
def handleRequest(request):
call({'request-id': request.id}, renderRequest, request.url)
def renderRequest(url):
renderHeader(url)
renderBody(url)
def renderHeader(url):
return "the header"
def renderBody(url):
return "the body (request id=%r)" % (get("request-id"),)
This should be used sparingly, since the lack of a clear connection between
the two halves can result in code which is difficult to understand and
maintain.
@ivar contexts: A C{list} of C{dict}s tracking the context state. Each new
L{ContextTracker.callWithContext} pushes a new C{dict} onto this stack
for the duration of the call, making the data available to the function
        called and restoring the previous data once it is complete.
"""
def __init__(self):
self.contexts = [defaultContextDict]
def callWithContext(self, newContext, func, *args, **kw):
"""
Call C{func(*args, **kw)} such that the contents of C{newContext} will
be available for it to retrieve using L{getContext}.
@param newContext: A C{dict} of data to push onto the context for the
duration of the call to C{func}.
@param func: A callable which will be called.
@param *args: Any additional positional arguments to pass to C{func}.
@param **kw: Any additional keyword arguments to pass to C{func}.
@return: Whatever is returned by C{func}
@raise: Whatever is raised by C{func}.
"""
self.contexts.append(newContext)
try:
            return func(*args, **kw)
finally:
self.contexts.pop()
def getContext(self, key, default=None):
"""
Retrieve the value for a key from the context.
@param key: The key to look up in the context.
@param default: The value to return if C{key} is not found in the
context.
@return: The value most recently remembered in the context for C{key}.
"""
for ctx in reversed(self.contexts):
try:
return ctx[key]
except KeyError:
pass
return default
class ThreadedContextTracker(object):
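    """
    A wrapper around L{ContextTracker} which keeps a separate context stack
    for each thread, using C{threading.local} storage, so that contexts
    pushed in one thread are not visible from another.
    """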
def __init__(self):
self.storage = local()
def currentContext(self):
try:
return self.storage.ct
except AttributeError:
ct = self.storage.ct = ContextTracker()
return ct
def callWithContext(self, ctx, func, *args, **kw):
return self.currentContext().callWithContext(ctx, func, *args, **kw)
def getContext(self, key, default=None):
return self.currentContext().getContext(key, default)
def installContextTracker(ctr):
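    """
    Replace the shared context tracker and rebind the module-level C{call}
    and C{get} convenience names to the new tracker's C{callWithContext} and
    C{getContext} methods.
    """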
global theContextTracker
global call
global get
theContextTracker = ctr
call = theContextTracker.callWithContext
get = theContextTracker.getContext
installContextTracker(ThreadedContextTracker())
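# Illustrative sketch, not part of the original module: retrieve a value set
# further up the call stack through the module-level C{call} and C{get}
# helpers bound above.  The key and value used here are hypothetical.
def _contextExample():
    def inner():
        return get("answer", "missing")
    # Returns 42 inside the call, while callers outside it still see "missing".
    return call({"answer": 42}, inner)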

View file

@@ -0,0 +1,613 @@
# -*- test-case-name: twisted.python.test.test_deprecate -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Deprecation framework for Twisted.
To mark a method or function as being deprecated do this::
from twisted.python.versions import Version
from twisted.python.deprecate import deprecated
@deprecated(Version("Twisted", 8, 0, 0))
def badAPI(self, first, second):
'''
Docstring for badAPI.
'''
...
The newly-decorated badAPI will issue a warning when called. It will also have
a deprecation notice appended to its docstring.
To mark module-level attributes as being deprecated you can use::
badAttribute = "someValue"
...
deprecatedModuleAttribute(
Version("Twisted", 8, 0, 0),
"Use goodAttribute instead.",
"your.full.module.name",
"badAttribute")
The deprecated attributes will issue a warning whenever they are accessed. If
the attributes being deprecated are in the same module as the
L{deprecatedModuleAttribute} call is being made from, the C{__name__} global
can be used as the C{moduleName} parameter.
See also L{Version}.
@type DEPRECATION_WARNING_FORMAT: C{str}
@var DEPRECATION_WARNING_FORMAT: The default deprecation warning string format
to use when one is not provided by the user.
"""
from __future__ import division, absolute_import
__all__ = [
'deprecated',
'getDeprecationWarningString',
'getWarningMethod',
'setWarningMethod',
'deprecatedModuleAttribute',
]
import sys, inspect
from warnings import warn, warn_explicit
from dis import findlinestarts
from functools import wraps
from twisted.python.versions import getVersionString
DEPRECATION_WARNING_FORMAT = '%(fqpn)s was deprecated in %(version)s'
# Notionally, part of twisted.python.reflect, but defining it there causes a
# cyclic dependency between this module and that module. Define it here,
# instead, and let reflect import it to re-expose to the public.
def _fullyQualifiedName(obj):
"""
Return the fully qualified name of a module, class, method or function.
Classes and functions need to be module level ones to be correctly
qualified.
@rtype: C{str}.
"""
try:
name = obj.__qualname__
except AttributeError:
name = obj.__name__
if inspect.isclass(obj) or inspect.isfunction(obj):
moduleName = obj.__module__
return "%s.%s" % (moduleName, name)
elif inspect.ismethod(obj):
try:
cls = obj.im_class
except AttributeError:
# Python 3 eliminates im_class, substitutes __module__ and
# __qualname__ to provide similar information.
return "%s.%s" % (obj.__module__, obj.__qualname__)
else:
className = _fullyQualifiedName(cls)
return "%s.%s" % (className, name)
return name
# Try to keep it looking like something in twisted.python.reflect.
_fullyQualifiedName.__module__ = 'twisted.python.reflect'
_fullyQualifiedName.__name__ = 'fullyQualifiedName'
_fullyQualifiedName.__qualname__ = 'fullyQualifiedName'
def _getReplacementString(replacement):
"""
Surround a replacement for a deprecated API with some polite text exhorting
the user to consider it as an alternative.
@type replacement: C{str} or callable
@return: a string like "please use twisted.python.modules.getModule
instead".
"""
if callable(replacement):
replacement = _fullyQualifiedName(replacement)
return "please use %s instead" % (replacement,)
def _getDeprecationDocstring(version, replacement=None):
"""
Generate an addition to a deprecated object's docstring that explains its
deprecation.
@param version: the version it was deprecated.
@type version: L{Version}
@param replacement: The replacement, if specified.
@type replacement: C{str} or callable
@return: a string like "Deprecated in Twisted 27.2.0; please use
twisted.timestream.tachyon.flux instead."
"""
doc = "Deprecated in %s" % (getVersionString(version),)
if replacement:
doc = "%s; %s" % (doc, _getReplacementString(replacement))
return doc + "."
def _getDeprecationWarningString(fqpn, version, format=None, replacement=None):
"""
Return a string indicating that the Python name was deprecated in the given
version.
@param fqpn: Fully qualified Python name of the thing being deprecated
@type fqpn: C{str}
@param version: Version that C{fqpn} was deprecated in.
@type version: L{twisted.python.versions.Version}
@param format: A user-provided format to interpolate warning values into, or
L{DEPRECATION_WARNING_FORMAT
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if C{None} is
given.
@type format: C{str}
@param replacement: what should be used in place of C{fqpn}. Either pass in
a string, which will be inserted into the warning message, or a
callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
@return: A textual description of the deprecation
@rtype: C{str}
"""
if format is None:
format = DEPRECATION_WARNING_FORMAT
warningString = format % {
'fqpn': fqpn,
'version': getVersionString(version)}
if replacement:
warningString = "%s; %s" % (
warningString, _getReplacementString(replacement))
return warningString
def getDeprecationWarningString(callableThing, version, format=None,
replacement=None):
"""
Return a string indicating that the callable was deprecated in the given
version.
@type callableThing: C{callable}
@param callableThing: Callable object to be deprecated
@type version: L{twisted.python.versions.Version}
@param version: Version that C{callableThing} was deprecated in
@type format: C{str}
@param format: A user-provided format to interpolate warning values into,
or L{DEPRECATION_WARNING_FORMAT
<twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if C{None} is
given
@param replacement: what should be used in place of the callable. Either
pass in a string, which will be inserted into the warning message,
or a callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
@return: A string describing the deprecation.
@rtype: C{str}
"""
return _getDeprecationWarningString(
_fullyQualifiedName(callableThing), version, format, replacement)
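# Illustrative sketch, not part of the original module: what the generated
# warning text for a hypothetical callable roughly looks like.
def _deprecationStringExample():
    from twisted.python.versions import Version
    def oldAPI():
        pass
    # With the default format this is something like
    # "...oldAPI was deprecated in Twisted 8.0.0".
    return getDeprecationWarningString(oldAPI, Version("Twisted", 8, 0, 0))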
def _appendToDocstring(thingWithDoc, textToAppend):
"""
Append the given text to the docstring of C{thingWithDoc}.
If C{thingWithDoc} has no docstring, then the text just replaces the
docstring. If it has a single-line docstring then it appends a blank line
    and the message text. If it has a multi-line docstring, then it appends a
    blank line and the message text, and also does the indentation correctly.
"""
if thingWithDoc.__doc__:
docstringLines = thingWithDoc.__doc__.splitlines()
else:
docstringLines = []
if len(docstringLines) == 0:
docstringLines.append(textToAppend)
elif len(docstringLines) == 1:
docstringLines.extend(['', textToAppend, ''])
else:
spaces = docstringLines.pop()
docstringLines.extend(['',
spaces + textToAppend,
spaces])
thingWithDoc.__doc__ = '\n'.join(docstringLines)
def deprecated(version, replacement=None):
"""
Return a decorator that marks callables as deprecated.
@type version: L{twisted.python.versions.Version}
@param version: The version in which the callable will be marked as
having been deprecated. The decorated function will be annotated
with this version, having it set as its C{deprecatedVersion}
attribute.
@param replacement: what should be used in place of the callable. Either
pass in a string, which will be inserted into the warning message,
or a callable, which will be expanded to its full import path.
@type replacement: C{str} or callable
"""
def deprecationDecorator(function):
"""
Decorator that marks C{function} as deprecated.
"""
warningString = getDeprecationWarningString(
function, version, None, replacement)
@wraps(function)
def deprecatedFunction(*args, **kwargs):
warn(
warningString,
DeprecationWarning,
stacklevel=2)
return function(*args, **kwargs)
_appendToDocstring(deprecatedFunction,
_getDeprecationDocstring(version, replacement))
deprecatedFunction.deprecatedVersion = version
return deprecatedFunction
return deprecationDecorator
def getWarningMethod():
"""
Return the warning method currently used to record deprecation warnings.
"""
return warn
def setWarningMethod(newMethod):
"""
Set the warning method to use to record deprecation warnings.
The callable should take message, category and stacklevel. The return
value is ignored.
"""
global warn
warn = newMethod
class _InternalState(object):
"""
An L{_InternalState} is a helper object for a L{_ModuleProxy}, so that it
can easily access its own attributes, bypassing its logic for delegating to
another object that it's proxying for.
    @ivar proxy: a L{_ModuleProxy}
"""
def __init__(self, proxy):
object.__setattr__(self, 'proxy', proxy)
def __getattribute__(self, name):
return object.__getattribute__(object.__getattribute__(self, 'proxy'),
name)
def __setattr__(self, name, value):
return object.__setattr__(object.__getattribute__(self, 'proxy'),
name, value)
class _ModuleProxy(object):
"""
Python module wrapper to hook module-level attribute access.
Access to deprecated attributes first checks
L{_ModuleProxy._deprecatedAttributes}, if the attribute does not appear
there then access falls through to L{_ModuleProxy._module}, the wrapped
module object.
@ivar _module: Module on which to hook attribute access.
@type _module: C{module}
@ivar _deprecatedAttributes: Mapping of attribute names to objects that
retrieve the module attribute's original value.
@type _deprecatedAttributes: C{dict} mapping C{str} to
L{_DeprecatedAttribute}
@ivar _lastWasPath: Heuristic guess as to whether warnings about this
package should be ignored for the next call. If the last attribute
access of this module was a C{getattr} of C{__path__}, we will assume
that it was the import system doing it and we won't emit a warning for
the next access, even if it is to a deprecated attribute. The CPython
import system always tries to access C{__path__}, then the attribute
itself, then the attribute itself again, in both successful and failed
cases.
@type _lastWasPath: C{bool}
"""
def __init__(self, module):
state = _InternalState(self)
state._module = module
state._deprecatedAttributes = {}
state._lastWasPath = False
def __repr__(self):
"""
Get a string containing the type of the module proxy and a
representation of the wrapped module object.
"""
state = _InternalState(self)
return '<%s module=%r>' % (type(self).__name__, state._module)
def __setattr__(self, name, value):
"""
Set an attribute on the wrapped module object.
"""
state = _InternalState(self)
state._lastWasPath = False
setattr(state._module, name, value)
def __getattribute__(self, name):
"""
Get an attribute from the module object, possibly emitting a warning.
If the specified name has been deprecated, then a warning is issued.
(Unless certain obscure conditions are met; see
L{_ModuleProxy._lastWasPath} for more information about what might quash
such a warning.)
"""
state = _InternalState(self)
if state._lastWasPath:
deprecatedAttribute = None
else:
deprecatedAttribute = state._deprecatedAttributes.get(name)
if deprecatedAttribute is not None:
# If we have a _DeprecatedAttribute object from the earlier lookup,
# allow it to issue the warning.
value = deprecatedAttribute.get()
else:
# Otherwise, just retrieve the underlying value directly; it's not
# deprecated, there's no warning to issue.
value = getattr(state._module, name)
if name == '__path__':
state._lastWasPath = True
else:
state._lastWasPath = False
return value
class _DeprecatedAttribute(object):
"""
Wrapper for deprecated attributes.
This is intended to be used by L{_ModuleProxy}. Calling
L{_DeprecatedAttribute.get} will issue a warning and retrieve the
underlying attribute's value.
@type module: C{module}
@ivar module: The original module instance containing this attribute
@type fqpn: C{str}
@ivar fqpn: Fully qualified Python name for the deprecated attribute
@type version: L{twisted.python.versions.Version}
@ivar version: Version that the attribute was deprecated in
@type message: C{str}
@ivar message: Deprecation message
"""
def __init__(self, module, name, version, message):
"""
Initialise a deprecated name wrapper.
"""
self.module = module
self.__name__ = name
self.fqpn = module.__name__ + '.' + name
self.version = version
self.message = message
def get(self):
"""
Get the underlying attribute value and issue a deprecation warning.
"""
# This might fail if the deprecated thing is a module inside a package.
# In that case, don't emit the warning this time. The import system
# will come back again when it's not an AttributeError and we can emit
# the warning then.
result = getattr(self.module, self.__name__)
message = _getDeprecationWarningString(self.fqpn, self.version,
DEPRECATION_WARNING_FORMAT + ': ' + self.message)
warn(message, DeprecationWarning, stacklevel=3)
return result
def _deprecateAttribute(proxy, name, version, message):
"""
Mark a module-level attribute as being deprecated.
@type proxy: L{_ModuleProxy}
@param proxy: The module proxy instance proxying the deprecated attributes
@type name: C{str}
@param name: Attribute name
@type version: L{twisted.python.versions.Version}
@param version: Version that the attribute was deprecated in
@type message: C{str}
@param message: Deprecation message
"""
_module = object.__getattribute__(proxy, '_module')
attr = _DeprecatedAttribute(_module, name, version, message)
# Add a deprecated attribute marker for this module's attribute. When this
# attribute is accessed via _ModuleProxy a warning is emitted.
_deprecatedAttributes = object.__getattribute__(
proxy, '_deprecatedAttributes')
_deprecatedAttributes[name] = attr
def deprecatedModuleAttribute(version, message, moduleName, name):
"""
Declare a module-level attribute as being deprecated.
@type version: L{twisted.python.versions.Version}
@param version: Version that the attribute was deprecated in
@type message: C{str}
@param message: Deprecation message
@type moduleName: C{str}
@param moduleName: Fully-qualified Python name of the module containing
the deprecated attribute; if called from the same module as the
attributes are being deprecated in, using the C{__name__} global can
be helpful
@type name: C{str}
@param name: Attribute name to deprecate
"""
module = sys.modules[moduleName]
if not isinstance(module, _ModuleProxy):
module = _ModuleProxy(module)
sys.modules[moduleName] = module
_deprecateAttribute(module, name, version, message)
def warnAboutFunction(offender, warningString):
"""
Issue a warning string, identifying C{offender} as the responsible code.
This function is used to deprecate some behavior of a function. It differs
from L{warnings.warn} in that it is not limited to deprecating the behavior
of a function currently on the call stack.
    @param offender: The function that is being deprecated.
@param warningString: The string that should be emitted by this warning.
@type warningString: C{str}
@since: 11.0
"""
# inspect.getmodule() is attractive, but somewhat
# broken in Python < 2.6. See Python bug 4845.
offenderModule = sys.modules[offender.__module__]
filename = inspect.getabsfile(offenderModule)
lineStarts = list(findlinestarts(offender.__code__))
lastLineNo = lineStarts[-1][1]
globals = offender.__globals__
kwargs = dict(
category=DeprecationWarning,
filename=filename,
lineno=lastLineNo,
module=offenderModule.__name__,
registry=globals.setdefault("__warningregistry__", {}),
module_globals=None)
warn_explicit(warningString, **kwargs)
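# Illustrative sketch, not part of the original module: attributing a warning
# to a function object supplied by the caller rather than to whatever happens
# to be on the call stack at the time.
def _warnAboutFunctionExample(offender):
    warnAboutFunction(
        offender,
        "%s relies on behavior that is going away" % (offender.__name__,))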
def _passed(argspec, positional, keyword):
"""
Take an L{inspect.ArgSpec}, a tuple of positional arguments, and a dict of
keyword arguments, and return a mapping of arguments that were actually
passed to their passed values.
@param argspec: The argument specification for the function to inspect.
@type argspec: L{inspect.ArgSpec}
@param positional: The positional arguments that were passed.
@type positional: L{tuple}
@param keyword: The keyword arguments that were passed.
@type keyword: L{dict}
@return: A dictionary mapping argument names (those declared in C{argspec})
to values that were passed explicitly by the user.
@rtype: L{dict} mapping L{str} to L{object}
"""
result = {}
unpassed = len(argspec.args) - len(positional)
if argspec.keywords is not None:
kwargs = result[argspec.keywords] = {}
if unpassed < 0:
if argspec.varargs is None:
raise TypeError("Too many arguments.")
else:
result[argspec.varargs] = positional[len(argspec.args):]
for name, value in zip(argspec.args, positional):
result[name] = value
for name, value in keyword.items():
if name in argspec.args:
if name in result:
raise TypeError("Already passed.")
result[name] = value
elif argspec.keywords is not None:
kwargs[name] = value
else:
raise TypeError("no such param")
return result
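# Illustrative sketch, not part of the original module: what L{_passed}
# returns for a simple hypothetical function signature.
def _passedExample():
    def sample(a, b=2, *args, **kwargs):
        pass
    spec = inspect.getargspec(sample)
    # The positional 1 binds to 'a', the keyword b=3 binds to 'b', and the
    # unknown keyword 'c' lands in the **kwargs bucket, giving
    # {'a': 1, 'b': 3, 'kwargs': {'c': 4}}.
    return _passed(spec, (1,), {'b': 3, 'c': 4})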
def _mutuallyExclusiveArguments(argumentPairs):
"""
Decorator which causes its decoratee to raise a L{TypeError} if two of the
given arguments are passed at the same time.
@param argumentPairs: pairs of argument identifiers, each pair indicating
an argument that may not be passed in conjunction with another.
@type argumentPairs: sequence of 2-sequences of L{str}
@return: A decorator, used like so::
@_mutuallyExclusiveArguments([["tweedledum", "tweedledee"]])
def function(tweedledum=1, tweedledee=2):
"Don't pass tweedledum and tweedledee at the same time."
@rtype: 1-argument callable taking a callable and returning a callable.
"""
def wrapper(wrappee):
argspec = inspect.getargspec(wrappee)
@wraps(wrappee)
def wrapped(*args, **kwargs):
arguments = _passed(argspec, args, kwargs)
for this, that in argumentPairs:
if this in arguments and that in arguments:
raise TypeError("nope")
return wrappee(*args, **kwargs)
return wrapped
return wrapper

View file

@@ -0,0 +1,447 @@
# -*- test-case-name: twisted.python.test.test_dist -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Distutils convenience functionality.
Don't use this outside of Twisted.
Maintainer: Christopher Armstrong
"""
from distutils.command import build_scripts, install_data, build_ext
from distutils.errors import CompileError
from distutils import core
from distutils.core import Extension
import fnmatch
import os
import platform
import sys
from twisted import copyright
from twisted.python.compat import execfile
STATIC_PACKAGE_METADATA = dict(
name="Twisted",
version=copyright.version,
description="An asynchronous networking framework written in Python",
author="Twisted Matrix Laboratories",
author_email="twisted-python@twistedmatrix.com",
maintainer="Glyph Lefkowitz",
maintainer_email="glyph@twistedmatrix.com",
url="http://twistedmatrix.com/",
license="MIT",
long_description="""\
An extensible framework for Python programming, with special focus
on event-based network programming and multiprotocol integration.
""",
classifiers=[
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
],
)
twisted_subprojects = ["conch", "lore", "mail", "names",
"news", "pair", "runner", "web",
"words"]
class ConditionalExtension(Extension):
"""
An extension module that will only be compiled if certain conditions are
met.
@param condition: A callable of one argument which returns True or False to
indicate whether the extension should be built. The argument is an
instance of L{build_ext_twisted}, which has useful methods for checking
things about the platform.
"""
def __init__(self, *args, **kwargs):
self.condition = kwargs.pop("condition", lambda builder: True)
Extension.__init__(self, *args, **kwargs)
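# Illustrative sketch, not part of the original file: an extension that is
# only built when a (hypothetical) header is available, using the
# _check_header helper of the build_ext_twisted command defined below.  The
# extension name, source file, and header are made up for the example.
def _exampleConditionalExtension():
    return ConditionalExtension(
        "twisted._example",
        ["twisted/_example.c"],
        condition=lambda builder: builder._check_header("sys/example.h"))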
def setup(**kw):
"""
An alternative to distutils' setup() which is specially designed
for Twisted subprojects.
Pass twisted_subproject=projname if you want package and data
files to automatically be found for you.
@param conditionalExtensions: Extensions to optionally build.
@type conditionalExtensions: C{list} of L{ConditionalExtension}
"""
return core.setup(**get_setup_args(**kw))
def get_setup_args(**kw):
if 'twisted_subproject' in kw:
if 'twisted' not in os.listdir('.'):
raise RuntimeError("Sorry, you need to run setup.py from the "
"toplevel source directory.")
projname = kw['twisted_subproject']
projdir = os.path.join('twisted', projname)
kw['packages'] = getPackages(projdir, parent='twisted')
kw['version'] = getVersion(projname)
plugin = "twisted/plugins/twisted_" + projname + ".py"
if os.path.exists(plugin):
kw.setdefault('py_modules', []).append(
plugin.replace("/", ".")[:-3])
kw['data_files'] = getDataFiles(projdir, parent='twisted')
del kw['twisted_subproject']
else:
if 'plugins' in kw:
py_modules = []
for plg in kw['plugins']:
py_modules.append("twisted.plugins." + plg)
kw.setdefault('py_modules', []).extend(py_modules)
del kw['plugins']
if 'cmdclass' not in kw:
kw['cmdclass'] = {
'install_data': install_data_twisted,
'build_scripts': build_scripts_twisted}
if "conditionalExtensions" in kw:
extensions = kw["conditionalExtensions"]
del kw["conditionalExtensions"]
if 'ext_modules' not in kw:
# This is a workaround for distutils behavior; ext_modules isn't
# actually used by our custom builder. distutils deep-down checks
# to see if there are any ext_modules defined before invoking
# the build_ext command. We need to trigger build_ext regardless
# because it is the thing that does the conditional checks to see
# if it should build any extensions. The reason we have to delay
# the conditional checks until then is that the compiler objects
# are not yet set up when this code is executed.
kw["ext_modules"] = extensions
class my_build_ext(build_ext_twisted):
conditionalExtensions = extensions
kw.setdefault('cmdclass', {})['build_ext'] = my_build_ext
return kw
def getVersion(proj, base="twisted"):
"""
Extract the version number for a given project.
@param proj: the name of the project. Examples are "core",
"conch", "words", "mail".
@rtype: str
@returns: The version number of the project, as a string like
"2.0.0".
"""
if proj == 'core':
vfile = os.path.join(base, '_version.py')
else:
vfile = os.path.join(base, proj, '_version.py')
ns = {'__name__': 'Nothing to see here'}
execfile(vfile, ns)
return ns['version'].base()
# Names that are excluded from globbing results:
EXCLUDE_NAMES = ["{arch}", "CVS", ".cvsignore", "_darcs",
"RCS", "SCCS", ".svn"]
EXCLUDE_PATTERNS = ["*.py[cdo]", "*.s[ol]", ".#*", "*~", "*.py"]
def _filterNames(names):
"""
Given a list of file names, return those names that should be copied.
"""
names = [n for n in names
if n not in EXCLUDE_NAMES]
# This is needed when building a distro from a working
# copy (likely a checkout) rather than a pristine export:
for pattern in EXCLUDE_PATTERNS:
names = [n for n in names
if (not fnmatch.fnmatch(n, pattern))
and (not n.endswith('.py'))]
return names
def relativeTo(base, relativee):
"""
Gets 'relativee' relative to 'basepath'.
i.e.,
>>> relativeTo('/home/', '/home/radix/')
'radix'
>>> relativeTo('.', '/home/radix/Projects/Twisted') # curdir is /home/radix
'Projects/Twisted'
The 'relativee' must be a child of 'basepath'.
"""
basepath = os.path.abspath(base)
relativee = os.path.abspath(relativee)
if relativee.startswith(basepath):
relative = relativee[len(basepath):]
if relative.startswith(os.sep):
relative = relative[1:]
return os.path.join(base, relative)
raise ValueError("%s is not a subpath of %s" % (relativee, basepath))
def getDataFiles(dname, ignore=None, parent=None):
"""
Get all the data files that should be included in this distutils Project.
'dname' should be the path to the package that you're distributing.
'ignore' is a list of sub-packages to ignore. This facilitates
disparate package hierarchies. That's a fancy way of saying that
the 'twisted' package doesn't want to include the 'twisted.conch'
package, so it will pass ['conch'] as the value.
'parent' is necessary if you're distributing a subpackage like
twisted.conch. 'dname' should point to 'twisted/conch' and 'parent'
should point to 'twisted'. This ensures that your data_files are
generated correctly, only using relative paths for the first element
of the tuple ('twisted/conch/*').
The default 'parent' is the current working directory.
"""
parent = parent or "."
ignore = ignore or []
result = []
for directory, subdirectories, filenames in os.walk(dname):
resultfiles = []
for exname in EXCLUDE_NAMES:
if exname in subdirectories:
subdirectories.remove(exname)
for ig in ignore:
if ig in subdirectories:
subdirectories.remove(ig)
for filename in _filterNames(filenames):
resultfiles.append(filename)
if resultfiles:
result.append((relativeTo(parent, directory),
[relativeTo(parent,
os.path.join(directory, filename))
for filename in resultfiles]))
return result
def getExtensions():
"""
Get all extensions from core and all subprojects.
"""
extensions = []
if not sys.platform.startswith('java'):
for dir in os.listdir("twisted") + [""]:
topfiles = os.path.join("twisted", dir, "topfiles")
if os.path.isdir(topfiles):
ns = {}
setup_py = os.path.join(topfiles, "setup.py")
execfile(setup_py, ns, ns)
if "extensions" in ns:
extensions.extend(ns["extensions"])
return extensions
def getPackages(dname, pkgname=None, results=None, ignore=None, parent=None):
"""
Get all packages which are under dname. This is necessary for
Python 2.2's distutils. Pretty similar arguments to getDataFiles,
including 'parent'.
"""
parent = parent or ""
prefix = []
if parent:
prefix = [parent]
bname = os.path.basename(dname)
ignore = ignore or []
if bname in ignore:
return []
if results is None:
results = []
if pkgname is None:
pkgname = []
subfiles = os.listdir(dname)
abssubfiles = [os.path.join(dname, x) for x in subfiles]
if '__init__.py' in subfiles:
results.append(prefix + pkgname + [bname])
for subdir in filter(os.path.isdir, abssubfiles):
getPackages(subdir, pkgname=pkgname + [bname],
results=results, ignore=ignore,
parent=parent)
res = ['.'.join(result) for result in results]
return res
def getAllScripts():
# "" is included because core scripts are directly in bin/
projects = [''] + [x for x in os.listdir('bin')
if os.path.isdir(os.path.join("bin", x))
and x in twisted_subprojects]
scripts = []
for i in projects:
scripts.extend(getScripts(i))
return scripts
def getScripts(projname, basedir=''):
"""
    Returns a list of scripts for a Twisted subproject; this works in
    either an SVN checkout or a project-specific tarball.
"""
scriptdir = os.path.join(basedir, 'bin', projname)
if not os.path.isdir(scriptdir):
# Probably a project-specific tarball, in which case only this
# project's bins are included in 'bin'
scriptdir = os.path.join(basedir, 'bin')
if not os.path.isdir(scriptdir):
return []
thingies = os.listdir(scriptdir)
for specialExclusion in ['.svn', '_preamble.py', '_preamble.pyc']:
if specialExclusion in thingies:
thingies.remove(specialExclusion)
return filter(os.path.isfile,
[os.path.join(scriptdir, x) for x in thingies])
## Helpers and distutil tweaks
class build_scripts_twisted(build_scripts.build_scripts):
"""
Renames scripts so they end with '.py' on Windows.
"""
def run(self):
build_scripts.build_scripts.run(self)
if not os.name == "nt":
return
for f in os.listdir(self.build_dir):
fpath = os.path.join(self.build_dir, f)
if not fpath.endswith(".py"):
pypath = fpath + ".py"
if os.path.exists(pypath):
os.unlink(pypath)
os.rename(fpath, pypath)
class install_data_twisted(install_data.install_data):
"""
I make sure data files are installed in the package directory.
"""
def finalize_options(self):
self.set_undefined_options('install',
('install_lib', 'install_dir')
)
install_data.install_data.finalize_options(self)
class build_ext_twisted(build_ext.build_ext):
"""
Allow subclasses to easily detect and customize Extensions to
build at install-time.
"""
def prepare_extensions(self):
"""
Prepare the C{self.extensions} attribute (used by
L{build_ext.build_ext}) by checking which extensions in
L{conditionalExtensions} should be built. In addition, if we are
building on NT, define the WIN32 macro to 1.
"""
# always define WIN32 under Windows
if os.name == 'nt':
self.define_macros = [("WIN32", 1)]
else:
self.define_macros = []
# On Solaris 10, we need to define the _XOPEN_SOURCE and
# _XOPEN_SOURCE_EXTENDED macros to build in order to gain access to
# the msg_control, msg_controllen, and msg_flags members in
# sendmsg.c. (according to
# http://stackoverflow.com/questions/1034587). See the documentation
# of X/Open CAE in the standards(5) man page of Solaris.
if sys.platform.startswith('sunos'):
self.define_macros.append(('_XOPEN_SOURCE', 1))
self.define_macros.append(('_XOPEN_SOURCE_EXTENDED', 1))
self.extensions = [x for x in self.conditionalExtensions
if x.condition(self)]
for ext in self.extensions:
ext.define_macros.extend(self.define_macros)
def build_extensions(self):
"""
Check to see which extension modules to build and then build them.
"""
self.prepare_extensions()
build_ext.build_ext.build_extensions(self)
def _remove_conftest(self):
for filename in ("conftest.c", "conftest.o", "conftest.obj"):
try:
os.unlink(filename)
except EnvironmentError:
pass
def _compile_helper(self, content):
conftest = open("conftest.c", "w")
try:
conftest.write(content)
conftest.close()
try:
self.compiler.compile(["conftest.c"], output_dir='')
except CompileError:
return False
return True
finally:
self._remove_conftest()
def _check_header(self, header_name):
"""
Check if the given header can be included by trying to compile a file
that contains only an #include line.
"""
self.compiler.announce("checking for %s ..." % header_name, 0)
return self._compile_helper("#include <%s>\n" % header_name)
def _checkCPython(sys=sys, platform=platform):
"""
Checks if this implementation is CPython.
This uses C{platform.python_implementation}.
This takes C{sys} and C{platform} kwargs that by default use the real
modules. You shouldn't care about these -- they are for testing purposes
only.
@return: C{False} if the implementation is definitely not CPython, C{True}
otherwise.
"""
return platform.python_implementation() == "CPython"
_isCPython = _checkCPython()

View file

@@ -0,0 +1,264 @@
# -*- test-case-name: twisted.python.test.test_dist3 -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Support for installing Twisted on Python 3.
Only necessary while parts of Twisted are unported.
@var modules: A list of modules that have been ported,
e.g. "twisted.python.versions"; a package name (e.g. "twisted.python")
indicates the corresponding __init__.py file has been ported
(e.g. "twisted/python/__init__.py"). To reduce merge conflicts, add new
    lines in alphabetical order.
@var testModules: A list of test modules that have been ported, e.g
"twisted.python.test.test_versions". To reduce merge conflicts, add new
    lines in alphabetical order.
@var almostModules: A list of any other modules which are needed by any of the
modules in the other two lists, but which themselves have not actually
been properly ported to Python 3. These modules might work well enough to
satisfy some of the requirements of the modules that depend on them, but
cannot be considered generally usable otherwise.
@var modulesToInstall: A list of all modules that should be installed on
Python 3.
"""
from __future__ import division
modules = [
"twisted",
"twisted.copyright",
"twisted.internet",
"twisted.internet.abstract",
"twisted.internet.address",
"twisted.internet.base",
"twisted.internet.default",
"twisted.internet.defer",
"twisted.internet.endpoints",
"twisted.internet.epollreactor",
"twisted.internet.error",
"twisted.internet.interfaces",
"twisted.internet.fdesc",
"twisted.internet.gireactor",
"twisted.internet._glibbase",
"twisted.internet.gtk3reactor",
"twisted.internet.main",
"twisted.internet._newtls",
"twisted.internet.posixbase",
"twisted.internet.protocol",
"twisted.internet.pollreactor",
"twisted.internet.reactor",
"twisted.internet.selectreactor",
"twisted.internet._signals",
"twisted.internet.ssl",
"twisted.internet.task",
"twisted.internet.tcp",
"twisted.internet.test",
"twisted.internet.test.connectionmixins",
"twisted.internet.test.modulehelpers",
"twisted.internet.test._posixifaces",
"twisted.internet.test.reactormixins",
"twisted.internet.threads",
"twisted.internet.udp",
"twisted.internet.utils",
"twisted.names",
"twisted.names.cache",
"twisted.names.client",
"twisted.names.common",
"twisted.names.dns",
"twisted.names.error",
"twisted.names.hosts",
"twisted.names.resolve",
"twisted.names._rfc1982",
"twisted.names.test",
"twisted.names._version",
"twisted.protocols",
"twisted.protocols.basic",
"twisted.protocols.policies",
"twisted.protocols.test",
"twisted.protocols.tls",
"twisted.python",
"twisted.python.compat",
"twisted.python.components",
"twisted.python.constants",
"twisted.python.context",
"twisted.python.deprecate",
"twisted.python.dist3",
"twisted.python.failure",
"twisted.python.filepath",
"twisted.python.lockfile",
"twisted.python.log",
"twisted.python.monkey",
"twisted.python.randbytes",
"twisted.python.reflect",
"twisted.python.runtime",
"twisted.python.test",
"twisted.python.test.deprecatedattributes",
"twisted.python.test.modules_helpers",
"twisted.python.threadable",
"twisted.python.threadpool",
"twisted.python.util",
"twisted.python.versions",
"twisted.test",
"twisted.test.proto_helpers",
"twisted.test.iosim",
"twisted.test.ssl_helpers",
"twisted.trial",
"twisted.trial._asynctest",
"twisted.trial.itrial",
"twisted.trial._synctest",
"twisted.trial.test",
"twisted.trial.test.detests",
"twisted.trial.test.erroneous",
"twisted.trial.test.suppression",
"twisted.trial.test.packages",
"twisted.trial.test.skipping",
"twisted.trial.test.suppression",
"twisted.trial.unittest",
"twisted.trial.util",
"twisted._version",
"twisted.web",
"twisted.web.http_headers",
"twisted.web.resource",
"twisted.web._responses",
"twisted.web.test",
"twisted.web.test.requesthelper",
"twisted.web._version",
]
testModules = [
"twisted.internet.test.test_abstract",
"twisted.internet.test.test_address",
"twisted.internet.test.test_base",
"twisted.internet.test.test_core",
"twisted.internet.test.test_default",
"twisted.internet.test.test_endpoints",
"twisted.internet.test.test_epollreactor",
"twisted.internet.test.test_fdset",
"twisted.internet.test.test_filedescriptor",
"twisted.internet.test.test_inlinecb",
"twisted.internet.test.test_gireactor",
"twisted.internet.test.test_glibbase",
"twisted.internet.test.test_main",
"twisted.internet.test.test_newtls",
"twisted.internet.test.test_posixbase",
"twisted.internet.test.test_protocol",
"twisted.internet.test.test_sigchld",
"twisted.internet.test.test_tcp",
"twisted.internet.test.test_threads",
"twisted.internet.test.test_tls",
"twisted.internet.test.test_udp",
"twisted.internet.test.test_udp_internals",
"twisted.names.test.test_cache",
"twisted.names.test.test_client",
"twisted.names.test.test_common",
"twisted.names.test.test_dns",
"twisted.names.test.test_hosts",
"twisted.names.test.test_rfc1982",
"twisted.protocols.test.test_basic",
"twisted.protocols.test.test_tls",
"twisted.python.test.test_components",
"twisted.python.test.test_constants",
"twisted.python.test.test_deprecate",
"twisted.python.test.test_dist3",
"twisted.python.test.test_runtime",
"twisted.python.test.test_util",
"twisted.python.test.test_versions",
"twisted.test.test_abstract",
"twisted.test.test_compat",
"twisted.test.test_context",
"twisted.test.test_cooperator",
"twisted.test.test_defer",
"twisted.test.test_defgen",
"twisted.test.test_error",
"twisted.test.test_factories",
"twisted.test.test_failure",
"twisted.test.test_fdesc",
"twisted.test.test_internet",
"twisted.test.test_iosim",
"twisted.test.test_iutils",
"twisted.test.test_lockfile",
"twisted.test.test_log",
"twisted.test.test_loopback",
"twisted.test.test_monkey",
"twisted.test.test_paths",
"twisted.test.test_policies",
"twisted.test.test_randbytes",
"twisted.test.test_reflect",
"twisted.test.test_setup",
"twisted.test.test_ssl",
"twisted.test.test_sslverify",
"twisted.test.test_task",
"twisted.test.test_tcp",
"twisted.test.test_tcp_internals",
"twisted.test.test_threadable",
"twisted.test.test_threads",
"twisted.test.test_twisted",
"twisted.test.test_threadpool",
"twisted.test.test_udp",
"twisted.trial.test.test_assertions",
"twisted.trial.test.test_asyncassertions",
"twisted.trial.test.test_deferred",
"twisted.trial.test.test_pyunitcompat",
"twisted.trial.test.test_suppression",
"twisted.trial.test.test_testcase",
"twisted.trial.test.test_tests",
"twisted.trial.test.test_util",
"twisted.trial.test.test_warning",
# The downloadPage tests weren't ported:
"twisted.web.test.test_webclient",
"twisted.web.test.test_http",
"twisted.web.test.test_http_headers",
"twisted.web.test.test_resource",
"twisted.web.test.test_web",
]
almostModules = [
# Missing test coverage, see #6156:
"twisted.internet._sslverify",
# twisted.names.client semi-depends on twisted.names.root, but only on
# Windows really:
"twisted.names.root",
# Missing test coverage:
"twisted.protocols.loopback",
# Minimally used by setup3.py:
"twisted.python.dist",
# twisted.python.filepath depends on twisted.python.win32, but on Linux it
# only really needs to import:
"twisted.python.win32",
"twisted.test.reflect_helper_IE",
"twisted.test.reflect_helper_VE",
"twisted.test.reflect_helper_ZDE",
# Required by some of the ported trial tests:
"twisted.trial.reporter",
# Agent code and downloadPage aren't ported, test coverage isn't complete:
"twisted.web.client",
# twisted.web.resource depends on twisted.web.error, so it is sorta
# ported, but its tests are not yet ported, so it probably doesn't
# completely work.
"twisted.web.error",
# Required by twisted.web.server, no actual code here:
"twisted.web.iweb",
# Required by twisted.web.server for an error handling case:
"twisted.web.html",
# This module has a lot of missing test coverage. What tests it has pass,
# but it needs a lot more. It was ported only enough to make the client
# work.
"twisted.web.http",
# GzipEncoder and allowed methods functionality not ported, no doubt
# missing lots of test coverage:
"twisted.web.server",
]
modulesToInstall = modules + testModules + almostModules

View file

@@ -0,0 +1,654 @@
# -*- test-case-name: twisted.test.test_failure -*-
# See also test suite twisted.test.test_pbfailure
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Asynchronous-friendly error mechanism.
See L{Failure}.
"""
from __future__ import division, absolute_import
# System Imports
import sys
import linecache
import inspect
import opcode
from inspect import getmro
from twisted.python.compat import _PY3, NativeStringIO as StringIO
from twisted.python import reflect
count = 0
traceupLength = 4
class DefaultException(Exception):
pass
def format_frames(frames, write, detail="default"):
"""Format and write frames.
@param frames: is a list of frames as used by Failure.frames, with
each frame being a list of
(funcName, fileName, lineNumber, locals.items(), globals.items())
@type frames: list
@param write: this will be called with formatted strings.
@type write: callable
@param detail: Four detail levels are available:
default, brief, verbose, and verbose-vars-not-captured.
C{Failure.printDetailedTraceback} uses the latter when the caller asks
for verbose, but no vars were captured, so that an explicit warning
about the missing data is shown.
@type detail: string
"""
if detail not in ('default', 'brief', 'verbose',
'verbose-vars-not-captured'):
raise ValueError(
"Detail must be default, brief, verbose, or "
"verbose-vars-not-captured. (not %r)" % (detail,))
w = write
if detail == "brief":
for method, filename, lineno, localVars, globalVars in frames:
w('%s:%s:%s\n' % (filename, lineno, method))
elif detail == "default":
for method, filename, lineno, localVars, globalVars in frames:
w( ' File "%s", line %s, in %s\n' % (filename, lineno, method))
w( ' %s\n' % linecache.getline(filename, lineno).strip())
elif detail == "verbose-vars-not-captured":
for method, filename, lineno, localVars, globalVars in frames:
w("%s:%d: %s(...)\n" % (filename, lineno, method))
w(' [Capture of Locals and Globals disabled (use captureVars=True)]\n')
elif detail == "verbose":
for method, filename, lineno, localVars, globalVars in frames:
w("%s:%d: %s(...)\n" % (filename, lineno, method))
w(' [ Locals ]\n')
# Note: the repr(val) was (self.pickled and val) or repr(val)))
for name, val in localVars:
w(" %s : %s\n" % (name, repr(val)))
w(' ( Globals )\n')
for name, val in globalVars:
w(" %s : %s\n" % (name, repr(val)))
# slyphon: i have a need to check for this value in trial
# so I made it a module-level constant
EXCEPTION_CAUGHT_HERE = "--- <exception caught here> ---"
class NoCurrentExceptionError(Exception):
"""
Raised when trying to create a Failure from the current interpreter
exception state and there is no current exception state.
"""
class _Traceback(object):
"""
Fake traceback object which can be passed to functions in the standard
library L{traceback} module.
"""
def __init__(self, frames):
"""
Construct a fake traceback object using a list of frames. Note that
although frames generally include locals and globals, this information
is not kept by this object, since locals and globals are not used in
standard tracebacks.
@param frames: [(methodname, filename, lineno, locals, globals), ...]
"""
assert len(frames) > 0, "Must pass some frames"
head, frames = frames[0], frames[1:]
name, filename, lineno, localz, globalz = head
self.tb_frame = _Frame(name, filename)
self.tb_lineno = lineno
if len(frames) == 0:
self.tb_next = None
else:
self.tb_next = _Traceback(frames)
class _Frame(object):
"""
A fake frame object, used by L{_Traceback}.
@ivar f_code: fake L{code<types.CodeType>} object
@ivar f_globals: fake f_globals dictionary (usually empty)
@ivar f_locals: fake f_locals dictionary (usually empty)
"""
def __init__(self, name, filename):
"""
@param name: method/function name for this frame.
@type name: C{str}
@param filename: filename for this frame.
@type name: C{str}
"""
self.f_code = _Code(name, filename)
self.f_globals = {}
self.f_locals = {}
class _Code(object):
"""
A fake code object, used by L{_Traceback} via L{_Frame}.
"""
def __init__(self, name, filename):
self.co_name = name
self.co_filename = filename
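# Illustrative sketch, not part of the original module: building a fake
# traceback chain from hand-written frame tuples, as described by the
# L{_Traceback} docstring above.  The frame contents are hypothetical.
def _fakeTracebackExample():
    tb = _Traceback([
        ("outer", "outer.py", 10, {}, {}),
        ("inner", "inner.py", 20, {}, {}),
    ])
    # The fake objects expose the attributes read by traceback formatting
    # code: tb_frame, tb_lineno, tb_next, and f_code.co_name/co_filename.
    return [(t.tb_frame.f_code.co_filename, t.tb_lineno)
            for t in (tb, tb.tb_next)]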
class Failure:
"""
A basic abstraction for an error that has occurred.
This is necessary because Python's built-in error mechanisms are
inconvenient for asynchronous communication.
    The C{stack} and C{frames} attributes contain frames. Each frame is a tuple
of (funcName, fileName, lineNumber, localsItems, globalsItems), where
localsItems and globalsItems are the contents of
C{locals().items()}/C{globals().items()} for that frame, or an empty tuple
if those details were not captured.
@ivar value: The exception instance responsible for this failure.
@ivar type: The exception's class.
@ivar stack: list of frames, innermost last, excluding C{Failure.__init__}.
@ivar frames: list of frames, innermost first.
"""
pickled = 0
stack = None
# The opcode of "yield" in Python bytecode. We need this in _findFailure in
# order to identify whether an exception was thrown by a
# throwExceptionIntoGenerator.
_yieldOpcode = chr(opcode.opmap["YIELD_VALUE"])
def __init__(self, exc_value=None, exc_type=None, exc_tb=None,
captureVars=False):
"""
Initialize me with an explanation of the error.
By default, this will use the current C{exception}
(L{sys.exc_info}()). However, if you want to specify a
particular kind of failure, you can pass an exception as an
argument.
If no C{exc_value} is passed, then an "original" C{Failure} will
be searched for. If the current exception handler that this
C{Failure} is being constructed in is handling an exception
raised by L{raiseException}, then this C{Failure} will act like
the original C{Failure}.
For C{exc_tb} only L{traceback} instances or C{None} are allowed.
If C{None} is supplied for C{exc_value}, the value of C{exc_tb} is
ignored, otherwise if C{exc_tb} is C{None}, it will be found from
execution context (ie, L{sys.exc_info}).
@param captureVars: if set, capture locals and globals of stack
frames. This is pretty slow, and makes no difference unless you
are going to use L{printDetailedTraceback}.
"""
global count
count = count + 1
self.count = count
self.type = self.value = tb = None
self.captureVars = captureVars
if isinstance(exc_value, str) and exc_type is None:
raise TypeError("Strings are not supported by Failure")
stackOffset = 0
if exc_value is None:
exc_value = self._findFailure()
if exc_value is None:
self.type, self.value, tb = sys.exc_info()
if self.type is None:
raise NoCurrentExceptionError()
stackOffset = 1
elif exc_type is None:
if isinstance(exc_value, Exception):
self.type = exc_value.__class__
else: #allow arbitrary objects.
self.type = type(exc_value)
self.value = exc_value
else:
self.type = exc_type
self.value = exc_value
if isinstance(self.value, Failure):
self.__dict__ = self.value.__dict__
return
if tb is None:
if exc_tb:
tb = exc_tb
elif _PY3:
tb = self.value.__traceback__
frames = self.frames = []
stack = self.stack = []
# added 2003-06-23 by Chris Armstrong. Yes, I actually have a
# use case where I need this traceback object, and I've made
# sure that it'll be cleaned up.
self.tb = tb
if tb:
f = tb.tb_frame
elif not isinstance(self.value, Failure):
# we don't do frame introspection since it's expensive,
# and if we were passed a plain exception with no
# traceback, it's not useful anyway
f = stackOffset = None
while stackOffset and f:
# This excludes this Failure.__init__ frame from the
# stack, leaving it to start with our caller instead.
f = f.f_back
stackOffset -= 1
# Keeps the *full* stack. Formerly in spread.pb.print_excFullStack:
#
# The need for this function arises from the fact that several
# PB classes have the peculiar habit of discarding exceptions
# with bareword "except:"s. This premature exception
# catching means tracebacks generated here don't tend to show
# what called upon the PB object.
while f:
if captureVars:
localz = f.f_locals.copy()
if f.f_locals is f.f_globals:
globalz = {}
else:
globalz = f.f_globals.copy()
for d in globalz, localz:
if "__builtins__" in d:
del d["__builtins__"]
localz = localz.items()
globalz = globalz.items()
else:
localz = globalz = ()
stack.insert(0, (
f.f_code.co_name,
f.f_code.co_filename,
f.f_lineno,
localz,
globalz,
))
f = f.f_back
while tb is not None:
f = tb.tb_frame
if captureVars:
localz = f.f_locals.copy()
if f.f_locals is f.f_globals:
globalz = {}
else:
globalz = f.f_globals.copy()
for d in globalz, localz:
if "__builtins__" in d:
del d["__builtins__"]
localz = list(localz.items())
globalz = list(globalz.items())
else:
localz = globalz = ()
frames.append((
f.f_code.co_name,
f.f_code.co_filename,
tb.tb_lineno,
localz,
globalz,
))
tb = tb.tb_next
if inspect.isclass(self.type) and issubclass(self.type, Exception):
parentCs = getmro(self.type)
self.parents = list(map(reflect.qual, parentCs))
else:
self.parents = [self.type]
def trap(self, *errorTypes):
"""Trap this failure if its type is in a predetermined list.
This allows you to trap a Failure in an error callback. It will be
automatically re-raised if it is not a type that you expect.
The reason for having this particular API is because it's very useful
in Deferred errback chains::
def _ebFoo(self, failure):
r = failure.trap(Spam, Eggs)
print 'The Failure is due to either Spam or Eggs!'
if r == Spam:
print 'Spam did it!'
elif r == Eggs:
print 'Eggs did it!'
If the failure is not a Spam or an Eggs, then the Failure will be
'passed on' to the next errback. In Python 2 the Failure will be
raised; in Python 3 the underlying exception will be re-raised.
@type errorTypes: L{Exception}
"""
error = self.check(*errorTypes)
if not error:
if _PY3:
self.raiseException()
else:
raise self
return error
def check(self, *errorTypes):
"""Check if this failure's type is in a predetermined list.
@type errorTypes: list of L{Exception} classes or
fully-qualified class names.
@returns: the matching L{Exception} type, or None if no match.
"""
for error in errorTypes:
err = error
if inspect.isclass(error) and issubclass(error, Exception):
err = reflect.qual(error)
if err in self.parents:
return error
return None
# It would be nice to use twisted.python.compat.reraise, but that breaks
# the stack exploration in _findFailure; possibly this can be fixed in
# #5931.
if _PY3:
def raiseException(self):
raise self.value.with_traceback(self.tb)
else:
exec("""def raiseException(self):
raise self.type, self.value, self.tb""")
raiseException.__doc__ = (
"""
raise the original exception, preserving traceback
information if available.
""")
def throwExceptionIntoGenerator(self, g):
"""
Throw the original exception into the given generator,
preserving traceback information if available.
@return: The next value yielded from the generator.
@raise StopIteration: If there are no more values in the generator.
@raise anything else: Anything that the generator raises.
"""
return g.throw(self.type, self.value, self.tb)
def _findFailure(cls):
"""
Find the failure that represents the exception currently in context.
"""
tb = sys.exc_info()[-1]
if not tb:
return
secondLastTb = None
lastTb = tb
while lastTb.tb_next:
secondLastTb = lastTb
lastTb = lastTb.tb_next
lastFrame = lastTb.tb_frame
# NOTE: f_locals.get('self') is used rather than
# f_locals['self'] because psyco frames do not contain
# anything in their locals() dicts. psyco makes debugging
# difficult anyhow, so losing the Failure objects (and thus
# the tracebacks) here when it is used is not that big a deal.
# handle raiseException-originated exceptions
if lastFrame.f_code is cls.raiseException.__code__:
return lastFrame.f_locals.get('self')
# handle throwExceptionIntoGenerator-originated exceptions
# this is tricky, and differs if the exception was caught
# inside the generator, or above it:
# it is only really originating from
# throwExceptionIntoGenerator if the bottom of the traceback
# is a yield.
# Pyrex and Cython extensions create traceback frames
# with no co_code, but they can't yield so we know it's okay to just return here.
if ((not lastFrame.f_code.co_code) or
lastFrame.f_code.co_code[lastTb.tb_lasti] != cls._yieldOpcode):
return
# if the exception was caught above the generator.throw
# (outside the generator), it will appear in the tb (as the
# second last item):
if secondLastTb:
frame = secondLastTb.tb_frame
if frame.f_code is cls.throwExceptionIntoGenerator.__code__:
return frame.f_locals.get('self')
# if the exception was caught below the generator.throw
# (inside the generator), it will appear in the frames' linked
# list, above the top-level traceback item (which must be the
# generator frame itself, thus its caller is
# throwExceptionIntoGenerator).
frame = tb.tb_frame.f_back
if frame and frame.f_code is cls.throwExceptionIntoGenerator.__code__:
return frame.f_locals.get('self')
_findFailure = classmethod(_findFailure)
def __repr__(self):
return "<%s %s>" % (self.__class__, self.type)
def __str__(self):
return "[Failure instance: %s]" % self.getBriefTraceback()
def __getstate__(self):
"""Avoid pickling objects in the traceback.
"""
if self.pickled:
return self.__dict__
c = self.__dict__.copy()
c['frames'] = [
[
v[0], v[1], v[2],
_safeReprVars(v[3]),
_safeReprVars(v[4]),
] for v in self.frames
]
# added 2003-06-23. See comment above in __init__
c['tb'] = None
if self.stack is not None:
# XXX: This is a band-aid. I can't figure out where these
# (failure.stack is None) instances are coming from.
c['stack'] = [
[
v[0], v[1], v[2],
_safeReprVars(v[3]),
_safeReprVars(v[4]),
] for v in self.stack
]
c['pickled'] = 1
return c
def cleanFailure(self):
"""
Remove references to other objects, replacing them with strings.
On Python 3, this will also set the C{__traceback__} attribute of the
exception instance to C{None}.
"""
self.__dict__ = self.__getstate__()
if _PY3:
self.value.__traceback__ = None
def getTracebackObject(self):
"""
Get an object that represents this Failure's stack that can be passed
to traceback.extract_tb.
If the original traceback object is still present, return that. If this
traceback object has been lost but we still have the information,
return a fake traceback object (see L{_Traceback}). If there is no
traceback information at all, return None.
"""
if self.tb is not None:
return self.tb
elif len(self.frames) > 0:
return _Traceback(self.frames)
else:
return None
def getErrorMessage(self):
"""Get a string of the exception which caused this Failure."""
if isinstance(self.value, Failure):
return self.value.getErrorMessage()
return reflect.safe_str(self.value)
def getBriefTraceback(self):
io = StringIO()
self.printBriefTraceback(file=io)
return io.getvalue()
def getTraceback(self, elideFrameworkCode=0, detail='default'):
io = StringIO()
self.printTraceback(file=io, elideFrameworkCode=elideFrameworkCode, detail=detail)
return io.getvalue()
def printTraceback(self, file=None, elideFrameworkCode=False, detail='default'):
"""
Emulate Python's standard error reporting mechanism.
@param file: If specified, a file-like object to which to write the
traceback.
@param elideFrameworkCode: A flag indicating whether to attempt to
remove uninteresting frames from within Twisted itself from the
output.
@param detail: A string indicating how much information to include
in the traceback. Must be one of C{'brief'}, C{'default'}, or
C{'verbose'}.
"""
if file is None:
from twisted.python import log
file = log.logerr
w = file.write
if detail == 'verbose' and not self.captureVars:
# We don't have any locals or globals, so rather than show them as
# empty make the output explicitly say that we don't have them at
# all.
formatDetail = 'verbose-vars-not-captured'
else:
formatDetail = detail
# Preamble
if detail == 'verbose':
w( '*--- Failure #%d%s---\n' %
(self.count,
(self.pickled and ' (pickled) ') or ' '))
elif detail == 'brief':
if self.frames:
hasFrames = 'Traceback'
else:
hasFrames = 'Traceback (failure with no frames)'
w("%s: %s: %s\n" % (
hasFrames,
reflect.safe_str(self.type),
reflect.safe_str(self.value)))
else:
w( 'Traceback (most recent call last):\n')
# Frames, formatted in appropriate style
if self.frames:
if not elideFrameworkCode:
format_frames(self.stack[-traceupLength:], w, formatDetail)
w("%s\n" % (EXCEPTION_CAUGHT_HERE,))
format_frames(self.frames, w, formatDetail)
elif not detail == 'brief':
# Yeah, it's not really a traceback, despite looking like one...
w("Failure: ")
# postamble, if any
if not detail == 'brief':
w("%s: %s\n" % (reflect.qual(self.type),
reflect.safe_str(self.value)))
# chaining
if isinstance(self.value, Failure):
# TODO: indentation for chained failures?
file.write(" (chained Failure)\n")
self.value.printTraceback(file, elideFrameworkCode, detail)
if detail == 'verbose':
w('*--- End of Failure #%d ---\n' % self.count)
def printBriefTraceback(self, file=None, elideFrameworkCode=0):
"""Print a traceback as densely as possible.
"""
self.printTraceback(file, elideFrameworkCode, detail='brief')
def printDetailedTraceback(self, file=None, elideFrameworkCode=0):
"""Print a traceback with detailed locals and globals information.
"""
self.printTraceback(file, elideFrameworkCode, detail='verbose')
def _safeReprVars(varsDictItems):
"""
Convert a list of (name, object) pairs into (name, repr) pairs.
L{twisted.python.reflect.safe_repr} is used to generate the repr, so no
exceptions will be raised by faulty C{__repr__} methods.
@param varsDictItems: a sequence of (name, value) pairs as returned by e.g.
C{locals().items()}.
@returns: a sequence of (name, repr) pairs.
"""
return [(name, reflect.safe_repr(obj)) for (name, obj) in varsDictItems]
# slyphon: make post-morteming exceptions tweakable
DO_POST_MORTEM = True
def _debuginit(self, exc_value=None, exc_type=None, exc_tb=None,
captureVars=False,
Failure__init__=Failure.__init__):
"""
Initialize failure object, possibly spawning pdb.
"""
if (exc_value, exc_type, exc_tb) == (None, None, None):
exc = sys.exc_info()
if not exc[0] == self.__class__ and DO_POST_MORTEM:
try:
strrepr = str(exc[1])
except:
strrepr = "broken str"
print("Jumping into debugger for post-mortem of exception '%s':" % (strrepr,))
import pdb
pdb.post_mortem(exc[2])
Failure__init__(self, exc_value, exc_type, exc_tb, captureVars)
def startDebugMode():
"""Enable debug hooks for Failures."""
Failure.__init__ = _debuginit
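# Example usage (a minimal sketch, not part of the module's public API): the
# function below is illustrative only.  It captures the current exception as
# a Failure inside an "except" block, then inspects and prints it.
if __name__ == '__main__':
    def _demo():
        try:
            {}['missing']
        except KeyError:
            f = Failure()                       # snapshot of sys.exc_info()
            print(f.getErrorMessage())          # the KeyError's message
            f.printTraceback(file=sys.stdout)   # standard-style traceback
            f.trap(KeyError)                    # returns KeyError; would re-raise otherwise
    _demo()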

View file

@@ -0,0 +1,219 @@
# -*- test-case-name: twisted.python.test.test_fakepwd -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
L{twisted.python.fakepwd} provides a fake implementation of the L{pwd} API.
"""
__all__ = ['UserDatabase', 'ShadowDatabase']
class _UserRecord(object):
"""
L{_UserRecord} holds the user data for a single user in L{UserDatabase}.
It corresponds to L{pwd.struct_passwd}. See that class for attribute
documentation.
"""
def __init__(self, name, password, uid, gid, gecos, home, shell):
self.pw_name = name
self.pw_passwd = password
self.pw_uid = uid
self.pw_gid = gid
self.pw_gecos = gecos
self.pw_dir = home
self.pw_shell = shell
def __len__(self):
return 7
def __getitem__(self, index):
return (
self.pw_name, self.pw_passwd, self.pw_uid,
self.pw_gid, self.pw_gecos, self.pw_dir, self.pw_shell)[index]
class UserDatabase(object):
"""
L{UserDatabase} holds traditional POSIX user data in memory and makes it
available via the same API as L{pwd}.
@ivar _users: A C{list} of L{_UserRecord} instances holding all user data
added to this database.
"""
def __init__(self):
self._users = []
def addUser(self, username, password, uid, gid, gecos, home, shell):
"""
Add a new user record to this database.
@param username: The value for the C{pw_name} field of the user
record to add.
@type username: C{str}
@param password: The value for the C{pw_passwd} field of the user
record to add.
@type password: C{str}
@param uid: The value for the C{pw_uid} field of the user record to
add.
@type uid: C{int}
@param gid: The value for the C{pw_gid} field of the user record to
add.
@type gid: C{int}
@param gecos: The value for the C{pw_gecos} field of the user record
to add.
@type gecos: C{str}
@param home: The value for the C{pw_dir} field of the user record to
add.
@type home: C{str}
@param shell: The value for the C{pw_shell} field of the user record to
add.
@type shell: C{str}
"""
self._users.append(_UserRecord(
username, password, uid, gid, gecos, home, shell))
def getpwuid(self, uid):
"""
Return the user record corresponding to the given uid.
"""
for entry in self._users:
if entry.pw_uid == uid:
return entry
raise KeyError()
def getpwnam(self, name):
"""
Return the user record corresponding to the given username.
"""
for entry in self._users:
if entry.pw_name == name:
return entry
raise KeyError()
def getpwall(self):
"""
Return a list of all user records.
"""
return self._users
class _ShadowRecord(object):
"""
L{_ShadowRecord} holds the shadow user data for a single user in
L{ShadowDatabase}. It corresponds to C{spwd.struct_spwd}. See that class
for attribute documentation.
"""
def __init__(self, username, password, lastChange, min, max, warn, inact,
expire, flag):
self.sp_nam = username
self.sp_pwd = password
self.sp_lstchg = lastChange
self.sp_min = min
self.sp_max = max
self.sp_warn = warn
self.sp_inact = inact
self.sp_expire = expire
self.sp_flag = flag
def __len__(self):
return 9
def __getitem__(self, index):
return (
self.sp_nam, self.sp_pwd, self.sp_lstchg, self.sp_min,
self.sp_max, self.sp_warn, self.sp_inact, self.sp_expire,
self.sp_flag)[index]
class ShadowDatabase(object):
"""
L{ShadowDatabase} holds a shadow user database in memory and makes it
available via the same API as C{spwd}.
@ivar _users: A C{list} of L{_ShadowRecord} instances holding all user data
added to this database.
@since: 12.0
"""
def __init__(self):
self._users = []
def addUser(self, username, password, lastChange, min, max, warn, inact,
expire, flag):
"""
Add a new user record to this database.
@param username: The value for the C{sp_nam} field of the user record to
add.
@type username: C{str}
@param password: The value for the C{sp_pwd} field of the user record to
add.
@type password: C{str}
@param lastChange: The value for the C{sp_lstchg} field of the user
record to add.
@type lastChange: C{int}
@param min: The value for the C{sp_min} field of the user record to add.
@type min: C{int}
@param max: The value for the C{sp_max} field of the user record to add.
@type max: C{int}
@param warn: The value for the C{sp_warn} field of the user record to
add.
@type warn: C{int}
@param inact: The value for the C{sp_inact} field of the user record to
add.
@type inact: C{int}
@param expire: The value for the C{sp_expire} field of the user record
to add.
@type expire: C{int}
@param flag: The value for the C{sp_flag} field of the user record to
add.
@type flag: C{int}
"""
self._users.append(_ShadowRecord(
username, password, lastChange,
min, max, warn, inact, expire, flag))
def getspnam(self, username):
"""
Return the shadow user record corresponding to the given username.
"""
for entry in self._users:
if entry.sp_nam == username:
return entry
raise KeyError
def getspall(self):
"""
Return a list of all shadow user records.
"""
return self._users
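# Example usage (a minimal sketch): populate a UserDatabase and query it
# through the pwd-style API.  All names and numeric values below are
# illustrative only.
if __name__ == '__main__':
    db = UserDatabase()
    db.addUser('alice', 'x', 1000, 1000, 'Alice', '/home/alice', '/bin/sh')
    entry = db.getpwnam('alice')
    print(entry.pw_uid)    # 1000
    print(entry[6])        # /bin/sh -- records also support tuple-style indexing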

File diff suppressed because it is too large

View file

@@ -0,0 +1,46 @@
"""
A module for externalized finalizers.
"""
import weakref
garbageKey = 0
def callbackFactory(num, fins):
def _cb(w):
del refs[num]
for fx in fins:
fx()
return _cb
refs = {}
def register(inst):
global garbageKey
garbageKey += 1
r = weakref.ref(inst, callbackFactory(garbageKey, inst.__finalizers__()))
refs[garbageKey] = r
if __name__ == '__main__':
def fin():
print 'I am _so_ dead.'
class Finalizeable:
"""
An un-sucky __del__
"""
def __finalizers__(self):
"""
I'm going away.
"""
return [fin]
f = Finalizeable()
f.f2 = f
register(f)
del f
import gc
gc.collect()
print 'deled'

View file

@@ -0,0 +1,363 @@
# -*- test-case-name: twisted.test.test_formmethod -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Form-based method objects.
This module contains support for descriptive method signatures that can be used
to format methods.
"""
import calendar
class FormException(Exception):
"""An error occurred calling the form method.
"""
def __init__(self, *args, **kwargs):
Exception.__init__(self, *args)
self.descriptions = kwargs
class InputError(FormException):
"""
An error occurred with some input.
"""
class Argument:
"""Base class for form arguments."""
# default value for argument, if no other default is given
defaultDefault = None
def __init__(self, name, default=None, shortDesc=None,
longDesc=None, hints=None, allowNone=1):
self.name = name
self.allowNone = allowNone
if default is None:
default = self.defaultDefault
self.default = default
self.shortDesc = shortDesc
self.longDesc = longDesc
if not hints:
hints = {}
self.hints = hints
def addHints(self, **kwargs):
self.hints.update(kwargs)
def getHint(self, name, default=None):
return self.hints.get(name, default)
def getShortDescription(self):
return self.shortDesc or self.name.capitalize()
def getLongDescription(self):
return self.longDesc or '' #self.shortDesc or "The %s." % self.name
def coerce(self, val):
"""Convert the value to the correct format."""
raise NotImplementedError("implement in subclass")
class String(Argument):
"""A single string.
"""
defaultDefault = ''
min = 0
max = None
def __init__(self, name, default=None, shortDesc=None,
longDesc=None, hints=None, allowNone=1, min=0, max=None):
Argument.__init__(self, name, default=default, shortDesc=shortDesc,
longDesc=longDesc, hints=hints, allowNone=allowNone)
self.min = min
self.max = max
def coerce(self, val):
s = str(val)
if len(s) < self.min:
raise InputError("Value must be at least %s characters long" % self.min)
if self.max != None and len(s) > self.max:
raise InputError("Value must be at most %s characters long" % self.max)
return str(val)
class Text(String):
"""A long string.
"""
class Password(String):
"""A string which should be obscured when input.
"""
class VerifiedPassword(String):
"""A string that should be obscured when input and needs verification."""
def coerce(self, vals):
if len(vals) != 2 or vals[0] != vals[1]:
raise InputError("Please enter the same password twice.")
s = str(vals[0])
if len(s) < self.min:
raise InputError("Value must be at least %s characters long" % self.min)
if self.max != None and len(s) > self.max:
raise InputError("Value must be at most %s characters long" % self.max)
return s
class Hidden(String):
"""A string which is not displayed.
The passed default is used as the value.
"""
class Integer(Argument):
"""A single integer.
"""
defaultDefault = None
def __init__(self, name, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
#although Argument now has allowNone, that was recently added, and
#putting it at the end kept things which relied on argument order
#from breaking. However, allowNone originally was in here, so
#I have to keep the same order, to prevent breaking code that
#depends on argument order only
Argument.__init__(self, name, default, shortDesc, longDesc, hints,
allowNone)
def coerce(self, val):
if not val.strip() and self.allowNone:
return None
try:
return int(val)
except ValueError:
raise InputError("%s is not valid, please enter a whole number, e.g. 10" % val)
class IntegerRange(Integer):
def __init__(self, name, min, max, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
self.min = min
self.max = max
Integer.__init__(self, name, allowNone=allowNone, default=default, shortDesc=shortDesc,
longDesc=longDesc, hints=hints)
def coerce(self, val):
result = Integer.coerce(self, val)
if self.allowNone and result == None:
return result
if result < self.min:
raise InputError("Value %s is too small, it should be at least %s" % (result, self.min))
if result > self.max:
raise InputError("Value %s is too large, it should be at most %s" % (result, self.max))
return result
class Float(Argument):
defaultDefault = None
def __init__(self, name, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
#although Argument now has allowNone, that was recently added, and
#putting it at the end kept things which relied on argument order
#from breaking. However, allowNone originally was in here, so
#I have to keep the same order, to prevent breaking code that
#depends on argument order only
Argument.__init__(self, name, default, shortDesc, longDesc, hints,
allowNone)
def coerce(self, val):
if not val.strip() and self.allowNone:
return None
try:
return float(val)
except ValueError:
raise InputError("Invalid float: %s" % val)
class Choice(Argument):
"""
The result of a choice between enumerated types. The choices should
be a list of tuples of tag, value, and description. The tag will be
the value returned if the user hits "Submit", and the description
is the label for the enumerated type. default is a list of all the
values (second elements in choices). If no defaults are specified,
initially the first item will be selected. Only one item can (should)
be selected at once.
"""
def __init__(self, name, choices=[], default=[], shortDesc=None,
longDesc=None, hints=None, allowNone=1):
self.choices = choices
if choices and not default:
default.append(choices[0][1])
Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone=allowNone)
def coerce(self, inIdent):
for ident, val, desc in self.choices:
if ident == inIdent:
return val
else:
raise InputError("Invalid Choice: %s" % inIdent)
class Flags(Argument):
"""
The result of a checkbox group or multi-menu. The flags should be a
list of tuples of tag, value, and description. The tag will be
the value returned if the user hits "Submit", and the description
is the label for the enumerated type. default is a list of all the
values (second elements in flags). If no defaults are specified,
initially nothing will be selected. Several items may be selected at
once.
"""
def __init__(self, name, flags=(), default=(), shortDesc=None,
longDesc=None, hints=None, allowNone=1):
self.flags = flags
Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone=allowNone)
def coerce(self, inFlagKeys):
if not inFlagKeys:
return []
outFlags = []
for inFlagKey in inFlagKeys:
for flagKey, flagVal, flagDesc in self.flags:
if inFlagKey == flagKey:
outFlags.append(flagVal)
break
else:
raise InputError("Invalid Flag: %s" % inFlagKey)
return outFlags
class CheckGroup(Flags):
pass
class RadioGroup(Choice):
pass
class Boolean(Argument):
def coerce(self, inVal):
if not inVal:
return 0
lInVal = str(inVal).lower()
if lInVal in ('no', 'n', 'f', 'false', '0'):
return 0
return 1
class File(Argument):
def __init__(self, name, allowNone=1, shortDesc=None, longDesc=None,
hints=None):
Argument.__init__(self, name, None, shortDesc, longDesc, hints,
allowNone=allowNone)
def coerce(self, file):
if not file and self.allowNone:
return None
elif file:
return file
else:
raise InputError("Invalid File")
def positiveInt(x):
x = int(x)
if x <= 0: raise ValueError
return x
class Date(Argument):
"""A date -- (year, month, day) tuple."""
defaultDefault = None
def __init__(self, name, allowNone=1, default=None, shortDesc=None,
longDesc=None, hints=None):
Argument.__init__(self, name, default, shortDesc, longDesc, hints)
self.allowNone = allowNone
if not allowNone:
self.defaultDefault = (1970, 1, 1)
def coerce(self, args):
"""Return tuple of ints (year, month, day)."""
if tuple(args) == ("", "", "") and self.allowNone:
return None
try:
year, month, day = map(positiveInt, args)
except ValueError:
raise InputError("Invalid date")
if (month, day) == (2, 29):
if not calendar.isleap(year):
raise InputError("%d was not a leap year" % year)
else:
return year, month, day
try:
mdays = calendar.mdays[month]
except IndexError:
raise InputError("Invalid date")
if day > mdays:
raise InputError("Invalid date")
return year, month, day
class Submit(Choice):
"""Submit button or a reasonable facsimile thereof."""
def __init__(self, name, choices=[("Submit", "submit", "Submit form")],
reset=0, shortDesc=None, longDesc=None, allowNone=0, hints=None):
Choice.__init__(self, name, choices=choices, shortDesc=shortDesc,
longDesc=longDesc, hints=hints)
self.allowNone = allowNone
self.reset = reset
def coerce(self, value):
if self.allowNone and not value:
return None
else:
return Choice.coerce(self, value)
class PresentationHint:
"""
A hint to a particular system.
"""
class MethodSignature:
def __init__(self, *sigList):
"""
"""
self.methodSignature = sigList
def getArgument(self, name):
for a in self.methodSignature:
if a.name == name:
return a
def method(self, callable, takesRequest=False):
return FormMethod(self, callable, takesRequest)
class FormMethod:
"""A callable object with a signature."""
def __init__(self, signature, callable, takesRequest=False):
self.signature = signature
self.callable = callable
self.takesRequest = takesRequest
def getArgs(self):
return tuple(self.signature.methodSignature)
def call(self,*args,**kw):
return self.callable(*args,**kw)
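# Example usage (a minimal sketch): describe a callable with a signature and
# coerce raw string input before calling it.  The argument names and the
# greet() function are illustrative only.
if __name__ == '__main__':
    signature = MethodSignature(String('name', min=1), Integer('age'))
    def greet(name, age):
        return '%s is %s' % (name, age)
    form = signature.method(greet)
    name = signature.getArgument('name').coerce('Bob')
    age = signature.getArgument('age').coerce('42')    # -> 42 (an int)
    print(form.call(name=name, age=age))               # Bob is 42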

View file

@@ -0,0 +1,23 @@
# -*- test-case-name: twisted.python.test.test_hashlib -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Deprecated in Twisted 13.1.0; please use hashlib from stdlib instead.
L{twisted.python.hashlib} presents a subset of the interface provided by
U{hashlib<http://docs.python.org/library/hashlib.html>}. The subset is the
interface required by various parts of Twisted. This allows application code
to transparently use APIs which existed before C{hashlib} was introduced or to
use C{hashlib} if it is available.
"""
from __future__ import absolute_import
from hashlib import md5, sha1
import warnings
__all__ = ["md5", "sha1"]
warnings.warn(
"twisted.python.hashlib was deprecated in "
"Twisted 13.1.0: Please use hashlib from stdlib.",
DeprecationWarning, stacklevel=2)

View file

@@ -0,0 +1,176 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
I define support for hookable instance methods.
These are methods which you can register pre-call and post-call external
functions to augment their functionality. People familiar with more esoteric
languages may think of these as \"method combinations\".
This could be used to add optional preconditions, user-extensible callbacks
(a-la emacs) or a thread-safety mechanism.
The four exported calls are:
- L{addPre}
- L{addPost}
- L{removePre}
- L{removePost}
All have the signature (class, methodName, callable), and the callable they
take must always have the signature (instance, *args, **kw) unless the
particular signature of the method they hook is known.
Hooks should typically not throw exceptions, however, no effort will be made by
this module to prevent them from doing so. Pre-hooks will always be called,
but post-hooks will only be called if the pre-hooks do not raise any exceptions
(they will still be called if the main method raises an exception). The return
values and exception status of the main method will be propagated (assuming
none of the hooks raise an exception). Hooks will be executed in the order in
which they are added.
"""
### Public Interface
class HookError(Exception):
"An error which will fire when an invariant is violated."
def addPre(klass, name, func):
"""hook.addPre(klass, name, func) -> None
Add a function to be called before the method klass.name is invoked.
"""
_addHook(klass, name, PRE, func)
def addPost(klass, name, func):
"""hook.addPost(klass, name, func) -> None
Add a function to be called after the method klass.name is invoked.
"""
_addHook(klass, name, POST, func)
def removePre(klass, name, func):
"""hook.removePre(klass, name, func) -> None
Remove a function (previously registered with addPre) so that it
is no longer executed before klass.name.
"""
_removeHook(klass, name, PRE, func)
def removePost(klass, name, func):
"""hook.removePre(klass, name, func) -> None
Remove a function (previously registered with addPost) so that it
is no longer executed after klass.name.
"""
_removeHook(klass, name, POST, func)
### "Helper" functions.
hooked_func = """
import %(module)s
def %(name)s(*args, **kw):
klazz = %(module)s.%(klass)s
for preMethod in klazz.%(preName)s:
preMethod(*args, **kw)
try:
return klazz.%(originalName)s(*args, **kw)
finally:
for postMethod in klazz.%(postName)s:
postMethod(*args, **kw)
"""
_PRE = '__hook_pre_%s_%s_%s__'
_POST = '__hook_post_%s_%s_%s__'
_ORIG = '__hook_orig_%s_%s_%s__'
def _XXX(k,n,s):
"""
String manipulation garbage.
"""
x = s % (k.__module__.replace('.', '_'), k.__name__, n)
return x
def PRE(k,n):
"(private) munging to turn a method name into a pre-hook-method-name"
return _XXX(k,n,_PRE)
def POST(k,n):
"(private) munging to turn a method name into a post-hook-method-name"
return _XXX(k,n,_POST)
def ORIG(k,n):
"(private) munging to turn a method name into an `original' identifier"
return _XXX(k,n,_ORIG)
def _addHook(klass, name, phase, func):
"(private) adds a hook to a method on a class"
_enhook(klass, name)
if not hasattr(klass, phase(klass, name)):
setattr(klass, phase(klass, name), [])
phaselist = getattr(klass, phase(klass, name))
phaselist.append(func)
def _removeHook(klass, name, phase, func):
"(private) removes a hook from a method on a class"
phaselistname = phase(klass, name)
if not hasattr(klass, ORIG(klass,name)):
raise HookError("no hooks present!")
phaselist = getattr(klass, phase(klass, name))
try: phaselist.remove(func)
except ValueError:
raise HookError("hook %s not found in removal list for %s"%
(name,klass))
if not getattr(klass, PRE(klass,name)) and not getattr(klass, POST(klass, name)):
_dehook(klass, name)
def _enhook(klass, name):
"(private) causes a certain method name to be hooked on a class"
if hasattr(klass, ORIG(klass, name)):
return
def newfunc(*args, **kw):
for preMethod in getattr(klass, PRE(klass, name)):
preMethod(*args, **kw)
try:
return getattr(klass, ORIG(klass, name))(*args, **kw)
finally:
for postMethod in getattr(klass, POST(klass, name)):
postMethod(*args, **kw)
try:
newfunc.func_name = name
except TypeError:
# Older Pythons don't let you do this
pass
oldfunc = getattr(klass, name).im_func
setattr(klass, ORIG(klass, name), oldfunc)
setattr(klass, PRE(klass, name), [])
setattr(klass, POST(klass, name), [])
setattr(klass, name, newfunc)
def _dehook(klass, name):
"(private) causes a certain method name no longer to be hooked on a class"
if not hasattr(klass, ORIG(klass, name)):
raise HookError("Cannot unhook!")
setattr(klass, name, getattr(klass, ORIG(klass,name)))
delattr(klass, PRE(klass,name))
delattr(klass, POST(klass,name))
delattr(klass, ORIG(klass,name))
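# Example usage (a minimal sketch; Python 2 only, like the module itself):
# wrap a method with a pre-call hook and remove it again.  The Greeter class
# and the announce() hook are illustrative only.
if __name__ == '__main__':
    class Greeter:
        def greet(self, name):
            print('hello, %s' % (name,))
    def announce(instance, name):
        print('about to greet %s' % (name,))
    addPre(Greeter, 'greet', announce)
    Greeter().greet('world')     # announcement, then greeting
    removePre(Greeter, 'greet', announce)
    Greeter().greet('world')     # plain greeting again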

View file

@@ -0,0 +1,91 @@
# -*- test-case-name: twisted.python.test.test_htmlizer -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
HTML rendering of Python source.
"""
import tokenize, cgi, keyword
import reflect
class TokenPrinter:
currentCol, currentLine = 0, 1
lastIdentifier = parameters = 0
def __init__(self, writer):
self.writer = writer
def printtoken(self, type, token, (srow, scol), (erow, ecol), line):
#print "printtoken(%r,%r,%r,(%r,%r),(%r,%r),%r), row=%r,col=%r" % (
# self, type, token, srow,scol, erow,ecol, line,
# self.currentLine, self.currentCol)
if self.currentLine < srow:
self.writer('\n'*(srow-self.currentLine))
self.currentLine, self.currentCol = srow, 0
self.writer(' '*(scol-self.currentCol))
if self.lastIdentifier:
type = "identifier"
self.parameters = 1
elif type == tokenize.NAME:
if keyword.iskeyword(token):
type = 'keyword'
else:
if self.parameters:
type = 'parameter'
else:
type = 'variable'
else:
type = tokenize.tok_name.get(type).lower()
self.writer(token, type)
self.currentCol = ecol
self.currentLine += token.count('\n')
if self.currentLine != erow:
self.currentCol = 0
self.lastIdentifier = token in ('def', 'class')
if token == ':':
self.parameters = 0
class HTMLWriter:
noSpan = []
def __init__(self, writer):
self.writer = writer
noSpan = []
reflect.accumulateClassList(self.__class__, "noSpan", noSpan)
self.noSpan = noSpan
def write(self, token, type=None):
token = cgi.escape(token)
if (type is None) or (type in self.noSpan):
self.writer(token)
else:
self.writer('<span class="py-src-%s">%s</span>' %
(type, token))
class SmallerHTMLWriter(HTMLWriter):
"""HTMLWriter that doesn't generate spans for some junk.
Results in much smaller HTML output.
"""
noSpan = ["endmarker", "indent", "dedent", "op", "newline", "nl"]
def filter(inp, out, writer=HTMLWriter):
out.write('<pre>')
printer = TokenPrinter(writer(out.write).write).printtoken
try:
tokenize.tokenize(inp.readline, printer)
except tokenize.TokenError:
pass
out.write('</pre>\n')
def main():
import sys
filter(open(sys.argv[1]), sys.stdout)
if __name__ == '__main__':
main()

View file

@@ -0,0 +1,214 @@
# -*- test-case-name: twisted.test.test_lockfile -*-
# Copyright (c) 2005 Divmod, Inc.
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Filesystem-based interprocess mutex.
"""
__metaclass__ = type
import errno, os
from time import time as _uniquefloat
from twisted.python.runtime import platform
def unique():
return str(int(_uniquefloat() * 1000))
from os import rename
if not platform.isWindows():
from os import kill
from os import symlink
from os import readlink
from os import remove as rmlink
_windows = False
else:
_windows = True
try:
from win32api import OpenProcess
import pywintypes
except ImportError:
kill = None
else:
ERROR_ACCESS_DENIED = 5
ERROR_INVALID_PARAMETER = 87
def kill(pid, signal):
try:
OpenProcess(0, 0, pid)
except pywintypes.error as e:
if e.args[0] == ERROR_ACCESS_DENIED:
return
elif e.args[0] == ERROR_INVALID_PARAMETER:
raise OSError(errno.ESRCH, None)
raise
else:
raise RuntimeError("OpenProcess is required to fail.")
_open = file
# XXX Implement an atomic thingamajig for win32
def symlink(value, filename):
newlinkname = filename+"."+unique()+'.newlink'
newvalname = os.path.join(newlinkname,"symlink")
os.mkdir(newlinkname)
f = _open(newvalname,'wcb')
f.write(value)
f.flush()
f.close()
try:
rename(newlinkname, filename)
except:
os.remove(newvalname)
os.rmdir(newlinkname)
raise
def readlink(filename):
try:
fObj = _open(os.path.join(filename,'symlink'), 'rb')
except IOError as e:
if e.errno == errno.ENOENT or e.errno == errno.EIO:
raise OSError(e.errno, None)
raise
else:
result = fObj.read()
fObj.close()
return result
def rmlink(filename):
os.remove(os.path.join(filename, 'symlink'))
os.rmdir(filename)
class FilesystemLock:
"""
A mutex.
This relies on the filesystem property that creating
a symlink is an atomic operation and that it will
fail if the symlink already exists. Deleting the
symlink will release the lock.
@ivar name: The name of the file associated with this lock.
@ivar clean: Indicates whether this lock was released cleanly by its
last owner. Only meaningful after C{lock} has been called and
returns True.
@ivar locked: Indicates whether the lock is currently held by this
object.
"""
clean = None
locked = False
def __init__(self, name):
self.name = name
def lock(self):
"""
Acquire this lock.
@rtype: C{bool}
@return: True if the lock is acquired, false otherwise.
@raise: Any exception os.symlink() may raise, other than
EEXIST.
"""
clean = True
while True:
try:
symlink(str(os.getpid()), self.name)
except OSError as e:
if _windows and e.errno in (errno.EACCES, errno.EIO):
# The lock is in the middle of being deleted because we're
# on Windows where lock removal isn't atomic. Give up, we
# don't know how long this is going to take.
return False
if e.errno == errno.EEXIST:
try:
pid = readlink(self.name)
except OSError as e:
if e.errno == errno.ENOENT:
# The lock has vanished, try to claim it in the
# next iteration through the loop.
continue
raise
except IOError as e:
if _windows and e.errno == errno.EACCES:
# The lock is in the middle of being
# deleted because we're on Windows where
# lock removal isn't atomic. Give up, we
# don't know how long this is going to
# take.
return False
raise
try:
if kill is not None:
kill(int(pid), 0)
except OSError as e:
if e.errno == errno.ESRCH:
# The owner has vanished, try to claim it in the next
# iteration through the loop.
try:
rmlink(self.name)
except OSError as e:
if e.errno == errno.ENOENT:
# Another process cleaned up the lock.
# Race them to acquire it in the next
# iteration through the loop.
continue
raise
clean = False
continue
raise
return False
raise
self.locked = True
self.clean = clean
return True
def unlock(self):
"""
Release this lock.
This deletes the directory with the given name.
@raise: Any exception os.readlink() may raise, or
ValueError if the lock is not owned by this process.
"""
pid = readlink(self.name)
if int(pid) != os.getpid():
raise ValueError("Lock %r not owned by this process" % (self.name,))
rmlink(self.name)
self.locked = False
def isLocked(name):
"""Determine if the lock of the given name is held or not.
@type name: C{str}
@param name: The filesystem path to the lock to test
@rtype: C{bool}
@return: True if the lock is held, False otherwise.
"""
l = FilesystemLock(name)
result = None
try:
result = l.lock()
finally:
if result:
l.unlock()
return not result
__all__ = ['FilesystemLock', 'isLocked']
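# Example usage (a minimal sketch): try to acquire a lock, do some work, and
# release it.  The lock path below is illustrative only.
if __name__ == '__main__':
    lock = FilesystemLock('example.lock')
    if lock.lock():
        try:
            print('lock acquired (last owner exited cleanly: %s)' % (lock.clean,))
        finally:
            lock.unlock()
    else:
        print('lock is currently held by pid %s' % (readlink('example.lock'),))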

View file

@@ -0,0 +1,624 @@
# -*- test-case-name: twisted.test.test_log -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Logging and metrics infrastructure.
"""
from __future__ import division, absolute_import
import sys
import time
import warnings
from datetime import datetime
import logging
from zope.interface import Interface
from twisted.python.compat import unicode, _PY3
from twisted.python import context
from twisted.python import reflect
from twisted.python import util
from twisted.python import failure
from twisted.python.threadable import synchronize
class ILogContext:
"""
Actually, this interface is just a synonym for the dictionary interface,
but it serves as a key for the default information in a log.
I do not inherit from C{Interface} because the world is a cruel place.
"""
class ILogObserver(Interface):
"""
An observer which can do something with log events.
Given that most log observers are actually bound methods, it's okay to not
explicitly declare provision of this interface.
"""
def __call__(eventDict):
"""
Log an event.
@type eventDict: C{dict} with C{str} keys.
@param eventDict: A dictionary with arbitrary keys. However, these
keys are often available:
- C{message}: A C{tuple} of C{str} containing messages to be
logged.
- C{system}: A C{str} which indicates the "system" which is
generating this event.
- C{isError}: A C{bool} indicating whether this event represents
an error.
- C{failure}: A L{failure.Failure} instance
- C{why}: Used as header of the traceback in case of errors.
- C{format}: A string format used in place of C{message} to
customize the event. The intent is for the observer to format
a message by doing something like C{format % eventDict}.
"""
context.setDefault(ILogContext,
{"isError": 0,
"system": "-"})
def callWithContext(ctx, func, *args, **kw):
newCtx = context.get(ILogContext).copy()
newCtx.update(ctx)
return context.call({ILogContext: newCtx}, func, *args, **kw)
def callWithLogger(logger, func, *args, **kw):
"""
Utility method which wraps a function in a try:/except:, logs a failure if
one occurs, and uses the system's logPrefix.
"""
try:
lp = logger.logPrefix()
except KeyboardInterrupt:
raise
except:
lp = '(buggy logPrefix method)'
err(system=lp)
try:
return callWithContext({"system": lp}, func, *args, **kw)
except KeyboardInterrupt:
raise
except:
err(system=lp)
def err(_stuff=None, _why=None, **kw):
"""
Write a failure to the log.
The C{_stuff} and C{_why} parameters use an underscore prefix to lessen
the chance of colliding with a keyword argument the application wishes
to pass. It is intended that they be supplied with arguments passed
positionally, not by keyword.
@param _stuff: The failure to log. If C{_stuff} is C{None} a new
L{Failure} will be created from the current exception state. If
C{_stuff} is an C{Exception} instance it will be wrapped in a
L{Failure}.
@type _stuff: C{NoneType}, C{Exception}, or L{Failure}.
@param _why: The source of this failure. This will be logged along with
C{_stuff} and should describe the context in which the failure
occurred.
@type _why: C{str}
"""
if _stuff is None:
_stuff = failure.Failure()
if isinstance(_stuff, failure.Failure):
msg(failure=_stuff, why=_why, isError=1, **kw)
elif isinstance(_stuff, Exception):
msg(failure=failure.Failure(_stuff), why=_why, isError=1, **kw)
else:
msg(repr(_stuff), why=_why, isError=1, **kw)
deferr = err
class Logger:
"""
This represents a class which may 'own' a log. Used by subclassing.
"""
def logPrefix(self):
"""
Override this method to insert custom logging behavior. Its
return value will be inserted in front of every line. It may
be called more times than the number of output lines.
"""
return '-'
class LogPublisher:
"""
Class for singleton log message publishing.
"""
synchronized = ['msg']
def __init__(self):
self.observers = []
def addObserver(self, other):
"""
Add a new observer.
@type other: Provider of L{ILogObserver}
@param other: A callable object that will be called with each new log
message (a dict).
"""
assert callable(other)
self.observers.append(other)
def removeObserver(self, other):
"""
Remove an observer.
"""
self.observers.remove(other)
def msg(self, *message, **kw):
"""
Log a new message.
The message should be a native string, i.e. bytes on Python 2 and
Unicode on Python 3. For compatibility with both use the native string
syntax, for example::
>>> log.msg('Hello, world.')
You MUST avoid passing in Unicode on Python 2, and you should also avoid the multiple-argument form::
>>> log.msg('Hello ', 'world.')
This form only works (sometimes) by accident.
Keyword arguments will be converted into items in the event
dict that is passed to L{ILogObserver} implementations.
Each implementation, in turn, can define keys that are used
by it specifically, in addition to common keys listed at
L{ILogObserver.__call__}.
For example, to set the C{system} parameter while logging
a message::
>>> log.msg('Started', system='Foo')
"""
actualEventDict = (context.get(ILogContext) or {}).copy()
actualEventDict.update(kw)
actualEventDict['message'] = message
actualEventDict['time'] = time.time()
for i in range(len(self.observers) - 1, -1, -1):
try:
self.observers[i](actualEventDict)
except KeyboardInterrupt:
# Don't swallow keyboard interrupt!
raise
except UnicodeEncodeError:
raise
except:
observer = self.observers[i]
self.observers[i] = lambda event: None
try:
self._err(failure.Failure(),
"Log observer %s failed." % (observer,))
except:
# Sometimes err() will throw an exception,
# e.g. RuntimeError due to blowing the stack; if that
# happens, there's not much we can do...
pass
self.observers[i] = observer
def _err(self, failure, why):
"""
Log a failure.
Similar in functionality to the global L{err} function, but the failure
gets published only to observers attached to this publisher.
@param failure: The failure to log.
@type failure: L{Failure}.
@param why: The source of this failure. This will be logged along with
the C{failure} and should describe the context in which the failure
occurred.
@type why: C{str}
"""
self.msg(failure=failure, why=why, isError=1)
def showwarning(self, message, category, filename, lineno, file=None,
line=None):
"""
Twisted-enabled wrapper around L{warnings.showwarning}.
If C{file} is C{None}, the default behaviour is to emit the warning to
the log system, otherwise the original L{warnings.showwarning} Python
function is called.
"""
if file is None:
self.msg(warning=message, category=reflect.qual(category),
filename=filename, lineno=lineno,
format="%(filename)s:%(lineno)s: %(category)s: %(warning)s")
else:
if sys.version_info < (2, 6):
_oldshowwarning(message, category, filename, lineno, file)
else:
_oldshowwarning(message, category, filename, lineno, file, line)
synchronize(LogPublisher)
try:
theLogPublisher
except NameError:
theLogPublisher = LogPublisher()
addObserver = theLogPublisher.addObserver
removeObserver = theLogPublisher.removeObserver
msg = theLogPublisher.msg
showwarning = theLogPublisher.showwarning
def _safeFormat(fmtString, fmtDict):
"""
Try to format the string C{fmtString} using C{fmtDict} arguments,
swallowing all errors to always return a string.
"""
# There's a way we could make this if not safer at least more
# informative: perhaps some sort of str/repr wrapper objects
# could be wrapped around the things inside of C{fmtDict}. That way
# if the event dict contains an object with a bad __repr__, we
# can only cry about that individual object instead of the
# entire event dict.
try:
text = fmtString % fmtDict
except KeyboardInterrupt:
raise
except:
try:
text = ('Invalid format string or unformattable object in log message: %r, %s' % (fmtString, fmtDict))
except:
try:
text = 'UNFORMATTABLE OBJECT WRITTEN TO LOG with fmt %r, MESSAGE LOST' % (fmtString,)
except:
text = 'PATHOLOGICAL ERROR IN BOTH FORMAT STRING AND MESSAGE DETAILS, MESSAGE LOST'
return text
def textFromEventDict(eventDict):
"""
Extract text from an event dict passed to a log observer. If it cannot
handle the dict, it returns None.
The possible keys of eventDict are:
- C{message}: by default, it holds the final text. It's required, but can
be empty if either C{isError} or C{format} is provided (the first
having the priority).
- C{isError}: boolean indicating the nature of the event.
- C{failure}: L{failure.Failure} instance, required if the event is an
error.
- C{why}: if defined, used as header of the traceback in case of errors.
- C{format}: string format used in place of C{message} to customize
the event. It uses all keys present in C{eventDict} to format
the text.
Other keys will be used when applying the C{format}, or ignored.
"""
edm = eventDict['message']
if not edm:
if eventDict['isError'] and 'failure' in eventDict:
text = ((eventDict.get('why') or 'Unhandled Error')
+ '\n' + eventDict['failure'].getTraceback())
elif 'format' in eventDict:
text = _safeFormat(eventDict['format'], eventDict)
else:
# we don't know how to log this
return
else:
text = ' '.join(map(reflect.safe_str, edm))
return text
class FileLogObserver:
"""
Log observer that writes to a file-like object.
@type timeFormat: C{str} or C{NoneType}
@ivar timeFormat: If not C{None}, the format string passed to strftime().
"""
timeFormat = None
def __init__(self, f):
self.write = f.write
self.flush = f.flush
def getTimezoneOffset(self, when):
"""
Return the current local timezone offset from UTC.
@type when: C{int}
@param when: POSIX (ie, UTC) timestamp for which to find the offset.
@rtype: C{int}
@return: The number of seconds offset from UTC. West is positive,
east is negative.
"""
offset = datetime.utcfromtimestamp(when) - datetime.fromtimestamp(when)
return offset.days * (60 * 60 * 24) + offset.seconds
def formatTime(self, when):
"""
Format the given UTC value as a string representing that time in the
local timezone.
By default it's formatted as an ISO8601-like string (ISO8601 date and
ISO8601 time separated by a space). It can be customized using the
C{timeFormat} attribute, which will be used as input for the underlying
L{datetime.datetime.strftime} call.
@type when: C{int}
@param when: POSIX (ie, UTC) timestamp for which to find the offset.
@rtype: C{str}
"""
if self.timeFormat is not None:
return datetime.fromtimestamp(when).strftime(self.timeFormat)
tzOffset = -self.getTimezoneOffset(when)
when = datetime.utcfromtimestamp(when + tzOffset)
tzHour = abs(int(tzOffset / 60 / 60))
tzMin = abs(int(tzOffset / 60 % 60))
if tzOffset < 0:
tzSign = '-'
else:
tzSign = '+'
return '%d-%02d-%02d %02d:%02d:%02d%s%02d%02d' % (
when.year, when.month, when.day,
when.hour, when.minute, when.second,
tzSign, tzHour, tzMin)
def emit(self, eventDict):
text = textFromEventDict(eventDict)
if text is None:
return
timeStr = self.formatTime(eventDict['time'])
fmtDict = {'system': eventDict['system'], 'text': text.replace("\n", "\n\t")}
msgStr = _safeFormat("[%(system)s] %(text)s\n", fmtDict)
util.untilConcludes(self.write, timeStr + " " + msgStr)
util.untilConcludes(self.flush) # Hoorj!
def start(self):
"""
Start observing log events.
"""
addObserver(self.emit)
def stop(self):
"""
Stop observing log events.
"""
removeObserver(self.emit)
class PythonLoggingObserver(object):
"""
Output twisted messages to Python standard library L{logging} module.
WARNING: specific logging configurations (example: network) can lead to
a blocking system. Nothing is done here to prevent that, so be sure not to
use this observer in such configurations: code within Twisted, such as twisted.web, assumes that logging
does not block.
"""
def __init__(self, loggerName="twisted"):
"""
@param loggerName: identifier used for getting logger.
@type loggerName: C{str}
"""
self.logger = logging.getLogger(loggerName)
def emit(self, eventDict):
"""
Receive a twisted log entry, format it and bridge it to python.
By default the logging level used is info; log.err produces error
level, and you can customize the level by using the C{logLevel} key::
>>> log.msg('debugging', logLevel=logging.DEBUG)
"""
if 'logLevel' in eventDict:
level = eventDict['logLevel']
elif eventDict['isError']:
level = logging.ERROR
else:
level = logging.INFO
text = textFromEventDict(eventDict)
if text is None:
return
self.logger.log(level, text)
def start(self):
"""
Start observing log events.
"""
addObserver(self.emit)
def stop(self):
"""
Stop observing log events.
"""
removeObserver(self.emit)
class StdioOnnaStick:
"""
Class that pretends to be stdout/err, and turns writes into log messages.
@ivar isError: boolean indicating whether this is stderr, in which case
log messages will be logged as errors.
@ivar encoding: unicode encoding used to encode any unicode strings
written to this object.
"""
closed = 0
softspace = 0
mode = 'wb'
name = '<stdio (log)>'
def __init__(self, isError=0, encoding=None):
self.isError = isError
if encoding is None:
encoding = sys.getdefaultencoding()
self.encoding = encoding
self.buf = ''
def close(self):
pass
def fileno(self):
return -1
def flush(self):
pass
def read(self):
raise IOError("can't read from the log!")
readline = read
readlines = read
seek = read
tell = read
def write(self, data):
if not _PY3 and isinstance(data, unicode):
data = data.encode(self.encoding)
d = (self.buf + data).split('\n')
self.buf = d[-1]
messages = d[0:-1]
for message in messages:
msg(message, printed=1, isError=self.isError)
def writelines(self, lines):
for line in lines:
if not _PY3 and isinstance(line, unicode):
line = line.encode(self.encoding)
msg(line, printed=1, isError=self.isError)
try:
_oldshowwarning
except NameError:
_oldshowwarning = None
def startLogging(file, *a, **kw):
"""
Initialize logging to a specified file.
@return: A L{FileLogObserver} if a new observer is added, None otherwise.
"""
if isinstance(file, StdioOnnaStick):
return
flo = FileLogObserver(file)
startLoggingWithObserver(flo.emit, *a, **kw)
return flo
def startLoggingWithObserver(observer, setStdout=1):
"""
Initialize logging to a specified observer. If setStdout is true
(the default), also redirect sys.stdout and sys.stderr
to the log.
"""
global defaultObserver, _oldshowwarning
if not _oldshowwarning:
_oldshowwarning = warnings.showwarning
warnings.showwarning = showwarning
if defaultObserver:
defaultObserver.stop()
defaultObserver = None
addObserver(observer)
msg("Log opened.")
if setStdout:
sys.stdout = logfile
sys.stderr = logerr
class NullFile:
softspace = 0
def read(self): pass
def write(self, bytes): pass
def flush(self): pass
def close(self): pass
def discardLogs():
"""
Throw away all logs.
"""
global logfile
logfile = NullFile()
# Prevent logfile from being erased on reload. This only works in cpython.
try:
logfile
except NameError:
logfile = StdioOnnaStick(0, getattr(sys.stdout, "encoding", None))
logerr = StdioOnnaStick(1, getattr(sys.stderr, "encoding", None))
class DefaultObserver:
"""
Default observer.
Will ignore all non-error messages and send error messages to sys.stderr.
Will be removed when startLogging() is called for the first time.
"""
stderr = sys.stderr
def _emit(self, eventDict):
if eventDict["isError"]:
if 'failure' in eventDict:
text = ((eventDict.get('why') or 'Unhandled Error')
+ '\n' + eventDict['failure'].getTraceback())
else:
text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
self.stderr.write(text)
self.stderr.flush()
def start(self):
addObserver(self._emit)
def stop(self):
removeObserver(self._emit)
try:
defaultObserver
except NameError:
defaultObserver = DefaultObserver()
defaultObserver.start()
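# Example usage (a minimal sketch): send the log to stdout, emit an ordinary
# message, and log a failure from an "except" block.  The system name and the
# deliberate error are illustrative only.
if __name__ == '__main__':
    startLogging(sys.stdout)
    msg('Hello, world.', system='example')
    try:
        1 / 0
    except ZeroDivisionError:
        err(None, 'arithmetic went wrong')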

View file

@@ -0,0 +1,323 @@
# -*- test-case-name: twisted.test.test_logfile -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A rotating, browsable log file.
"""
# System Imports
import os, glob, time, stat
from twisted.python import threadable
class BaseLogFile:
"""
The base class for a log file that can be rotated.
"""
synchronized = ["write", "rotate"]
def __init__(self, name, directory, defaultMode=None):
"""
Create a log file.
@param name: name of the file
@param directory: directory holding the file
@param defaultMode: permissions used to create the file. Default to
current permissions of the file if the file exists.
"""
self.directory = directory
self.name = name
self.path = os.path.join(directory, name)
if defaultMode is None and os.path.exists(self.path):
self.defaultMode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
else:
self.defaultMode = defaultMode
self._openFile()
def fromFullPath(cls, filename, *args, **kwargs):
"""
Construct a log file from a full file path.
"""
logPath = os.path.abspath(filename)
return cls(os.path.basename(logPath),
os.path.dirname(logPath), *args, **kwargs)
fromFullPath = classmethod(fromFullPath)
def shouldRotate(self):
"""
Override with a method that returns true if the log
should be rotated.
"""
raise NotImplementedError
def _openFile(self):
"""
Open the log file.
"""
self.closed = False
if os.path.exists(self.path):
self._file = file(self.path, "r+", 1)
self._file.seek(0, 2)
else:
if self.defaultMode is not None:
# Set the lowest permissions
oldUmask = os.umask(0777)
try:
self._file = file(self.path, "w+", 1)
finally:
os.umask(oldUmask)
else:
self._file = file(self.path, "w+", 1)
if self.defaultMode is not None:
try:
os.chmod(self.path, self.defaultMode)
except OSError:
# Probably /dev/null or something?
pass
def __getstate__(self):
state = self.__dict__.copy()
del state["_file"]
return state
def __setstate__(self, state):
self.__dict__ = state
self._openFile()
def write(self, data):
"""
Write some data to the file.
"""
if self.shouldRotate():
self.flush()
self.rotate()
self._file.write(data)
def flush(self):
"""
Flush the file.
"""
self._file.flush()
def close(self):
"""
Close the file.
The file cannot be used once it has been closed.
"""
self.closed = True
self._file.close()
self._file = None
def reopen(self):
"""
Reopen the log file. This is mainly useful if you use an external log
rotation tool, which moves the file out from under you.
Note that on Windows you probably need a specific API to rename the
file, as simply using os.rename, for example, is not supported.
"""
self.close()
self._openFile()
def getCurrentLog(self):
"""
Return a LogReader for the current log file.
"""
return LogReader(self.path)
class LogFile(BaseLogFile):
"""
A log file that can be rotated.
A rotateLength of None disables automatic log rotation.
"""
def __init__(self, name, directory, rotateLength=1000000, defaultMode=None,
maxRotatedFiles=None):
"""
Create a log file rotating on length.
@param name: file name.
@type name: C{str}
@param directory: path of the log file.
@type directory: C{str}
@param rotateLength: size, in bytes, at which the log file rotates. Defaults
to 1M.
@type rotateLength: C{int}
@param defaultMode: mode used to create the file.
@type defaultMode: C{int}
@param maxRotatedFiles: if not None, max number of log files the class
creates. Warning: it removes all log files above this number.
@type maxRotatedFiles: C{int}
"""
BaseLogFile.__init__(self, name, directory, defaultMode)
self.rotateLength = rotateLength
self.maxRotatedFiles = maxRotatedFiles
def _openFile(self):
BaseLogFile._openFile(self)
self.size = self._file.tell()
def shouldRotate(self):
"""
Rotate when the log file size is larger than rotateLength.
"""
return self.rotateLength and self.size >= self.rotateLength
def getLog(self, identifier):
"""
Given an integer, return a LogReader for an old log file.
"""
filename = "%s.%d" % (self.path, identifier)
if not os.path.exists(filename):
raise ValueError, "no such logfile exists"
return LogReader(filename)
def write(self, data):
"""
Write some data to the file.
"""
BaseLogFile.write(self, data)
self.size += len(data)
def rotate(self):
"""
Rotate the file and create a new one.
If it's not possible to open a new logfile, this will fail silently,
and continue logging to the old logfile.
"""
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
return
logs = self.listLogs()
logs.reverse()
for i in logs:
if self.maxRotatedFiles is not None and i >= self.maxRotatedFiles:
os.remove("%s.%d" % (self.path, i))
else:
os.rename("%s.%d" % (self.path, i), "%s.%d" % (self.path, i + 1))
self._file.close()
os.rename(self.path, "%s.1" % self.path)
self._openFile()
def listLogs(self):
"""
Return sorted list of integers - the old logs' identifiers.
"""
result = []
for name in glob.glob("%s.*" % self.path):
try:
counter = int(name.split('.')[-1])
if counter:
result.append(counter)
except ValueError:
pass
result.sort()
return result
def __getstate__(self):
state = BaseLogFile.__getstate__(self)
del state["size"]
return state
threadable.synchronize(LogFile)
class DailyLogFile(BaseLogFile):
"""A log file that is rotated daily (at or after midnight localtime)
"""
def _openFile(self):
BaseLogFile._openFile(self)
self.lastDate = self.toDate(os.stat(self.path)[8])
def shouldRotate(self):
"""Rotate when the date has changed since last write"""
return self.toDate() > self.lastDate
def toDate(self, *args):
"""Convert a unixtime to (year, month, day) localtime tuple,
or return the current (year, month, day) localtime tuple.
This function primarily exists so you may overload it with
gmtime, or some cruft to make unit testing possible.
"""
# primarily so this can be unit tested easily
return time.localtime(*args)[:3]
def suffix(self, tupledate):
"""Return the suffix given a (year, month, day) tuple or unixtime"""
try:
return '_'.join(map(str, tupledate))
except:
# try taking a float unixtime
return '_'.join(map(str, self.toDate(tupledate)))
def getLog(self, identifier):
"""Given a unix time, return a LogReader for an old log file."""
if self.toDate(identifier) == self.lastDate:
return self.getCurrentLog()
filename = "%s.%s" % (self.path, self.suffix(identifier))
if not os.path.exists(filename):
raise ValueError, "no such logfile exists"
return LogReader(filename)
def write(self, data):
"""Write some data to the log file"""
BaseLogFile.write(self, data)
# Guard against a corner case where time.time()
# could potentially run backwards to yesterday.
# Primarily due to network time.
self.lastDate = max(self.lastDate, self.toDate())
def rotate(self):
"""Rotate the file and create a new one.
If it's not possible to open a new logfile, this will fail silently,
and continue logging to the old logfile.
"""
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
return
newpath = "%s.%s" % (self.path, self.suffix(self.lastDate))
if os.path.exists(newpath):
return
self._file.close()
os.rename(self.path, newpath)
self._openFile()
def __getstate__(self):
state = BaseLogFile.__getstate__(self)
del state["lastDate"]
return state
threadable.synchronize(DailyLogFile)
class LogReader:
"""Read from a log file."""
def __init__(self, name):
self._file = file(name, "r")
def readLines(self, lines=10):
"""Read a list of lines from the log file.
This doesn't return all of the file's lines - call it multiple times.
"""
result = []
for i in range(lines):
line = self._file.readline()
if not line:
break
result.append(line)
return result
def close(self):
self._file.close()
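# Editor's note: an illustrative usage sketch of the rotation API above, not
# part of the original module. The directory and sizes are made up.
if __name__ == "__main__":
    import tempfile
    logDir = tempfile.mkdtemp()
    f = LogFile("example.log", logDir, rotateLength=100)
    for i in range(50):
        f.write("line %d\n" % (i,))      # rotates automatically past 100 bytes
    f.flush()
    print(f.listLogs())                  # identifiers of the rotated files
    reader = f.getCurrentLog()           # a LogReader over the current file
    print(reader.readLines(5))
    f.close()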

View file

@@ -0,0 +1,775 @@
# -*- test-case-name: twisted.test.test_modules -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module aims to provide a unified, object-oriented view of Python's
runtime hierarchy.
Python is a very dynamic language with a wide variety of introspection utilities.
However, these utilities can be hard to use, because there is no consistent
API. The introspection API in python is made up of attributes (__name__,
__module__, func_name, etc) on instances, modules, classes and functions which
vary between those four types, utility modules such as 'inspect' which provide
some functionality, the 'imp' module, the "compiler" module, the semantics of
PEP 302 support, and setuptools, among other things.
At the top, you have "PythonPath", an abstract representation of sys.path which
includes methods to locate top-level modules, with or without loading them.
The top-level exposed functions in this module for accessing the system path
are "walkModules", "iterModules", and "getModule".
From most to least specific, here are the objects provided::
PythonPath # sys.path
|
v
PathEntry # one entry on sys.path: an importer
|
v
PythonModule # a module or package that can be loaded
|
v
PythonAttribute # an attribute of a module (function or class)
|
v
PythonAttribute # an attribute of a function or class
|
v
...
Here's an example of idiomatic usage: this is what you would do to list all of
the modules outside the standard library's python-files directory::
import os
stdlibdir = os.path.dirname(os.__file__)
from twisted.python.modules import iterModules
for modinfo in iterModules():
if (modinfo.pathEntry.filePath.path != stdlibdir
and not modinfo.isPackage()):
print 'unpackaged: %s: %s' % (
modinfo.name, modinfo.filePath.path)
"""
__metaclass__ = type
# let's try to keep path imports to a minimum...
from os.path import dirname, split as splitpath
import sys
import zipimport
import inspect
import warnings
from zope.interface import Interface, implements
from twisted.python.components import registerAdapter
from twisted.python.filepath import FilePath, UnlistableError
from twisted.python.zippath import ZipArchive
from twisted.python.reflect import namedAny
_nothing = object()
PYTHON_EXTENSIONS = ['.py']
OPTIMIZED_MODE = __doc__ is None
if OPTIMIZED_MODE:
PYTHON_EXTENSIONS.append('.pyo')
else:
PYTHON_EXTENSIONS.append('.pyc')
def _isPythonIdentifier(string):
"""
cheezy fake test for proper identifier-ness.
@param string: a str which might or might not be a valid python identifier.
@return: True or False
"""
return (' ' not in string and
'.' not in string and
'-' not in string)
def _isPackagePath(fpath):
# Determine if a FilePath-like object is a Python package. TODO: deal with
# __init__module.(so|dll|pyd)?
extless = fpath.splitext()[0]
basend = splitpath(extless)[1]
return basend == "__init__"
class _ModuleIteratorHelper:
"""
This mixin provides common behavior between python module and path entries,
since the mechanism for searching sys.path and __path__ attributes is
remarkably similar.
"""
def iterModules(self):
"""
Loop over the modules present below this entry or package on PYTHONPATH.
For modules which are not packages, this will yield nothing.
For packages and path entries, this will only yield modules one level
down; i.e. if there is a package a.b.c, iterModules on a will only
return a.b. If you want to descend deeply, use walkModules.
@return: a generator which yields PythonModule instances that describe
modules which can be, or have been, imported.
"""
yielded = {}
if not self.filePath.exists():
return
for placeToLook in self._packagePaths():
try:
children = placeToLook.children()
except UnlistableError:
continue
children.sort()
for potentialTopLevel in children:
ext = potentialTopLevel.splitext()[1]
potentialBasename = potentialTopLevel.basename()[:-len(ext)]
if ext in PYTHON_EXTENSIONS:
# TODO: this should be a little choosier about which path entry
# it selects first, and it should do all the .so checking and
# crud
if not _isPythonIdentifier(potentialBasename):
continue
modname = self._subModuleName(potentialBasename)
if modname.split(".")[-1] == '__init__':
# This marks the directory as a package so it can't be
# a module.
continue
if modname not in yielded:
yielded[modname] = True
pm = PythonModule(modname, potentialTopLevel, self._getEntry())
assert pm != self
yield pm
else:
if (ext or not _isPythonIdentifier(potentialBasename)
or not potentialTopLevel.isdir()):
continue
modname = self._subModuleName(potentialTopLevel.basename())
for ext in PYTHON_EXTENSIONS:
initpy = potentialTopLevel.child("__init__"+ext)
if initpy.exists() and modname not in yielded:
yielded[modname] = True
pm = PythonModule(modname, initpy, self._getEntry())
assert pm != self
yield pm
break
def walkModules(self, importPackages=False):
"""
Similar to L{iterModules}, this yields self, and then every module in my
package or entry, and every submodule in each package or entry.
In other words, this is deep, and L{iterModules} is shallow.
"""
yield self
for package in self.iterModules():
for module in package.walkModules(importPackages=importPackages):
yield module
def _subModuleName(self, mn):
"""
This is a hook to provide packages with the ability to specify their names
as a prefix to submodules here.
"""
return mn
def _packagePaths(self):
"""
Implement in subclasses to specify where to look for modules.
@return: iterable of FilePath-like objects.
"""
raise NotImplementedError()
def _getEntry(self):
"""
Implement in subclasses to specify what path entry submodules will come
from.
@return: a PathEntry instance.
"""
raise NotImplementedError()
def __getitem__(self, modname):
"""
Retrieve a module from below this path or package.
@param modname: a str naming a module to be loaded. For entries, this
is a top-level, undotted package name, and for packages it is the name
of the module without the package prefix. For example, if you have a
PythonModule representing the 'twisted' package, you could use::
twistedPackageObj['python']['modules']
to retrieve this module.
@raise: KeyError if the module is not found.
@return: a PythonModule.
"""
for module in self.iterModules():
if module.name == self._subModuleName(modname):
return module
raise KeyError(modname)
def __iter__(self):
"""
Implemented to raise NotImplementedError for clarity, so that attempting to
loop over this object won't call __getitem__.
Note: in the future there might be some sensible default for iteration,
like 'walkEverything', so this is deliberately untested and undefined
behavior.
"""
raise NotImplementedError()
class PythonAttribute:
"""
I represent a function, class, or other object that is present.
@ivar name: the fully-qualified python name of this attribute.
@ivar onObject: a reference to a PythonModule or other PythonAttribute that
is this attribute's logical parent.
"""
def __init__(self, name, onObject, loaded, pythonValue):
"""
Create a PythonAttribute. This is a private constructor. Do not construct
me directly, use PythonModule.iterAttributes.
@param name: the FQPN
@param onObject: see ivar
@param loaded: always True, for now
@param pythonValue: the value of the attribute we're pointing to.
"""
self.name = name
self.onObject = onObject
self._loaded = loaded
self.pythonValue = pythonValue
def __repr__(self):
return 'PythonAttribute<%r>'%(self.name,)
def isLoaded(self):
"""
Return a boolean describing whether the attribute this describes has
actually been loaded into memory by importing its module.
Note: this currently always returns true; there is no Python parser
support in this module yet.
"""
return self._loaded
def load(self, default=_nothing):
"""
Load the value associated with this attribute.
@return: an arbitrary Python object, or 'default' if there is an error
loading it.
"""
return self.pythonValue
def iterAttributes(self):
for name, val in inspect.getmembers(self.load()):
yield PythonAttribute(self.name+'.'+name, self, True, val)
class PythonModule(_ModuleIteratorHelper):
"""
Representation of a module which could be imported from sys.path.
@ivar name: the fully qualified python name of this module.
@ivar filePath: a FilePath-like object which points to the location of this
module.
@ivar pathEntry: a L{PathEntry} instance which this module was located
from.
"""
def __init__(self, name, filePath, pathEntry):
"""
Create a PythonModule. Do not construct this directly, instead inspect a
PythonPath or other PythonModule instances.
@param name: see ivar
@param filePath: see ivar
@param pathEntry: see ivar
"""
assert not name.endswith(".__init__")
self.name = name
self.filePath = filePath
self.parentPath = filePath.parent()
self.pathEntry = pathEntry
def _getEntry(self):
return self.pathEntry
def __repr__(self):
"""
Return a string representation including the module name.
"""
return 'PythonModule<%r>' % (self.name,)
def isLoaded(self):
"""
Determine if the module is loaded into sys.modules.
@return: a boolean: true if loaded, false if not.
"""
return self.pathEntry.pythonPath.moduleDict.get(self.name) is not None
def iterAttributes(self):
"""
List all the attributes defined in this module.
Note: Future work is planned here to make it possible to list python
attributes on a module without loading the module by inspecting ASTs or
bytecode, but currently any iteration of PythonModule objects insists
they must be loaded, and will use inspect.getmodule.
@raise NotImplementedError: if this module is not loaded.
@return: a generator yielding PythonAttribute instances describing the
attributes of this module.
"""
if not self.isLoaded():
raise NotImplementedError(
"You can't load attributes from non-loaded modules yet.")
for name, val in inspect.getmembers(self.load()):
yield PythonAttribute(self.name+'.'+name, self, True, val)
def isPackage(self):
"""
Returns true if this module is also a package, and might yield something
from iterModules.
"""
return _isPackagePath(self.filePath)
def load(self, default=_nothing):
"""
Load this module.
@param default: if specified, the value to return in case of an error.
@return: a genuine python module.
@raise: any type of exception. Importing modules is a risky business;
the errors of any code run at module scope may be raised from here, as
well as ImportError if something bizarre happened to the system path
between the discovery of this PythonModule object and the attempt to
import it. If you specify a default, the error will be swallowed
entirely, and not logged.
@rtype: types.ModuleType.
"""
try:
return self.pathEntry.pythonPath.moduleLoader(self.name)
except: # this needs more thought...
if default is not _nothing:
return default
raise
def __eq__(self, other):
"""
PythonModules with the same name are equal.
"""
if not isinstance(other, PythonModule):
return False
return other.name == self.name
def __ne__(self, other):
"""
PythonModules with different names are not equal.
"""
if not isinstance(other, PythonModule):
return True
return other.name != self.name
def walkModules(self, importPackages=False):
if importPackages and self.isPackage():
self.load()
return super(PythonModule, self).walkModules(importPackages=importPackages)
def _subModuleName(self, mn):
"""
submodules of this module are prefixed with our name.
"""
return self.name + '.' + mn
def _packagePaths(self):
"""
Yield a sequence of FilePath-like objects which represent path segments.
"""
if not self.isPackage():
return
if self.isLoaded():
load = self.load()
if hasattr(load, '__path__'):
for fn in load.__path__:
if fn == self.parentPath.path:
# this should _really_ exist.
assert self.parentPath.exists()
yield self.parentPath
else:
smp = self.pathEntry.pythonPath._smartPath(fn)
if smp.exists():
yield smp
else:
yield self.parentPath
class PathEntry(_ModuleIteratorHelper):
"""
I am a proxy for a single entry on sys.path.
@ivar filePath: a FilePath-like object pointing at the filesystem location
or archive file where this path entry is stored.
@ivar pythonPath: a PythonPath instance.
"""
def __init__(self, filePath, pythonPath):
"""
Create a PathEntry. This is a private constructor.
"""
self.filePath = filePath
self.pythonPath = pythonPath
def _getEntry(self):
return self
def __repr__(self):
return 'PathEntry<%r>' % (self.filePath,)
def _packagePaths(self):
yield self.filePath
class IPathImportMapper(Interface):
"""
This is an internal interface, used to map importers to factories for
FilePath-like objects.
"""
def mapPath(self, pathLikeString):
"""
Return a FilePath-like object.
@param pathLikeString: a path-like string, like one that might be
passed to an import hook.
@return: a L{FilePath}, or something like it (currently only a
L{ZipPath}, but more might be added later).
"""
class _DefaultMapImpl:
""" Wrapper for the default importer, i.e. None. """
implements(IPathImportMapper)
def mapPath(self, fsPathString):
return FilePath(fsPathString)
_theDefaultMapper = _DefaultMapImpl()
class _ZipMapImpl:
""" IPathImportMapper implementation for zipimport.ZipImporter. """
implements(IPathImportMapper)
def __init__(self, importer):
self.importer = importer
def mapPath(self, fsPathString):
"""
Map the given FS path to a ZipPath, by looking at the ZipImporter's
"archive" attribute and using it as our ZipArchive root, then walking
down into the archive from there.
@return: a L{zippath.ZipPath} or L{zippath.ZipArchive} instance.
"""
za = ZipArchive(self.importer.archive)
myPath = FilePath(self.importer.archive)
itsPath = FilePath(fsPathString)
if myPath == itsPath:
return za
# This is NOT a general-purpose rule for sys.path or __file__:
# zipimport specifically uses regular OS path syntax in its pathnames,
# even though zip files specify that slashes are always the separator,
# regardless of platform.
segs = itsPath.segmentsFrom(myPath)
zp = za
for seg in segs:
zp = zp.child(seg)
return zp
registerAdapter(_ZipMapImpl, zipimport.zipimporter, IPathImportMapper)
def _defaultSysPathFactory():
"""
Provide the default behavior of PythonPath's sys.path factory, which is to
return the current value of sys.path.
@return: L{sys.path}
"""
return sys.path
class PythonPath:
"""
I represent the very top of the Python object-space: the module search path
in C{sys.path} and the loaded modules in C{sys.modules}.
@ivar _sysPath: A sequence of strings like C{sys.path}. This attribute is
read-only.
@ivar sysPath: The current value of the module search path list.
@type sysPath: C{list}
@ivar moduleDict: A dictionary mapping string module names to module
objects, like C{sys.modules}.
@ivar sysPathHooks: A list of PEP-302 path hooks, like C{sys.path_hooks}.
@ivar moduleLoader: A function that takes a fully-qualified python name and
returns a module, like L{twisted.python.reflect.namedAny}.
"""
def __init__(self,
sysPath=None,
moduleDict=sys.modules,
sysPathHooks=sys.path_hooks,
importerCache=sys.path_importer_cache,
moduleLoader=namedAny,
sysPathFactory=None):
"""
Create a PythonPath. You almost certainly want to use
modules.theSystemPath, or its aliased methods, rather than creating a
new instance yourself, though.
All parameters are optional, and if unspecified, will use 'system'
equivalents that make this PythonPath behave like the global
L{theSystemPath} instance.
@param sysPath: a sys.path-like list to use for this PythonPath, to
specify where to load modules from.
@param moduleDict: a sys.modules-like dictionary to use for keeping
track of what modules this PythonPath has loaded.
@param sysPathHooks: sys.path_hooks-like list of PEP-302 path hooks to
be used for this PythonPath, to determine which importers should be
used.
@param importerCache: a sys.path_importer_cache-like mapping of PEP-302
importers. This will be used in conjunction with the given
sysPathHooks.
@param moduleLoader: a module loader function which takes a string and
returns a module. That is to say, it is like L{namedAny} - *not* like
L{__import__}.
@param sysPathFactory: a 0-argument callable which returns the current
value of a sys.path-like list of strings. Specify either this, or
sysPath, not both. This alternative interface is provided because the
way the Python import mechanism works, you can re-bind the 'sys.path'
name and that is what is used for current imports, so it must be a
factory rather than a value to deal with modification by rebinding
rather than modification by mutation. Note: it is not recommended to
rebind sys.path. Although this mechanism can deal with that, it is a
subtle point which is easy for tools that interact with sys.path to
miss.
"""
if sysPath is not None:
sysPathFactory = lambda : sysPath
elif sysPathFactory is None:
sysPathFactory = _defaultSysPathFactory
self._sysPathFactory = sysPathFactory
self._sysPath = sysPath
self.moduleDict = moduleDict
self.sysPathHooks = sysPathHooks
self.importerCache = importerCache
self.moduleLoader = moduleLoader
def _getSysPath(self):
"""
Retrieve the current value of the module search path list.
"""
return self._sysPathFactory()
sysPath = property(_getSysPath)
def _findEntryPathString(self, modobj):
"""
Determine where a given Python module object came from by looking at path
entries.
"""
topPackageObj = modobj
while '.' in topPackageObj.__name__:
topPackageObj = self.moduleDict['.'.join(
topPackageObj.__name__.split('.')[:-1])]
if _isPackagePath(FilePath(topPackageObj.__file__)):
# if package 'foo' is on sys.path at /a/b/foo, package 'foo's
# __file__ will be /a/b/foo/__init__.py, and we are looking for
# /a/b here, the path-entry; so go up two steps.
rval = dirname(dirname(topPackageObj.__file__))
else:
# the module is completely top-level, not within any packages. The
# path entry it's on is just its dirname.
rval = dirname(topPackageObj.__file__)
# There are probably some awful tricks that an importer could pull
# which would break this, so let's just make sure... it's a loaded
# module after all, which means that its path MUST be in
# path_importer_cache according to PEP 302 -glyph
if rval not in self.importerCache:
warnings.warn(
"%s (for module %s) not in path importer cache "
"(PEP 302 violation - check your local configuration)." % (
rval, modobj.__name__),
stacklevel=3)
return rval
def _smartPath(self, pathName):
"""
Given a path entry from sys.path which may refer to an importer,
return the appropriate FilePath-like instance.
@param pathName: a str describing the path.
@return: a FilePath-like object.
"""
importr = self.importerCache.get(pathName, _nothing)
if importr is _nothing:
for hook in self.sysPathHooks:
try:
importr = hook(pathName)
except ImportError:
pass
if importr is _nothing: # still
importr = None
return IPathImportMapper(importr, _theDefaultMapper).mapPath(pathName)
def iterEntries(self):
"""
Iterate the entries on my sysPath.
@return: a generator yielding PathEntry objects
"""
for pathName in self.sysPath:
fp = self._smartPath(pathName)
yield PathEntry(fp, self)
def __getitem__(self, modname):
"""
Get a python module by its given fully-qualified name.
@param modname: The fully-qualified Python module name to load.
@type modname: C{str}
@return: an object representing the module identified by C{modname}
@rtype: L{PythonModule}
@raise KeyError: if the module name is not a valid module name, or no
such module can be identified as loadable.
"""
# See if the module is already somewhere in Python-land.
moduleObject = self.moduleDict.get(modname)
if moduleObject is not None:
# we need 2 paths; one of the path entry and one for the module.
pe = PathEntry(
self._smartPath(
self._findEntryPathString(moduleObject)),
self)
mp = self._smartPath(moduleObject.__file__)
return PythonModule(modname, mp, pe)
# Recurse if we're trying to get a submodule.
if '.' in modname:
pkg = self
for name in modname.split('.'):
pkg = pkg[name]
return pkg
# Finally do the slowest possible thing and iterate
for module in self.iterModules():
if module.name == modname:
return module
raise KeyError(modname)
def __contains__(self, module):
"""
Check to see whether or not a module exists on my import path.
@param module: The name of the module to look for on my import path.
@type module: C{str}
"""
try:
self.__getitem__(module)
return True
except KeyError:
return False
def __repr__(self):
"""
Display my sysPath and moduleDict in a string representation.
"""
return "PythonPath(%r,%r)" % (self.sysPath, self.moduleDict)
def iterModules(self):
"""
Yield all top-level modules on my sysPath.
"""
for entry in self.iterEntries():
for module in entry.iterModules():
yield module
def walkModules(self, importPackages=False):
"""
Similar to L{iterModules}, this yields every module on the path, then every
submodule in each package or entry.
"""
for package in self.iterModules():
for module in package.walkModules(importPackages=False):
yield module
theSystemPath = PythonPath()
def walkModules(importPackages=False):
"""
Deeply iterate all modules on the global python path.
@param importPackages: Import packages as they are seen.
"""
return theSystemPath.walkModules(importPackages=importPackages)
def iterModules():
"""
Iterate all modules and top-level packages on the global Python path, but
do not descend into packages.
"""
return theSystemPath.iterModules()
def getModule(moduleName):
"""
Retrieve a module from the system path.
"""
return theSystemPath[moduleName]
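# Editor's note: an illustrative usage sketch, not part of the original module.
if __name__ == "__main__":
    # Look a package up without importing it, walk its immediate children,
    # then load one of them on demand.
    twistedPython = getModule("twisted.python")
    for submodule in twistedPython.iterModules():
        print(submodule.name)
    reflectModule = getModule("twisted.python.reflect").load()
    print(reflectModule.__name__)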

View file

@@ -0,0 +1,75 @@
# -*- test-case-name: twisted.test.test_monkey -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
class MonkeyPatcher(object):
"""
Cover up attributes with new objects. Neat for monkey-patching things for
unit-testing purposes.
"""
def __init__(self, *patches):
# List of patches to apply in (obj, name, value).
self._patchesToApply = []
# List of the original values for things that have been patched.
# (obj, name, value) format.
self._originals = []
for patch in patches:
self.addPatch(*patch)
def addPatch(self, obj, name, value):
"""
Add a patch so that the attribute C{name} on C{obj} will be assigned to
C{value} when C{patch} is called or during C{runWithPatches}.
You can restore the original values with a call to restore().
"""
self._patchesToApply.append((obj, name, value))
def _alreadyPatched(self, obj, name):
"""
Has the C{name} attribute of C{obj} already been patched by this
patcher?
"""
for o, n, v in self._originals:
if (o, n) == (obj, name):
return True
return False
def patch(self):
"""
Apply all of the patches that have been specified with L{addPatch}.
Reverse this operation using L{restore}.
"""
for obj, name, value in self._patchesToApply:
if not self._alreadyPatched(obj, name):
self._originals.append((obj, name, getattr(obj, name)))
setattr(obj, name, value)
def restore(self):
"""
Restore all original values to any patched objects.
"""
while self._originals:
obj, name, value = self._originals.pop()
setattr(obj, name, value)
def runWithPatches(self, f, *args, **kw):
"""
Apply each patch already specified. Then run the function f with the
given args and kwargs. Restore everything when done.
"""
self.patch()
try:
return f(*args, **kw)
finally:
self.restore()
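# Editor's note: an illustrative usage sketch, not part of the original module.
if __name__ == "__main__":
    class Config(object):
        timeout = 30
    patcher = MonkeyPatcher((Config, "timeout", 1))
    def check():
        return Config.timeout
    print(patcher.runWithPatches(check))   # 1 while the patch is applied
    print(Config.timeout)                  # 30 again once restored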

View file

@@ -0,0 +1,45 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Utilities for dealing with processes.
"""
import os
def which(name, flags=os.X_OK):
"""Search PATH for executable files with the given name.
On newer versions of MS-Windows, the PATHEXT environment variable will be
set to the list of file extensions for files considered executable. This
will normally include things like ".EXE". This function will also find files
with the given name ending with any of these extensions.
On MS-Windows the only flag that has any meaning is os.F_OK. Any other
flags will be ignored.
@type name: C{str}
@param name: The name for which to search.
@type flags: C{int}
@param flags: Arguments to L{os.access}.
@rtype: C{list}
@return: A list of the full paths to files found, in the
order in which they were found.
"""
result = []
exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))
path = os.environ.get('PATH', None)
if path is None:
return []
for p in path.split(os.pathsep):
p = os.path.join(p, name)
if os.access(p, flags):
result.append(p)
for e in exts:
pext = p + e
if os.access(pext, flags):
result.append(pext)
return result
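# Editor's note: an illustrative usage sketch, not part of the original module.
if __name__ == "__main__":
    matches = which("python")
    if matches:
        print(matches[0])      # first executable named 'python' found on PATH
    else:
        print("no 'python' executable found on PATH")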

View file

@@ -0,0 +1,150 @@
# -*- test-case-name: twisted.test.test_randbytes -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Cryptographically secure random implementation, with fallback on normal random.
"""
from __future__ import division, absolute_import
import warnings, os, random, string
from twisted.python.compat import _PY3
getrandbits = getattr(random, 'getrandbits', None)
if _PY3:
_fromhex = bytes.fromhex
else:
def _fromhex(hexBytes):
return hexBytes.decode('hex')
class SecureRandomNotAvailable(RuntimeError):
"""
Exception raised when no secure random algorithm is found.
"""
class SourceNotAvailable(RuntimeError):
"""
Internal exception used when a specific random source is not available.
"""
class RandomFactory(object):
"""
Factory providing L{secureRandom} and L{insecureRandom} methods.
You shouldn't have to instantiate this class, use the module level
functions instead: it is an implementation detail and could be removed or
changed arbitrarily.
"""
# This variable is no longer used, and will eventually be removed.
randomSources = ()
getrandbits = getrandbits
def _osUrandom(self, nbytes):
"""
Wrapper around C{os.urandom} that cleanly manages its absence.
"""
try:
return os.urandom(nbytes)
except (AttributeError, NotImplementedError) as e:
raise SourceNotAvailable(e)
def secureRandom(self, nbytes, fallback=False):
"""
Return a number of secure random bytes.
@param nbytes: number of bytes to generate.
@type nbytes: C{int}
@param fallback: Whether the function should fall back on a non-secure
random source or not. Defaults to C{False}.
@type fallback: C{bool}
@return: a string of random bytes.
@rtype: C{str}
"""
try:
return self._osUrandom(nbytes)
except SourceNotAvailable:
pass
if fallback:
warnings.warn(
"urandom unavailable - "
"proceeding with non-cryptographically secure random source",
category=RuntimeWarning,
stacklevel=2)
return self.insecureRandom(nbytes)
else:
raise SecureRandomNotAvailable("No secure random source available")
def _randBits(self, nbytes):
"""
Wrapper around C{random.getrandbits}.
"""
if self.getrandbits is not None:
n = self.getrandbits(nbytes * 8)
hexBytes = ("%%0%dx" % (nbytes * 2)) % n
return _fromhex(hexBytes)
raise SourceNotAvailable("random.getrandbits is not available")
if _PY3:
_maketrans = bytes.maketrans
def _randModule(self, nbytes):
"""
Wrapper around the C{random} module.
"""
return b"".join([
bytes([random.choice(self._BYTES)]) for i in range(nbytes)])
else:
_maketrans = string.maketrans
def _randModule(self, nbytes):
"""
Wrapper around the C{random} module.
"""
return b"".join([
random.choice(self._BYTES) for i in range(nbytes)])
_BYTES = _maketrans(b'', b'')
def insecureRandom(self, nbytes):
"""
Return a number of non-secure random bytes.
@param nbytes: number of bytes to generate.
@type nbytes: C{int}
@return: a string of random bytes.
@rtype: C{str}
"""
for src in ("_randBits", "_randModule"):
try:
return getattr(self, src)(nbytes)
except SourceNotAvailable:
pass
factory = RandomFactory()
secureRandom = factory.secureRandom
insecureRandom = factory.insecureRandom
del factory
__all__ = ["secureRandom", "insecureRandom", "SecureRandomNotAvailable"]

View file

@@ -0,0 +1,271 @@
# -*- test-case-name: twisted.test.test_rebuild -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
*Real* reloading support for Python.
"""
# System Imports
import sys
import types
import time
import linecache
# Sibling Imports
from twisted.python import log, reflect
lastRebuild = time.time()
class Sensitive:
"""
A utility mixin that's sensitive to rebuilds.
This is a mixin for classes (usually those which represent collections of
callbacks) to make sure that their code is up-to-date before running.
"""
lastRebuild = lastRebuild
def needRebuildUpdate(self):
yn = (self.lastRebuild < lastRebuild)
return yn
def rebuildUpToDate(self):
self.lastRebuild = time.time()
def latestVersionOf(self, anObject):
"""
Get the latest version of an object.
This can handle just about anything callable; instances, functions,
methods, and classes.
"""
t = type(anObject)
if t == types.FunctionType:
return latestFunction(anObject)
elif t == types.MethodType:
if anObject.im_self is None:
return getattr(anObject.im_class, anObject.__name__)
else:
return getattr(anObject.im_self, anObject.__name__)
elif t == types.InstanceType:
# Kick it, if it's out of date.
getattr(anObject, 'nothing', None)
return anObject
elif t == types.ClassType:
return latestClass(anObject)
else:
log.msg('warning returning anObject!')
return anObject
_modDictIDMap = {}
def latestFunction(oldFunc):
"""
Get the latest version of a function.
"""
# This may be CPython specific, since I believe jython instantiates a new
# module upon reload.
dictID = id(oldFunc.func_globals)
module = _modDictIDMap.get(dictID)
if module is None:
return oldFunc
return getattr(module, oldFunc.__name__)
def latestClass(oldClass):
"""
Get the latest version of a class.
"""
module = reflect.namedModule(oldClass.__module__)
newClass = getattr(module, oldClass.__name__)
newBases = [latestClass(base) for base in newClass.__bases__]
try:
# This makes old-style stuff work
newClass.__bases__ = tuple(newBases)
return newClass
except TypeError:
if newClass.__module__ == "__builtin__":
# __builtin__ members can't be reloaded sanely
return newClass
ctor = getattr(newClass, '__metaclass__', type)
return ctor(newClass.__name__, tuple(newBases), dict(newClass.__dict__))
class RebuildError(Exception):
"""
Exception raised when trying to rebuild a class when it is not possible.
"""
def updateInstance(self):
"""
Updates an instance to be current.
"""
try:
self.__class__ = latestClass(self.__class__)
except TypeError:
if hasattr(self.__class__, '__slots__'):
raise RebuildError("Can't rebuild class with __slots__ on Python < 2.6")
else:
raise
def __getattr__(self, name):
"""
A getattr method to cause a class to be refreshed.
"""
if name == '__del__':
raise AttributeError("Without this, Python segfaults.")
updateInstance(self)
log.msg("(rebuilding stale %s instance (%s))" % (reflect.qual(self.__class__), name))
result = getattr(self, name)
return result
def rebuild(module, doLog=1):
"""
Reload a module and do as much as possible to replace its references.
"""
global lastRebuild
lastRebuild = time.time()
if hasattr(module, 'ALLOW_TWISTED_REBUILD'):
# Is this module allowed to be rebuilt?
if not module.ALLOW_TWISTED_REBUILD:
raise RuntimeError("I am not allowed to be rebuilt.")
if doLog:
log.msg('Rebuilding %s...' % str(module.__name__))
## Safely handle adapter re-registration
from twisted.python import components
components.ALLOW_DUPLICATES = True
d = module.__dict__
_modDictIDMap[id(d)] = module
newclasses = {}
classes = {}
functions = {}
values = {}
if doLog:
log.msg(' (scanning %s): ' % str(module.__name__))
for k, v in d.items():
if type(v) == types.ClassType:
# Failure condition -- instances of classes with buggy
# __hash__/__cmp__ methods referenced at the module level...
if v.__module__ == module.__name__:
classes[v] = 1
if doLog:
log.logfile.write("c")
log.logfile.flush()
elif type(v) == types.FunctionType:
if v.func_globals is module.__dict__:
functions[v] = 1
if doLog:
log.logfile.write("f")
log.logfile.flush()
elif isinstance(v, type):
if v.__module__ == module.__name__:
newclasses[v] = 1
if doLog:
log.logfile.write("o")
log.logfile.flush()
values.update(classes)
values.update(functions)
fromOldModule = values.__contains__
newclasses = newclasses.keys()
classes = classes.keys()
functions = functions.keys()
if doLog:
log.msg('')
log.msg(' (reload %s)' % str(module.__name__))
# Boom.
reload(module)
# Make sure that my traceback printing will at least be recent...
linecache.clearcache()
if doLog:
log.msg(' (cleaning %s): ' % str(module.__name__))
for clazz in classes:
if getattr(module, clazz.__name__) is clazz:
log.msg("WARNING: class %s not replaced by reload!" % reflect.qual(clazz))
else:
if doLog:
log.logfile.write("x")
log.logfile.flush()
clazz.__bases__ = ()
clazz.__dict__.clear()
clazz.__getattr__ = __getattr__
clazz.__module__ = module.__name__
if newclasses:
import gc
for nclass in newclasses:
ga = getattr(module, nclass.__name__)
if ga is nclass:
log.msg("WARNING: new-class %s not replaced by reload!" % reflect.qual(nclass))
else:
for r in gc.get_referrers(nclass):
if getattr(r, '__class__', None) is nclass:
r.__class__ = ga
if doLog:
log.msg('')
log.msg(' (fixing %s): ' % str(module.__name__))
modcount = 0
for mk, mod in sys.modules.items():
modcount = modcount + 1
if mod == module or mod is None:
continue
if not hasattr(mod, '__file__'):
# It's a builtin module; nothing to replace here.
continue
if hasattr(mod, '__bundle__'):
# PyObjC has a few buggy objects which segfault if you hash() them.
# It doesn't make sense to try rebuilding extension modules like
# this anyway, so don't try.
continue
changed = 0
for k, v in mod.__dict__.items():
try:
hash(v)
except Exception:
continue
if fromOldModule(v):
if type(v) == types.ClassType:
if doLog:
log.logfile.write("c")
log.logfile.flush()
nv = latestClass(v)
else:
if doLog:
log.logfile.write("f")
log.logfile.flush()
nv = latestFunction(v)
changed = 1
setattr(mod, k, nv)
else:
# Replace bases of non-module classes just to be sure.
if type(v) == types.ClassType:
for base in v.__bases__:
if fromOldModule(base):
latestClass(v)
if doLog and not changed and ((modcount % 10) == 0):
log.logfile.write(".")
log.logfile.flush()
components.ALLOW_DUPLICATES = False
if doLog:
log.msg('')
log.msg(' Rebuilt %s.' % str(module.__name__))
return module
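# Editor's note: an illustrative usage sketch, not part of the original module.
# Rebuilding is normally done from an interactive session after editing a
# module's source on disk.
if __name__ == "__main__":
    from twisted.python import util
    # ... edit twisted/python/util.py in another window, then:
    rebuild(util, doLog=0)    # reload the module and re-point old references
    print(util.__name__)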

View file

@@ -0,0 +1,707 @@
# -*- test-case-name: twisted.test.test_reflect -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Standardized versions of various cool and/or strange things that you can do
with Python's reflection capabilities.
"""
from __future__ import division, absolute_import
import sys
import types
import os
import pickle
import weakref
import re
import traceback
import warnings
from collections import deque
RegexType = type(re.compile(""))
from twisted.python.compat import reraise, nativeString, NativeStringIO
from twisted.python.compat import _PY3
from twisted.python.deprecate import deprecated
from twisted.python import compat
from twisted.python.deprecate import _fullyQualifiedName as fullyQualifiedName
from twisted.python.versions import Version
def prefixedMethodNames(classObj, prefix):
"""
Given a class object C{classObj}, returns a list of method names that match
the string C{prefix}.
@param classObj: A class object from which to collect method names.
@param prefix: A native string giving a prefix. Each method with a name
which begins with this prefix will be returned.
@type prefix: L{str}
@return: A list of the names of matching methods of C{classObj} (and base
classes of C{classObj}).
@rtype: L{list} of L{str}
"""
dct = {}
addMethodNamesToDict(classObj, dct, prefix)
return list(dct.keys())
def addMethodNamesToDict(classObj, dict, prefix, baseClass=None):
"""
This goes through C{classObj} (and its bases) and puts method names
starting with 'prefix' in 'dict' with a value of 1. If baseClass isn't
None, methods will only be added if classObj is-a baseClass.
If the class in question has the methods 'prefix_methodname' and
'prefix_methodname2', the resulting dict should look something like:
{"methodname": 1, "methodname2": 1}.
@param classObj: A class object from which to collect method names.
@param dict: A L{dict} which will be updated with the results of the
accumulation. Items are added to this dictionary, with method names as
keys and C{1} as values.
@type dict: L{dict}
@param prefix: A native string giving a prefix. Each method of C{classObj}
(and base classes of C{classObj}) with a name which begins with this
prefix will be returned.
@type prefix: L{str}
@param baseClass: A class object at which to stop searching upwards for new
methods. To collect all method names, do not pass a value for this
parameter.
@return: C{None}
"""
for base in classObj.__bases__:
addMethodNamesToDict(base, dict, prefix, baseClass)
if baseClass is None or baseClass in classObj.__bases__:
for name, method in classObj.__dict__.items():
optName = name[len(prefix):]
if ((type(method) is types.FunctionType)
and (name[:len(prefix)] == prefix)
and (len(optName))):
dict[optName] = 1
def prefixedMethods(obj, prefix=''):
"""
Given an object C{obj}, returns a list of method objects that match the
string C{prefix}.
@param obj: An arbitrary object from which to collect methods.
@param prefix: A native string giving a prefix. Each method of C{obj} with
a name which begins with this prefix will be returned.
@type prefix: L{str}
@return: A list of the matching method objects.
@rtype: L{list}
"""
dct = {}
accumulateMethods(obj, dct, prefix)
return list(dct.values())
def accumulateMethods(obj, dict, prefix='', curClass=None):
"""
Given an object C{obj}, add all methods that begin with C{prefix}.
@param obj: An arbitrary object to collect methods from.
@param dict: A L{dict} which will be updated with the results of the
accumulation. Items are added to this dictionary, with method names as
keys and corresponding instance method objects as values.
@type dict: L{dict}
@param prefix: A native string giving a prefix. Each method of C{obj} with
a name which begins with this prefix will be returned.
@type prefix: L{str}
@param curClass: The class in the inheritance hierarchy at which to start
collecting methods. Collection proceeds up. To collect all methods
from C{obj}, do not pass a value for this parameter.
@return: C{None}
"""
if not curClass:
curClass = obj.__class__
for base in curClass.__bases__:
accumulateMethods(obj, dict, prefix, base)
for name, method in curClass.__dict__.items():
optName = name[len(prefix):]
if ((type(method) is types.FunctionType)
and (name[:len(prefix)] == prefix)
and (len(optName))):
dict[optName] = getattr(obj, name)
def namedModule(name):
"""
Return a module given its name.
"""
topLevel = __import__(name)
packages = name.split(".")[1:]
m = topLevel
for p in packages:
m = getattr(m, p)
return m
def namedObject(name):
"""
Get a fully named module-global object.
"""
classSplit = name.split('.')
module = namedModule('.'.join(classSplit[:-1]))
return getattr(module, classSplit[-1])
namedClass = namedObject # backwards compat
def requireModule(name, default=None):
"""
Try to import a module given its name, returning C{default} value if
C{ImportError} is raised during import.
@param name: Module name as it would have been passed to C{import}.
@type name: C{str}.
@param default: Value returned in case C{ImportError} is raised while
importing the module.
@return: Module or default value.
"""
try:
return namedModule(name)
except ImportError:
return default
class _NoModuleFound(Exception):
"""
No module was found because none exists.
"""
class InvalidName(ValueError):
"""
The given name is not a dot-separated list of Python objects.
"""
class ModuleNotFound(InvalidName):
"""
The module associated with the given name doesn't exist and it can't be
imported.
"""
class ObjectNotFound(InvalidName):
"""
The object associated with the given name doesn't exist and it can't be
imported.
"""
def _importAndCheckStack(importName):
"""
Import the given name as a module, then walk the stack to determine whether
the failure was the module not existing, or some code in the module (for
example a dependent import) failing. This can be helpful to determine
whether any actual application code was run. For example, to distiguish
administrative error (entering the wrong module name), from programmer
error (writing buggy code in a module that fails to import).
@param importName: The name of the module to import.
@type importName: C{str}
@raise Exception: if something bad happens. This can be any type of
exception, since nobody knows what loading some arbitrary code might
do.
@raise _NoModuleFound: if no module was found.
"""
try:
return __import__(importName)
except ImportError:
excType, excValue, excTraceback = sys.exc_info()
while excTraceback:
execName = excTraceback.tb_frame.f_globals["__name__"]
# In Python 2 execName is None when an ImportError is encountered,
# whereas in Python 3 execName is equal to the importName.
if execName is None or execName == importName:
reraise(excValue, excTraceback)
excTraceback = excTraceback.tb_next
raise _NoModuleFound()
def namedAny(name):
"""
Retrieve a Python object by its fully qualified name from the global Python
module namespace. The first part of the name, that describes a module,
will be discovered and imported. Each subsequent part of the name is
treated as the name of an attribute of the object specified by all of the
name which came before it. For example, the fully-qualified name of this
object is 'twisted.python.reflect.namedAny'.
@type name: L{str}
@param name: The name of the object to return.
@raise InvalidName: If the name is an empty string, starts or ends with
a '.', or is otherwise syntactically incorrect.
@raise ModuleNotFound: If the name is syntactically correct but the
module it specifies cannot be imported because it does not appear to
exist.
@raise ObjectNotFound: If the name is syntactically correct, includes at
least one '.', but the module it specifies cannot be imported because
it does not appear to exist.
@raise AttributeError: If an attribute of an object along the way cannot be
accessed, or a module along the way is not found.
@return: the Python object identified by 'name'.
"""
if not name:
raise InvalidName('Empty module name')
names = name.split('.')
# if the name starts or ends with a '.' or contains '..', the __import__
# will raise an 'Empty module name' error. This will provide a better error
# message.
if '' in names:
raise InvalidName(
"name must be a string giving a '.'-separated list of Python "
"identifiers, not %r" % (name,))
topLevelPackage = None
moduleNames = names[:]
while not topLevelPackage:
if moduleNames:
trialname = '.'.join(moduleNames)
try:
topLevelPackage = _importAndCheckStack(trialname)
except _NoModuleFound:
moduleNames.pop()
else:
if len(names) == 1:
raise ModuleNotFound("No module named %r" % (name,))
else:
raise ObjectNotFound('%r does not name an object' % (name,))
obj = topLevelPackage
for n in names[1:]:
obj = getattr(obj, n)
return obj
def filenameToModuleName(fn):
"""
Convert a name in the filesystem to the name of the Python module it is.
This is aggressive about getting a module name back from a file; it will
always return a string. Aggressive means 'sometimes wrong'; it won't look
at the Python path or try to do any error checking: don't use this method
unless you already know that the filename you're talking about is a Python
module.
@param fn: A filesystem path to a module or package; C{bytes} on Python 2,
C{bytes} or C{unicode} on Python 3.
@return: A hopefully importable module name.
@rtype: C{str}
"""
if isinstance(fn, bytes):
initPy = b"__init__.py"
else:
initPy = "__init__.py"
fullName = os.path.abspath(fn)
base = os.path.basename(fn)
if not base:
# this happens when fn ends with a path separator; just skip it
base = os.path.basename(fn[:-1])
modName = nativeString(os.path.splitext(base)[0])
while 1:
fullName = os.path.dirname(fullName)
if os.path.exists(os.path.join(fullName, initPy)):
modName = "%s.%s" % (
nativeString(os.path.basename(fullName)),
nativeString(modName))
else:
break
return modName
def qual(clazz):
"""
Return full import path of a class.
"""
return clazz.__module__ + '.' + clazz.__name__
def _determineClass(x):
try:
return x.__class__
except:
return type(x)
def _determineClassName(x):
c = _determineClass(x)
try:
return c.__name__
except:
try:
return str(c)
except:
return '<BROKEN CLASS AT 0x%x>' % id(c)
def _safeFormat(formatter, o):
"""
Helper function for L{safe_repr} and L{safe_str}.
"""
try:
return formatter(o)
except:
io = NativeStringIO()
traceback.print_exc(file=io)
className = _determineClassName(o)
tbValue = io.getvalue()
return "<%s instance at 0x%x with %s error:\n %s>" % (
className, id(o), formatter.__name__, tbValue)
def safe_repr(o):
"""
Returns a string representation of an object, or a string containing a
traceback, if that object's __repr__ raised an exception.
@param o: Any object.
@rtype: C{str}
"""
return _safeFormat(repr, o)
def safe_str(o):
"""
Returns a string representation of an object, or a string containing a
traceback, if that object's __str__ raised an exception.
@param o: Any object.
@rtype: C{str}
"""
return _safeFormat(str, o)
class QueueMethod:
"""
I represent a method that doesn't exist yet.
"""
def __init__(self, name, calls):
self.name = name
self.calls = calls
def __call__(self, *args):
self.calls.append((self.name, args))
def funcinfo(function):
"""
this is more documentation for myself than useful code.
"""
warnings.warn(
"[v2.5] Use inspect.getargspec instead of twisted.python.reflect.funcinfo",
DeprecationWarning,
stacklevel=2)
code=function.func_code
name=function.func_name
argc=code.co_argcount
argv=code.co_varnames[:argc]
defaults=function.func_defaults
out = []
out.append('The function %s accepts %s arguments' % (name, argc))
if defaults:
required=argc-len(defaults)
out.append('It requires %s arguments' % required)
out.append('The arguments required are: %s' % argv[:required])
out.append('additional arguments are:')
for i in range(argc-required):
j=i+required
out.append('%s which has a default of %s' % (argv[j], defaults[i]))
return out
ISNT=0
WAS=1
IS=2
def fullFuncName(func):
qualName = (str(pickle.whichmodule(func, func.__name__)) + '.' + func.__name__)
if namedObject(qualName) is not func:
raise Exception("Couldn't find %s as %s." % (func, qualName))
return qualName
def getClass(obj):
"""
Return the class or type of object 'obj'.
Returns a sensible result for old-style and new-style instances and types.
"""
if hasattr(obj, '__class__'):
return obj.__class__
else:
return type(obj)
## the following were factored out of usage
if not _PY3:
# The following functions aren't documented, nor tested, have much simpler
# builtin implementations and are not used within Twisted or "known"
# projects.
@deprecated(Version("Twisted", 14, 0, 0))
def getcurrent(clazz):
assert type(clazz) == types.ClassType, 'must be a class...'
module = namedModule(clazz.__module__)
currclass = getattr(module, clazz.__name__, None)
if currclass is None:
return clazz
return currclass
# Class graph nonsense
# I should really have a better name for this...
@deprecated(Version("Twisted", 14, 0, 0), "isinstance")
def isinst(inst,clazz):
if type(inst) != compat.InstanceType or type(clazz)!= types.ClassType:
return isinstance(inst,clazz)
cl = inst.__class__
cl2 = getcurrent(cl)
clazz = getcurrent(clazz)
if issubclass(cl2,clazz):
if cl == cl2:
return WAS
else:
inst.__class__ = cl2
return IS
else:
return ISNT
# These functions are still imported by libraries used in turn by the
# Twisted unit tests, like Nevow 0.10. Since they are deprecated,
# there's no need to port them to Python 3 (hence the condition above).
# https://bazaar.launchpad.net/~divmod-dev/divmod.org/trunk/revision/2716
# removed the dependency in Nevow. Once that is released, these functions
# can be safely removed from Twisted.
@deprecated(Version("Twisted", 11, 0, 0), "inspect.getmro")
def allYourBase(classObj, baseClass=None):
"""
allYourBase(classObj, baseClass=None) -> list of all base
classes that are subclasses of baseClass, unless it is None,
in which case all bases will be added.
"""
l = []
_accumulateBases(classObj, l, baseClass)
return l
@deprecated(Version("Twisted", 11, 0, 0), "inspect.getmro")
def accumulateBases(classObj, l, baseClass=None):
_accumulateBases(classObj, l, baseClass)
def _accumulateBases(classObj, l, baseClass=None):
for base in classObj.__bases__:
if baseClass is None or issubclass(base, baseClass):
l.append(base)
_accumulateBases(base, l, baseClass)
def accumulateClassDict(classObj, attr, adict, baseClass=None):
"""
Accumulate all attributes of a given name in a class hierarchy into a single dictionary.
Assuming all class attributes of this name are dictionaries.
If any of the dictionaries being accumulated have the same key, the
one highest in the class hierarchy wins.
(XXX: If \"highest\" means \"closest to the starting class\".)
Ex::
class Soy:
properties = {\"taste\": \"bland\"}
class Plant:
properties = {\"colour\": \"green\"}
class Seaweed(Plant):
pass
class Lunch(Soy, Seaweed):
properties = {\"vegan\": 1 }
dct = {}
accumulateClassDict(Lunch, \"properties\", dct)
print dct
{\"taste\": \"bland\", \"colour\": \"green\", \"vegan\": 1}
"""
for base in classObj.__bases__:
accumulateClassDict(base, attr, adict)
if baseClass is None or baseClass in classObj.__bases__:
adict.update(classObj.__dict__.get(attr, {}))
def accumulateClassList(classObj, attr, listObj, baseClass=None):
"""
Accumulate all attributes of a given name in a class hierarchy into a single list.
Assuming all class attributes of this name are lists.
"""
for base in classObj.__bases__:
accumulateClassList(base, attr, listObj)
if baseClass is None or baseClass in classObj.__bases__:
listObj.extend(classObj.__dict__.get(attr, []))
def isSame(a, b):
return (a is b)
def isLike(a, b):
return (a == b)
def modgrep(goal):
return objgrep(sys.modules, goal, isLike, 'sys.modules')
def isOfType(start, goal):
return ((type(start) is goal) or
(isinstance(start, compat.InstanceType) and
start.__class__ is goal))
def findInstances(start, t):
return objgrep(start, t, isOfType)
if not _PY3:
# The function objgrep() currently doesn't work on Python 3 due to some
# edge cases, as described in #6986.
# twisted.python.reflect is quite important and objgrep is not used in
# Twisted itself, so in #5929, we decided to port everything but objgrep()
# and to finish the porting in #6986
def objgrep(start, goal, eq=isLike, path='', paths=None, seen=None,
showUnknowns=0, maxDepth=None):
"""
An insanely CPU-intensive process for finding stuff.
"""
if paths is None:
paths = []
if seen is None:
seen = {}
if eq(start, goal):
paths.append(path)
if id(start) in seen:
if seen[id(start)] is start:
return
if maxDepth is not None:
if maxDepth == 0:
return
maxDepth -= 1
seen[id(start)] = start
# Make an alias for those arguments which are passed recursively to
# objgrep for container objects.
args = (paths, seen, showUnknowns, maxDepth)
if isinstance(start, dict):
for k, v in start.items():
objgrep(k, goal, eq, path+'{'+repr(v)+'}', *args)
objgrep(v, goal, eq, path+'['+repr(k)+']', *args)
elif isinstance(start, (list, tuple, deque)):
for idx, _elem in enumerate(start):
objgrep(start[idx], goal, eq, path+'['+str(idx)+']', *args)
elif isinstance(start, types.MethodType):
objgrep(start.__self__, goal, eq, path+'.__self__', *args)
objgrep(start.__func__, goal, eq, path+'.__func__', *args)
objgrep(start.__self__.__class__, goal, eq,
path+'.__self__.__class__', *args)
elif hasattr(start, '__dict__'):
for k, v in start.__dict__.items():
objgrep(v, goal, eq, path+'.'+k, *args)
if isinstance(start, compat.InstanceType):
objgrep(start.__class__, goal, eq, path+'.__class__', *args)
elif isinstance(start, weakref.ReferenceType):
objgrep(start(), goal, eq, path+'()', *args)
elif (isinstance(start, (compat.StringType,
int, types.FunctionType,
types.BuiltinMethodType, RegexType, float,
type(None), compat.FileType)) or
type(start).__name__ in ('wrapper_descriptor',
'method_descriptor', 'member_descriptor',
'getset_descriptor')):
pass
elif showUnknowns:
print('unknown type', type(start), start)
return paths
__all__ = [
'InvalidName', 'ModuleNotFound', 'ObjectNotFound',
'ISNT', 'WAS', 'IS',
'QueueMethod',
'funcinfo', 'fullFuncName', 'qual', 'getcurrent', 'getClass', 'isinst',
'namedModule', 'namedObject', 'namedClass', 'namedAny', 'requireModule',
'safe_repr', 'safe_str', 'allYourBase', 'accumulateBases',
'prefixedMethodNames', 'addMethodNamesToDict', 'prefixedMethods',
'accumulateMethods',
'accumulateClassDict', 'accumulateClassList', 'isSame', 'isLike',
'modgrep', 'isOfType', 'findInstances', 'objgrep', 'filenameToModuleName',
'fullyQualifiedName']
if _PY3:
# This is to be removed when fixing #6986
__all__.remove('objgrep')
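# A minimal usage sketch (not part of the original module), assuming the
# Python 2 objgrep defined above: report the attribute paths at which a value
# can be reached from a starting object. The Holder class is hypothetical.
def _demoObjgrep():
    class Holder:
        pass
    h = Holder()
    h.payload = {'answer': 42}
    if _PY3:
        return []  # objgrep is unavailable on Python 3 (see #6986)
    return objgrep(h, 42, isLike)  # e.g. [".payload['answer']"]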

View file

@ -0,0 +1,63 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A release-automation toolkit.
Don't use this outside of Twisted.
Maintainer: Christopher Armstrong
"""
import os
# errors
class DirectoryExists(OSError):
"""
Some directory exists when it shouldn't.
"""
pass
class DirectoryDoesntExist(OSError):
"""
Some directory doesn't exist when it should.
"""
pass
class CommandFailed(OSError):
pass
# utilities
def sh(command, null=True, prompt=False):
"""
I'll try to execute C{command}, and if C{prompt} is true, I'll
ask before running it. If the command returns something other
than 0, I'll raise C{CommandFailed(command)}.
"""
print "--$", command
if prompt:
if raw_input("run ?? ").startswith('n'):
return
if null:
command = "%s > /dev/null" % command
if os.system(command) != 0:
raise CommandFailed(command)
def runChdirSafe(f, *args, **kw):
origdir = os.path.abspath('.')
try:
return f(*args, **kw)
finally:
os.chdir(origdir)
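# A minimal usage sketch (not part of the original module): the command and
# directory below are hypothetical. sh() raises CommandFailed on a non-zero
# exit status, and runChdirSafe() restores the working directory afterwards.
def _demoRelease():
    def _build():
        os.chdir('/tmp')
        sh('echo building docs')
    runChdirSafe(_build)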

View file

@ -0,0 +1,248 @@
# -*- test-case-name: twisted.test.test_roots -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Twisted Python Roots: an abstract hierarchy representation for Twisted.
Maintainer: Glyph Lefkowitz
"""
# System imports
import types
from twisted.python import reflect
class NotSupportedError(NotImplementedError):
"""
An exception meaning that the tree-manipulation operation
you're attempting to perform is not supported.
"""
class Request:
"""I am an abstract representation of a request for an entity.
I also function as the response. The request is responded to by calling
self.write(data) until there is no data left and then calling
self.finish().
"""
# This attribute should be set to the string name of the protocol being
# responded to (e.g. HTTP or FTP)
wireProtocol = None
def write(self, data):
"""Add some data to the response to this request.
"""
raise NotImplementedError("%s.write" % reflect.qual(self.__class__))
def finish(self):
"""The response to this request is finished; flush all data to the network stream.
"""
raise NotImplementedError("%s.finish" % reflect.qual(self.__class__))
class Entity:
"""I am a terminal object in a hierarchy, with no children.
I represent a null interface; certain non-instance objects (strings and
integers, notably) are Entities.
Methods on this class are suggested to be implemented, but are not
required, and will be emulated on a per-protocol basis for types which do
not handle them.
"""
def render(self, request):
"""
I produce a stream of bytes for the request, by calling request.write()
and request.finish().
"""
raise NotImplementedError("%s.render" % reflect.qual(self.__class__))
class Collection:
"""I represent a static collection of entities.
I contain methods designed to represent collections that can be dynamically
created.
"""
def __init__(self, entities=None):
"""Initialize me.
"""
if entities is not None:
self.entities = entities
else:
self.entities = {}
def getStaticEntity(self, name):
"""Get an entity that was added to me using putEntity.
This method will return 'None' if it fails.
"""
return self.entities.get(name)
def getDynamicEntity(self, name, request):
"""Subclass this to generate an entity on demand.
This method should return 'None' if it fails.
"""
def getEntity(self, name, request):
"""Retrieve an entity from me.
I will first attempt to retrieve an entity statically; static entities
will obscure dynamic ones. If that fails, I will retrieve the entity
dynamically.
If I cannot retrieve an entity, I will return 'None'.
"""
ent = self.getStaticEntity(name)
if ent is not None:
return ent
ent = self.getDynamicEntity(name, request)
if ent is not None:
return ent
return None
def putEntity(self, name, entity):
"""Store a static reference on 'name' for 'entity'.
Raises a KeyError if the operation fails.
"""
self.entities[name] = entity
def delEntity(self, name):
"""Remove a static reference for 'name'.
Raises a KeyError if the operation fails.
"""
del self.entities[name]
def storeEntity(self, name, request):
"""Store an entity for 'name', based on the content of 'request'.
"""
raise NotSupportedError("%s.storeEntity" % reflect.qual(self.__class__))
def removeEntity(self, name, request):
"""Remove an entity for 'name', based on the content of 'request'.
"""
raise NotSupportedError("%s.removeEntity" % reflect.qual(self.__class__))
def listStaticEntities(self):
"""Retrieve a list of all name, entity pairs that I store references to.
See getStaticEntity.
"""
return self.entities.items()
def listDynamicEntities(self, request):
"""A list of all name, entity that I can generate on demand.
See getDynamicEntity.
"""
return []
def listEntities(self, request):
"""Retrieve a list of all name, entity pairs I contain.
See getEntity.
"""
return self.listStaticEntities() + self.listDynamicEntities(request)
def listStaticNames(self):
"""Retrieve a list of the names of entities that I store references to.
See getStaticEntity.
"""
return self.entities.keys()
def listDynamicNames(self):
Retrieve a list of the names of entities that I can generate on demand.
See getDynamicEntity.
"""
return []
def listNames(self, request):
"""Retrieve a list of all names for entities that I contain.
See getEntity.
"""
return self.listStaticNames()
class ConstraintViolation(Exception):
"""An exception raised when a constraint is violated.
"""
class Constrained(Collection):
"""A collection that has constraints on its names and/or entities."""
def nameConstraint(self, name):
"""A method that determines whether an entity may be added to me with a given name.
If the constraint is satisfied, return 1; if the constraint is not
satisfied, either return 0 or raise a descriptive ConstraintViolation.
"""
return 1
def entityConstraint(self, entity):
"""A method that determines whether an entity may be added to me.
If the constraint is satisfied, return 1; if the constraint is not
satisfied, either return 0 or raise a descriptive ConstraintViolation.
"""
return 1
def reallyPutEntity(self, name, entity):
Collection.putEntity(self, name, entity)
def putEntity(self, name, entity):
"""Store an entity if it meets both constraints.
Otherwise raise a ConstraintViolation.
"""
if self.nameConstraint(name):
if self.entityConstraint(entity):
self.reallyPutEntity(name, entity)
else:
raise ConstraintViolation("Entity constraint violated.")
else:
raise ConstraintViolation("Name constraint violated.")
class Locked(Constrained):
"""A collection that can be locked from adding entities."""
locked = 0
def lock(self):
self.locked = 1
def entityConstraint(self, entity):
return not self.locked
class Homogenous(Constrained):
"""A homogenous collection of entities.
I will only contain entities that are an instance of the class or type
specified by my 'entityType' attribute.
"""
entityType = types.InstanceType
def entityConstraint(self, entity):
if isinstance(entity, self.entityType):
return 1
else:
raise ConstraintViolation("%s of incorrect type (%s)" %
(entity, self.entityType))
def getNameType(self):
return "Name"
def getEntityType(self):
return self.entityType.__name__
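# A minimal usage sketch (not part of the original module): a hypothetical
# Homogenous collection restricted to str entities, showing how putEntity
# enforces entityConstraint.
class _StringCollection(Homogenous):
    """
    Hypothetical collection accepting only strings.
    """
    entityType = str

def _demoStringCollection():
    coll = _StringCollection()
    coll.putEntity("greeting", "hello")   # accepted: a str
    try:
        coll.putEntity("answer", 42)      # rejected: not a str
    except ConstraintViolation:
        pass
    return coll.getStaticEntity("greeting")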

View file

@ -0,0 +1,178 @@
# -*- test-case-name: twisted.python.test.test_runtime -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
import os
import sys
import time
import imp
import warnings
from twisted.python import compat
if compat._PY3:
_threadModule = "_thread"
else:
_threadModule = "thread"
def shortPythonVersion():
"""
Returns the Python version as a dot-separated string.
"""
return "%s.%s.%s" % sys.version_info[:3]
knownPlatforms = {
'nt': 'win32',
'ce': 'win32',
'posix': 'posix',
'java': 'java',
'org.python.modules.os': 'java',
}
_timeFunctions = {
#'win32': time.clock,
'win32': time.time,
}
class Platform:
"""
Gives us information about the platform we're running on.
"""
type = knownPlatforms.get(os.name)
seconds = staticmethod(_timeFunctions.get(type, time.time))
_platform = sys.platform
def __init__(self, name=None, platform=None):
if name is not None:
self.type = knownPlatforms.get(name)
self.seconds = _timeFunctions.get(self.type, time.time)
if platform is not None:
self._platform = platform
def isKnown(self):
"""
Do we know about this platform?
@return: Boolean indicating whether this is a known platform or not.
@rtype: C{bool}
"""
return self.type is not None
def getType(self):
"""
Get platform type.
@return: Either 'posix', 'win32' or 'java'
@rtype: C{str}
"""
return self.type
def isMacOSX(self):
"""
Check if current platform is Mac OS X.
@return: C{True} if the current platform has been detected as OS X.
@rtype: C{bool}
"""
return self._platform == "darwin"
def isWinNT(self):
"""
Are we running in Windows NT?
This is deprecated and always returns C{True} on win32 because
Twisted only supports Windows NT-derived platforms at this point.
@return: C{True} if the current platform has been detected as
Windows NT.
@rtype: C{bool}
"""
warnings.warn(
"twisted.python.runtime.Platform.isWinNT was deprecated in "
"Twisted 13.0. Use Platform.isWindows instead.",
DeprecationWarning, stacklevel=2)
return self.isWindows()
def isWindows(self):
"""
Are we running in Windows?
@return: C{True} if the current platform has been detected as
Windows.
@rtype: C{bool}
"""
return self.getType() == 'win32'
def isVista(self):
"""
Check if current platform is Windows Vista or Windows Server 2008.
@return: C{True} if the current platform has been detected as Vista
@rtype: C{bool}
"""
if getattr(sys, "getwindowsversion", None) is not None:
return sys.getwindowsversion()[0] == 6
else:
return False
def isLinux(self):
"""
Check if current platform is Linux.
@return: C{True} if the current platform has been detected as Linux.
@rtype: C{bool}
"""
return self._platform.startswith("linux")
def supportsThreads(self):
"""
Can threads be created?
@return: C{True} if the threads are supported on the current platform.
@rtype: C{bool}
"""
try:
return imp.find_module(_threadModule)[0] is None
except ImportError:
return False
def supportsINotify(self):
"""
Return C{True} if we can use the inotify API on this platform.
@since: 10.1
"""
try:
from twisted.python._inotify import INotifyError, init
except ImportError:
return False
try:
os.close(init())
except INotifyError:
return False
return True
platform = Platform()
platformType = platform.getType()
seconds = platform.seconds
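# A minimal usage sketch (not part of the original module): query the
# module-level Platform singleton defined above for a few platform facts.
def _describePlatform():
    """
    Hypothetical helper returning a dict of facts about the current platform.
    """
    return {
        'type': platform.getType(),
        'isWindows': platform.isWindows(),
        'isLinux': platform.isLinux(),
        'supportsThreads': platform.supportsThreads(),
        'supportsINotify': platform.supportsINotify(),
    }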

View file

@ -0,0 +1,511 @@
/*
* Copyright (c) Twisted Matrix Laboratories.
* See LICENSE for details.
*/
#define PY_SSIZE_T_CLEAN 1
#include <Python.h>
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
/* This may cause some warnings, but if you want to get rid of them, upgrade
* your Python version. */
typedef int Py_ssize_t;
#endif
#include <sys/types.h>
#include <sys/socket.h>
#include <signal.h>
#include <sys/param.h>
#ifdef BSD
#include <sys/uio.h>
#endif
/*
* As per
* <http://pubs.opengroup.org/onlinepubs/007904875/basedefs/sys/socket.h.html
* #tag_13_61_05>:
*
* "To forestall portability problems, it is recommended that applications
* not use values larger than (2**31)-1 for the socklen_t type."
*/
#define SOCKLEN_MAX 0x7FFFFFFF
PyObject *sendmsg_socket_error;
static PyObject *sendmsg_sendmsg(PyObject *self, PyObject *args, PyObject *keywds);
static PyObject *sendmsg_recvmsg(PyObject *self, PyObject *args, PyObject *keywds);
static PyObject *sendmsg_getsockfam(PyObject *self, PyObject *args, PyObject *keywds);
static char sendmsg_doc[] = "\
Bindings for sendmsg(2), recvmsg(2), and a minimal helper for inspecting\n\
address family of a socket.\n\
";
static char sendmsg_sendmsg_doc[] = "\
Wrap the C sendmsg(2) function for sending \"messages\" on a socket.\n\
\n\
@param fd: The file descriptor of the socket over which to send a message.\n\
@type fd: C{int}\n\
\n\
@param data: Bytes to write to the socket.\n\
@type data: C{str}\n\
\n\
@param flags: Flags to affect how the message is sent. See the C{MSG_}\n\
constants in the sendmsg(2) manual page. By default no flags are set.\n\
@type flags: C{int}\n\
\n\
@param ancillary: Extra data to send over the socket outside of the normal\n\
datagram or stream mechanism. By default no ancillary data is sent.\n\
@type ancillary: C{list} of C{tuple} of C{int}, C{int}, and C{str}.\n\
\n\
@raise OverflowError: Raised if too much ancillary data is given.\n\
@raise socket.error: Raised if the underlying syscall indicates an error.\n\
\n\
@return: The return value of the underlying syscall, if it succeeds.\n\
";
static char sendmsg_recvmsg_doc[] = "\
Wrap the C recvmsg(2) function for receiving \"messages\" on a socket.\n\
\n\
@param fd: The file descriptor of the socket over which to receive a message.\n\
@type fd: C{int}\n\
\n\
@param flags: Flags to affect how the message is received. See the C{MSG_}\n\
constants in the recvmsg(2) manual page. By default no flags are set.\n\
@type flags: C{int}\n\
\n\
@param maxsize: The maximum number of bytes to receive from the socket\n\
using the datagram or stream mechanism. The default maximum is 8192.\n\
@type maxsize: C{int}\n\
\n\
@param cmsg_size: The maximum number of bytes to receive from the socket\n\
outside of the normal datagram or stream mechanism. The default maximum is 4096.\n\
\n\
@raise OverflowError: Raised if too much ancillary data is given.\n\
@raise socket.error: Raised if the underlying syscall indicates an error.\n\
\n\
@return: A C{tuple} of three elements: the bytes received using the\n\
datagram/stream mechanism, flags as an C{int} describing the data\n\
received, and a C{list} of C{tuples} giving ancillary received data.\n\
";
static char sendmsg_getsockfam_doc[] = "\
Retrieve the address family of a given socket.\n\
\n\
@param fd: The file descriptor of the socket the address family of which\n\
to retrieve.\n\
@type fd: C{int}\n\
\n\
@raise socket.error: Raised if the underlying getsockname call indicates\n\
an error.\n\
\n\
@return: A C{int} representing the address family of the socket. For\n\
example, L{socket.AF_INET}, L{socket.AF_INET6}, or L{socket.AF_UNIX}.\n\
";
static PyMethodDef sendmsg_methods[] = {
{"send1msg", (PyCFunction) sendmsg_sendmsg, METH_VARARGS | METH_KEYWORDS,
sendmsg_sendmsg_doc},
{"recv1msg", (PyCFunction) sendmsg_recvmsg, METH_VARARGS | METH_KEYWORDS,
sendmsg_recvmsg_doc},
{"getsockfam", (PyCFunction) sendmsg_getsockfam,
METH_VARARGS | METH_KEYWORDS, sendmsg_getsockfam_doc},
{NULL, NULL, 0, NULL}
};
PyMODINIT_FUNC initsendmsg(void) {
PyObject *module;
sendmsg_socket_error = NULL; /* Make sure that this has a known value
before doing anything that might exit. */
module = Py_InitModule3("sendmsg", sendmsg_methods, sendmsg_doc);
if (!module) {
return;
}
/*
The following is the only value mentioned by POSIX:
http://www.opengroup.org/onlinepubs/9699919799/basedefs/sys_socket.h.html
*/
if (-1 == PyModule_AddIntConstant(module, "SCM_RIGHTS", SCM_RIGHTS)) {
return;
}
/* BSD, Darwin, Hurd */
#if defined(SCM_CREDS)
if (-1 == PyModule_AddIntConstant(module, "SCM_CREDS", SCM_CREDS)) {
return;
}
#endif
/* Linux */
#if defined(SCM_CREDENTIALS)
if (-1 == PyModule_AddIntConstant(module, "SCM_CREDENTIALS", SCM_CREDENTIALS)) {
return;
}
#endif
/* Apparently everywhere, but not standardized. */
#if defined(SCM_TIMESTAMP)
if (-1 == PyModule_AddIntConstant(module, "SCM_TIMESTAMP", SCM_TIMESTAMP)) {
return;
}
#endif
module = PyImport_ImportModule("socket");
if (!module) {
return;
}
sendmsg_socket_error = PyObject_GetAttrString(module, "error");
if (!sendmsg_socket_error) {
return;
}
}
static PyObject *sendmsg_sendmsg(PyObject *self, PyObject *args, PyObject *keywds) {
int fd;
int flags = 0;
Py_ssize_t sendmsg_result, iovec_length;
struct msghdr message_header;
struct iovec iov[1];
PyObject *ancillary = NULL;
PyObject *iterator = NULL;
PyObject *item = NULL;
PyObject *result_object = NULL;
static char *kwlist[] = {"fd", "data", "flags", "ancillary", NULL};
if (!PyArg_ParseTupleAndKeywords(
args, keywds, "it#|iO:sendmsg", kwlist,
&fd,
&iov[0].iov_base,
&iovec_length,
&flags,
&ancillary)) {
return NULL;
}
iov[0].iov_len = iovec_length;
message_header.msg_name = NULL;
message_header.msg_namelen = 0;
message_header.msg_iov = iov;
message_header.msg_iovlen = 1;
message_header.msg_control = NULL;
message_header.msg_controllen = 0;
message_header.msg_flags = 0;
if (ancillary) {
if (!PyList_Check(ancillary)) {
PyErr_Format(PyExc_TypeError,
"send1msg argument 3 expected list, got %s",
ancillary->ob_type->tp_name);
goto finished;
}
iterator = PyObject_GetIter(ancillary);
if (iterator == NULL) {
goto finished;
}
size_t all_data_len = 0;
/* First we need to know how big the buffer needs to be in order to
have enough space for all of the messages. */
while ( (item = PyIter_Next(iterator)) ) {
int type, level;
Py_ssize_t data_len;
size_t prev_all_data_len;
char *data;
if (!PyTuple_Check(item)) {
PyErr_Format(PyExc_TypeError,
"send1msg argument 3 expected list of tuple, "
"got list containing %s",
item->ob_type->tp_name);
goto finished;
}
if (!PyArg_ParseTuple(
item, "iit#:sendmsg ancillary data (level, type, data)",
&level, &type, &data, &data_len)) {
goto finished;
}
prev_all_data_len = all_data_len;
all_data_len += CMSG_SPACE(data_len);
Py_DECREF(item);
item = NULL;
if (all_data_len < prev_all_data_len) {
PyErr_Format(PyExc_OverflowError,
"Too much msg_control to fit in a size_t: %zu",
prev_all_data_len);
goto finished;
}
}
Py_DECREF(iterator);
iterator = NULL;
/* Allocate the buffer for all of the ancillary elements, if we have
* any. */
if (all_data_len) {
if (all_data_len > SOCKLEN_MAX) {
PyErr_Format(PyExc_OverflowError,
"Too much msg_control to fit in a socklen_t: %zu",
all_data_len);
goto finished;
}
message_header.msg_control = PyMem_Malloc(all_data_len);
if (!message_header.msg_control) {
PyErr_NoMemory();
goto finished;
}
} else {
message_header.msg_control = NULL;
}
message_header.msg_controllen = (socklen_t) all_data_len;
iterator = PyObject_GetIter(ancillary); /* again */
if (!iterator) {
goto finished;
}
/* Unpack the tuples into the control message. */
struct cmsghdr *control_message = CMSG_FIRSTHDR(&message_header);
while ( (item = PyIter_Next(iterator)) ) {
int type, level;
Py_ssize_t data_len;
size_t data_size;
unsigned char *data, *cmsg_data;
/* We explicitly allocated enough space for all ancillary data
above; if there isn't enough room, all bets are off. */
assert(control_message);
if (!PyArg_ParseTuple(item,
"iit#:sendmsg ancillary data (level, type, data)",
&level,
&type,
&data,
&data_len)) {
goto finished;
}
control_message->cmsg_level = level;
control_message->cmsg_type = type;
data_size = CMSG_LEN(data_len);
if (data_size > SOCKLEN_MAX) {
PyErr_Format(PyExc_OverflowError,
"CMSG_LEN(%zd) > SOCKLEN_MAX", data_len);
goto finished;
}
control_message->cmsg_len = (socklen_t) data_size;
cmsg_data = CMSG_DATA(control_message);
memcpy(cmsg_data, data, data_len);
Py_DECREF(item);
item = NULL;
control_message = CMSG_NXTHDR(&message_header, control_message);
}
Py_DECREF(iterator);
iterator = NULL;
if (PyErr_Occurred()) {
goto finished;
}
}
sendmsg_result = sendmsg(fd, &message_header, flags);
if (sendmsg_result < 0) {
PyErr_SetFromErrno(sendmsg_socket_error);
goto finished;
}
result_object = Py_BuildValue("n", sendmsg_result);
finished:
if (item) {
Py_DECREF(item);
item = NULL;
}
if (iterator) {
Py_DECREF(iterator);
iterator = NULL;
}
if (message_header.msg_control) {
PyMem_Free(message_header.msg_control);
message_header.msg_control = NULL;
}
return result_object;
}
static PyObject *sendmsg_recvmsg(PyObject *self, PyObject *args, PyObject *keywds) {
int fd = -1;
int flags = 0;
int maxsize = 8192;
int cmsg_size = 4096;
size_t cmsg_space;
size_t cmsg_overhead;
Py_ssize_t recvmsg_result;
struct msghdr message_header;
struct cmsghdr *control_message;
struct iovec iov[1];
char *cmsgbuf;
PyObject *ancillary;
PyObject *final_result = NULL;
static char *kwlist[] = {"fd", "flags", "maxsize", "cmsg_size", NULL};
if (!PyArg_ParseTupleAndKeywords(args, keywds, "i|iii:recvmsg", kwlist,
&fd, &flags, &maxsize, &cmsg_size)) {
return NULL;
}
cmsg_space = CMSG_SPACE(cmsg_size);
/* overflow check */
if (cmsg_space > SOCKLEN_MAX) {
PyErr_Format(PyExc_OverflowError,
"CMSG_SPACE(cmsg_size) greater than SOCKLEN_MAX: %d",
cmsg_size);
return NULL;
}
message_header.msg_name = NULL;
message_header.msg_namelen = 0;
iov[0].iov_len = maxsize;
iov[0].iov_base = PyMem_Malloc(maxsize);
if (!iov[0].iov_base) {
PyErr_NoMemory();
return NULL;
}
message_header.msg_iov = iov;
message_header.msg_iovlen = 1;
cmsgbuf = PyMem_Malloc(cmsg_space);
if (!cmsgbuf) {
PyMem_Free(iov[0].iov_base);
PyErr_NoMemory();
return NULL;
}
memset(cmsgbuf, 0, cmsg_space);
message_header.msg_control = cmsgbuf;
/* see above for overflow check */
message_header.msg_controllen = (socklen_t) cmsg_space;
recvmsg_result = recvmsg(fd, &message_header, flags);
if (recvmsg_result < 0) {
PyErr_SetFromErrno(sendmsg_socket_error);
goto finished;
}
ancillary = PyList_New(0);
if (!ancillary) {
goto finished;
}
for (control_message = CMSG_FIRSTHDR(&message_header);
control_message;
control_message = CMSG_NXTHDR(&message_header,
control_message)) {
PyObject *entry;
/* Some platforms apparently always fill out the ancillary data
structure with a single bogus value if none is provided; ignore it,
if that is the case. */
if ((!(control_message->cmsg_level)) &&
(!(control_message->cmsg_type))) {
continue;
}
/*
* Figure out how much of the cmsg size is cmsg structure overhead - in
* other words, how much is not part of the application data. This lets
* us compute the right application data size below. There should
* really be a CMSG_ macro for this.
*/
cmsg_overhead = (char*)CMSG_DATA(control_message) - (char*)control_message;
entry = Py_BuildValue(
"(iis#)",
control_message->cmsg_level,
control_message->cmsg_type,
CMSG_DATA(control_message),
(Py_ssize_t) (control_message->cmsg_len - cmsg_overhead));
if (!entry) {
Py_DECREF(ancillary);
goto finished;
}
if (PyList_Append(ancillary, entry) < 0) {
Py_DECREF(ancillary);
Py_DECREF(entry);
goto finished;
} else {
Py_DECREF(entry);
}
}
final_result = Py_BuildValue(
"s#iO",
iov[0].iov_base,
recvmsg_result,
message_header.msg_flags,
ancillary);
Py_DECREF(ancillary);
finished:
PyMem_Free(iov[0].iov_base);
PyMem_Free(cmsgbuf);
return final_result;
}
static PyObject *sendmsg_getsockfam(PyObject *self, PyObject *args,
PyObject *keywds) {
int fd;
struct sockaddr sa;
static char *kwlist[] = {"fd", NULL};
if (!PyArg_ParseTupleAndKeywords(args, keywds, "i", kwlist, &fd)) {
return NULL;
}
socklen_t sz = sizeof(sa);
if (getsockname(fd, &sa, &sz)) {
PyErr_SetFromErrno(sendmsg_socket_error);
return NULL;
}
return Py_BuildValue("i", sa.sa_family);
}

View file

@ -0,0 +1,76 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""Creation of Windows shortcuts.
Requires win32all.
"""
from win32com.shell import shell
import pythoncom
import os
def open(filename):
"""Open an existing shortcut for reading.
@return: The shortcut object
@rtype: Shortcut
"""
sc=Shortcut()
sc.load(filename)
return sc
class Shortcut:
"""A shortcut on Win32.
>>> sc=Shortcut(path, arguments, description, workingdir, iconpath, iconidx)
@param path: Location of the target
@param arguments: If path points to an executable, optional arguments to
pass
@param description: Human-readable description of target
@param workingdir: Directory from which target is launched
@param iconpath: Filename that contains an icon for the shortcut
@param iconidx: If iconpath is set, optional index of the icon desired
"""
def __init__(self,
path=None,
arguments=None,
description=None,
workingdir=None,
iconpath=None,
iconidx=0):
self._base = pythoncom.CoCreateInstance(
shell.CLSID_ShellLink, None,
pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
)
data = map(None,
['"%s"' % os.path.abspath(path), arguments, description,
os.path.abspath(workingdir), os.path.abspath(iconpath)],
("SetPath", "SetArguments", "SetDescription",
"SetWorkingDirectory") )
for value, function in data:
if value and function:
# call function on each non-null value
getattr(self, function)(value)
if iconpath:
self.SetIconLocation(iconpath, iconidx)
def load( self, filename ):
"""Read a shortcut file from disk."""
self._base.QueryInterface(pythoncom.IID_IPersistFile).Load(filename)
def save( self, filename ):
"""Write the shortcut to disk.
The file should be named something.lnk.
"""
self._base.QueryInterface(pythoncom.IID_IPersistFile).Save(filename, 0)
def __getattr__( self, name ):
if name != "_base":
return getattr(self._base, name)
raise AttributeError, "%s instance has no attribute %s" % \
(self.__class__.__name__, name)
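# A minimal usage sketch (not part of the original module): the target and
# shortcut paths below are hypothetical, and win32all must be installed.
def _demoCreateShortcut():
    sc = Shortcut(path=r"C:\Python27\python.exe",
                  description="Hypothetical Python shortcut")
    sc.save(r"C:\Temp\Python.lnk")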

View file

@ -0,0 +1,107 @@
# -*- test-case-name: twisted.python.test.test_syslog -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Classes and utility functions for integrating Twisted and syslog.
You probably want to call L{startLogging}.
"""
syslog = __import__('syslog')
from twisted.python import log
# These defaults come from the Python syslog docs.
DEFAULT_OPTIONS = 0
DEFAULT_FACILITY = syslog.LOG_USER
class SyslogObserver:
"""
A log observer for logging to syslog.
See L{twisted.python.log} for context.
This logObserver will automatically use LOG_ALERT priority for logged
failures (such as from C{log.err()}), but you can use any priority and
facility by setting the 'C{syslogPriority}' and 'C{syslogFacility}' keys in
the event dict.
"""
openlog = syslog.openlog
syslog = syslog.syslog
def __init__(self, prefix, options=DEFAULT_OPTIONS,
facility=DEFAULT_FACILITY):
"""
@type prefix: C{str}
@param prefix: The syslog prefix to use.
@type options: C{int}
@param options: A bitvector represented as an integer of the syslog
options to use.
@type facility: C{int}
@param facility: An indication to the syslog daemon of what sort of
program this is (essentially, an additional arbitrary metadata
classification for messages sent to syslog by this observer).
"""
self.openlog(prefix, options, facility)
def emit(self, eventDict):
"""
Send a message event to the I{syslog}.
@param eventDict: The event to send. If it has no C{'message'} key, it
will be ignored. Otherwise, if it has C{'syslogPriority'} and/or
C{'syslogFacility'} keys, these will be used as the syslog priority
and facility. If it has no C{'syslogPriority'} key but a true
value for the C{'isError'} key, the B{LOG_ALERT} priority will be
used; if it has a false value for C{'isError'}, B{LOG_INFO} will be
used. If the C{'message'} key is multiline, each line will be sent
to the syslog separately.
"""
# Figure out what the message-text is.
text = log.textFromEventDict(eventDict)
if text is None:
return
# Figure out what syslog parameters we might need to use.
priority = syslog.LOG_INFO
facility = 0
if eventDict['isError']:
priority = syslog.LOG_ALERT
if 'syslogPriority' in eventDict:
priority = int(eventDict['syslogPriority'])
if 'syslogFacility' in eventDict:
facility = int(eventDict['syslogFacility'])
# Break the message up into lines and send them.
lines = text.split('\n')
while lines[-1:] == ['']:
lines.pop()
firstLine = True
for line in lines:
if firstLine:
firstLine = False
else:
line = '\t' + line
self.syslog(priority | facility,
'[%s] %s' % (eventDict['system'], line))
def startLogging(prefix='Twisted', options=DEFAULT_OPTIONS,
facility=DEFAULT_FACILITY, setStdout=1):
"""
Send all Twisted logging output to syslog from now on.
The prefix, options and facility arguments are passed to
C{syslog.openlog()}, see the Python syslog documentation for details. For
other parameters, see L{twisted.python.log.startLoggingWithObserver}.
"""
obs = SyslogObserver(prefix, options, facility)
log.startLoggingWithObserver(obs.emit, setStdout=setStdout)
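# A minimal usage sketch (not part of the original module): the 'example'
# prefix is hypothetical. Messages logged through twisted.python.log end up in
# syslog, optionally with an explicit priority.
def _demoSyslogLogging():
    startLogging(prefix='example')
    log.msg('service started')
    log.msg('disk almost full', syslogPriority=syslog.LOG_WARNING)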

View file

@ -0,0 +1,87 @@
# -*- test-case-name: twisted.python.test.test_systemd -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Integration with systemd.
Currently only the minimum APIs necessary for using systemd's socket activation
feature are supported.
"""
__all__ = ['ListenFDs']
from os import getpid
class ListenFDs(object):
"""
L{ListenFDs} provides access to file descriptors inherited from systemd.
Typically L{ListenFDs.fromEnvironment} should be used to construct a new
instance of L{ListenFDs}.
@cvar _START: File descriptors inherited from systemd are always
consecutively numbered, with a fixed lowest "starting" descriptor. This
gives the default starting descriptor. Since this must agree with the
value systemd is using, it typically should not be overridden.
@type _START: C{int}
@ivar _descriptors: A C{list} of C{int} giving the descriptors which were
inherited.
"""
_START = 3
def __init__(self, descriptors):
"""
@param descriptors: The descriptors which will be returned from calls to
C{inheritedDescriptors}.
"""
self._descriptors = descriptors
@classmethod
def fromEnvironment(cls, environ=None, start=None):
"""
@param environ: A dictionary-like object to inspect to discover
inherited descriptors. By default, C{None}, indicating that the
real process environment should be inspected. The default is
suitable for typical usage.
@param start: An integer giving the lowest value of an inherited
descriptor systemd will give us. By default, C{None}, indicating
the known correct (that is, in agreement with systemd) value will be
used. The default is suitable for typical usage.
@return: A new instance of C{cls} which can be used to look up the
descriptors which have been inherited.
"""
if environ is None:
from os import environ
if start is None:
start = cls._START
descriptors = []
try:
pid = int(environ['LISTEN_PID'])
except (KeyError, ValueError):
pass
else:
if pid == getpid():
try:
count = int(environ['LISTEN_FDS'])
except (KeyError, ValueError):
pass
else:
descriptors = range(start, start + count)
del environ['LISTEN_PID'], environ['LISTEN_FDS']
return cls(descriptors)
def inheritedDescriptors(self):
"""
@return: The configured list of descriptors.
"""
return list(self._descriptors)
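# A minimal usage sketch (not part of the original module): fetch whatever
# descriptors systemd handed to this process; returning the first one (if any)
# is purely illustrative.
def _demoListenFDs():
    descriptors = ListenFDs.fromEnvironment().inheritedDescriptors()
    if descriptors:
        return descriptors[0]
    return None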

View file

@ -0,0 +1,3 @@
"""
Unit tests for L{twisted.python}.
"""

View file

@ -0,0 +1,28 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A module that is deprecated, used by L{twisted.python.test.test_deprecate} for
testing purposes.
"""
from __future__ import division, absolute_import
from twisted.python.versions import Version
from twisted.python.deprecate import deprecatedModuleAttribute
# Known module-level attributes.
DEPRECATED_ATTRIBUTE = 42
ANOTHER_ATTRIBUTE = 'hello'
version = Version('Twisted', 8, 0, 0)
message = 'Oh noes!'
deprecatedModuleAttribute(
version,
message,
__name__,
'DEPRECATED_ATTRIBUTE')

View file

@ -0,0 +1,59 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Facilities for helping test code which interacts with Python's module system
to load code.
"""
from __future__ import division, absolute_import
import sys
from twisted.python.filepath import FilePath
class TwistedModulesMixin:
"""
A mixin for C{twisted.trial.unittest.SynchronousTestCase} providing useful
methods for manipulating Python's module system.
"""
def replaceSysPath(self, sysPath):
"""
Replace sys.path, for the duration of the test, with the given value.
"""
originalSysPath = sys.path[:]
def cleanUpSysPath():
sys.path[:] = originalSysPath
self.addCleanup(cleanUpSysPath)
sys.path[:] = sysPath
def replaceSysModules(self, sysModules):
"""
Replace sys.modules, for the duration of the test, with the given value.
"""
originalSysModules = sys.modules.copy()
def cleanUpSysModules():
sys.modules.clear()
sys.modules.update(originalSysModules)
self.addCleanup(cleanUpSysModules)
sys.modules.clear()
sys.modules.update(sysModules)
def pathEntryWithOnePackage(self, pkgname=b"test_package"):
"""
Generate a L{FilePath} with one package, named C{pkgname}, on it, and
return the L{FilePath} of the path entry.
"""
# Remove utf-8 encode and bytes for path segments when FilePath
# supports Unicode paths on Python 3 (#2366, #4736, #5203).
entry = FilePath(self.mktemp().encode("utf-8"))
pkg = entry.child(b"test_package")
pkg.makedirs()
pkg.child(b"__init__.py").setContent(b"")
return entry
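# A minimal usage sketch (not part of the original module): a hypothetical
# trial test case that uses the mixin to make a temporary package importable.
def _exampleMixinUsage():
    from twisted.trial.unittest import SynchronousTestCase

    class FindPackageTests(TwistedModulesMixin, SynchronousTestCase):
        def test_packageIsImportable(self):
            entry = self.pathEntryWithOnePackage()
            self.replaceSysPath([entry.path] + sys.path)
            __import__("test_package")

    return FindPackageTests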

View file

@ -0,0 +1,40 @@
#!/usr/bin/python
# -*- test-case-name: twisted.python.test.test_sendmsg -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import sys, os
from struct import unpack
# This makes me sad. Why aren't things nice?
sys.path.insert(0, __file__.rsplit('/', 4)[0])
from twisted.python.sendmsg import recv1msg
def recvfd(socketfd):
"""
Receive a file descriptor from a L{send1msg} message on the given C{AF_UNIX}
socket.
@param socketfd: An C{AF_UNIX} socket, attached to another process waiting
to send sockets via the ancillary data mechanism in L{send1msg}.
@type socketfd: C{int}
@return: a 2-tuple of (new file descriptor, description).
@rtype: 2-tuple of (C{int}, C{str})
"""
data, flags, ancillary = recv1msg(socketfd)
[(cmsg_level, cmsg_type, packedFD)] = ancillary
# cmsg_level and cmsg_type really need to be SOL_SOCKET / SCM_RIGHTS, but
# since those are the *only* standard values, there's not much point in
# checking.
[unpackedFD] = unpack("i", packedFD)
return (unpackedFD, data)
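# A minimal sender-side sketch (not part of the original script); sendfd() is a
# hypothetical counterpart to recvfd() built on the documented send1msg API.
def sendfd(socketfd, fd, description=""):
    """
    Send C{fd} (and a short description) over the C{AF_UNIX} socket
    C{socketfd} as SCM_RIGHTS ancillary data.
    """
    from socket import SOL_SOCKET
    from struct import pack
    from twisted.python.sendmsg import send1msg, SCM_RIGHTS
    send1msg(socketfd, description, 0,
             [(SOL_SOCKET, SCM_RIGHTS, pack("i", fd))])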
if __name__ == '__main__':
fd, description = recvfd(int(sys.argv[1]))
os.write(fd, "Test fixture data: %s.\n" % (description,))
os.close(fd)

View file

@ -0,0 +1,839 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for Twisted component architecture.
"""
from __future__ import division, absolute_import
from zope.interface import Interface, implementer, Attribute
from zope.interface.adapter import AdapterRegistry
from twisted.python.compat import comparable, cmp
from twisted.trial import unittest
from twisted.python import components
from twisted.python.components import _addHook, _removeHook, proxyForInterface
class Compo(components.Componentized):
num = 0
def inc(self):
self.num = self.num + 1
return self.num
class IAdept(Interface):
def adaptorFunc():
raise NotImplementedError()
class IElapsed(Interface):
def elapsedFunc():
"""
1!
"""
@implementer(IAdept)
class Adept(components.Adapter):
def __init__(self, orig):
self.original = orig
self.num = 0
def adaptorFunc(self):
self.num = self.num + 1
return self.num, self.original.inc()
@implementer(IElapsed)
class Elapsed(components.Adapter):
def elapsedFunc(self):
return 1
class AComp(components.Componentized):
pass
class BComp(AComp):
pass
class CComp(BComp):
pass
class ITest(Interface):
pass
class ITest2(Interface):
pass
class ITest3(Interface):
pass
class ITest4(Interface):
pass
@implementer(ITest, ITest3, ITest4)
class Test(components.Adapter):
def __init__(self, orig):
pass
@implementer(ITest2)
class Test2:
temporaryAdapter = 1
def __init__(self, orig):
pass
class RegistryUsingMixin(object):
"""
Mixin for test cases which modify the global registry somehow.
"""
def setUp(self):
"""
Configure L{twisted.python.components.registerAdapter} to mutate an
alternate registry to improve test isolation.
"""
# Create a brand new, empty registry and put it onto the components
# module where registerAdapter will use it. Also ensure that it goes
# away at the end of the test.
scratchRegistry = AdapterRegistry()
self.patch(components, 'globalRegistry', scratchRegistry)
# Hook the new registry up to the adapter lookup system and ensure that
# association is also discarded after the test.
hook = _addHook(scratchRegistry)
self.addCleanup(_removeHook, hook)
class ComponentizedTestCase(unittest.SynchronousTestCase, RegistryUsingMixin):
"""
Simple test case for caching in Componentized.
"""
def setUp(self):
RegistryUsingMixin.setUp(self)
components.registerAdapter(Test, AComp, ITest)
components.registerAdapter(Test, AComp, ITest3)
components.registerAdapter(Test2, AComp, ITest2)
def testComponentized(self):
components.registerAdapter(Adept, Compo, IAdept)
components.registerAdapter(Elapsed, Compo, IElapsed)
c = Compo()
assert c.getComponent(IAdept).adaptorFunc() == (1, 1)
assert c.getComponent(IAdept).adaptorFunc() == (2, 2)
assert IElapsed(IAdept(c)).elapsedFunc() == 1
def testInheritanceAdaptation(self):
c = CComp()
co1 = c.getComponent(ITest)
co2 = c.getComponent(ITest)
co3 = c.getComponent(ITest2)
co4 = c.getComponent(ITest2)
assert co1 is co2
assert co3 is not co4
c.removeComponent(co1)
co5 = c.getComponent(ITest)
co6 = c.getComponent(ITest)
assert co5 is co6
assert co1 is not co5
def testMultiAdapter(self):
c = CComp()
co1 = c.getComponent(ITest)
co2 = c.getComponent(ITest2)
co3 = c.getComponent(ITest3)
co4 = c.getComponent(ITest4)
self.assertIdentical(None, co4)
self.assertIdentical(co1, co3)
def test_getComponentDefaults(self):
"""
Test that a default value specified to Componentized.getComponent is
returned if there is no component for the requested interface.
"""
componentized = components.Componentized()
default = object()
self.assertIdentical(
componentized.getComponent(ITest, default),
default)
self.assertIdentical(
componentized.getComponent(ITest, default=default),
default)
self.assertIdentical(
componentized.getComponent(ITest),
None)
def test_setAdapter(self):
"""
C{Componentized.setAdapter} sets a component for an interface by
wrapping the instance with the given adapter class.
"""
componentized = components.Componentized()
componentized.setAdapter(IAdept, Adept)
component = componentized.getComponent(IAdept)
self.assertEqual(component.original, componentized)
self.assertIsInstance(component, Adept)
def test_addAdapter(self):
"""
C{Componentized.addAdapter} adapts the instance by wrapping it with the
given adapter class, then stores it using C{addComponent}.
"""
componentized = components.Componentized()
componentized.addAdapter(Adept, ignoreClass=True)
component = componentized.getComponent(IAdept)
self.assertEqual(component.original, componentized)
self.assertIsInstance(component, Adept)
def test_setComponent(self):
"""
C{Componentized.setComponent} stores the given component using the
given interface as the key.
"""
componentized = components.Componentized()
obj = object()
componentized.setComponent(ITest, obj)
self.assertIdentical(componentized.getComponent(ITest), obj)
def test_unsetComponent(self):
"""
C{Componentized.unsetComponent} removes the cached component for the
given interface.
"""
componentized = components.Componentized()
obj = object()
componentized.setComponent(ITest, obj)
componentized.unsetComponent(ITest)
self.assertIdentical(componentized.getComponent(ITest), None)
def test_reprableComponentized(self):
"""
C{ReprableComponentized} has a C{__repr__} that lists its cache.
"""
rc = components.ReprableComponentized()
rc.setComponent(ITest, "hello")
result = repr(rc)
self.assertIn("ITest", result)
self.assertIn("hello", result)
class AdapterTestCase(unittest.SynchronousTestCase):
"""Test adapters."""
def testAdapterGetComponent(self):
o = object()
a = Adept(o)
self.assertRaises(components.CannotAdapt, ITest, a)
self.assertEqual(ITest(a, None), None)
class IMeta(Interface):
pass
@implementer(IMeta)
class MetaAdder(components.Adapter):
def add(self, num):
return self.original.num + num
@implementer(IMeta)
class BackwardsAdder(components.Adapter):
def add(self, num):
return self.original.num - num
class MetaNumber:
def __init__(self, num):
self.num = num
class FakeAdder:
def add(self, num):
return num + 5
class FakeNumber:
num = 3
class ComponentNumber(components.Componentized):
def __init__(self):
self.num = 0
components.Componentized.__init__(self)
@implementer(IMeta)
class ComponentMeta(components.Adapter):
def __init__(self, original):
components.Adapter.__init__(self, original)
self.num = self.original.num
class ComponentAdder(ComponentMeta):
def add(self, num):
self.num += num
return self.num
class ComponentDoubler(ComponentMeta):
def add(self, num):
self.num += (num * 2)
return self.original.num
class IAttrX(Interface):
def x():
pass
class IAttrXX(Interface):
def xx():
pass
@implementer(IAttrX)
class Xcellent:
def x(self):
return 'x!'
@comparable
class DoubleXAdapter:
num = 42
def __init__(self, original):
self.original = original
def xx(self):
return (self.original.x(), self.original.x())
def __cmp__(self, other):
return cmp(self.num, other.num)
class TestMetaInterface(RegistryUsingMixin, unittest.SynchronousTestCase):
def testBasic(self):
components.registerAdapter(MetaAdder, MetaNumber, IMeta)
n = MetaNumber(1)
self.assertEqual(IMeta(n).add(1), 2)
def testComponentizedInteraction(self):
components.registerAdapter(ComponentAdder, ComponentNumber, IMeta)
c = ComponentNumber()
IMeta(c).add(1)
IMeta(c).add(1)
self.assertEqual(IMeta(c).add(1), 3)
def testAdapterWithCmp(self):
# Make sure that a __cmp__ on an adapter doesn't break anything
components.registerAdapter(DoubleXAdapter, IAttrX, IAttrXX)
xx = IAttrXX(Xcellent())
self.assertEqual(('x!', 'x!'), xx.xx())
class RegistrationTestCase(RegistryUsingMixin, unittest.SynchronousTestCase):
"""
Tests for adapter registration.
"""
def _registerAdapterForClassOrInterface(self, original):
"""
Register an adapter with L{components.registerAdapter} for the given
class or interface and verify that the adapter can be looked up with
L{components.getAdapterFactory}.
"""
adapter = lambda o: None
components.registerAdapter(adapter, original, ITest)
self.assertIdentical(
components.getAdapterFactory(original, ITest, None),
adapter)
def test_registerAdapterForClass(self):
"""
Test that an adapter from a class can be registered and then looked
up.
"""
class TheOriginal(object):
pass
return self._registerAdapterForClassOrInterface(TheOriginal)
def test_registerAdapterForInterface(self):
"""
Test that an adapter from an interface can be registered and then
looked up.
"""
return self._registerAdapterForClassOrInterface(ITest2)
def _duplicateAdapterForClassOrInterface(self, original):
"""
Verify that L{components.registerAdapter} raises L{ValueError} if the
from-type/interface and to-interface pair is not unique.
"""
firstAdapter = lambda o: False
secondAdapter = lambda o: True
components.registerAdapter(firstAdapter, original, ITest)
self.assertRaises(
ValueError,
components.registerAdapter,
secondAdapter, original, ITest)
# Make sure that the original adapter is still around as well
self.assertIdentical(
components.getAdapterFactory(original, ITest, None),
firstAdapter)
def test_duplicateAdapterForClass(self):
"""
Test that attempting to register a second adapter from a class
raises the appropriate exception.
"""
class TheOriginal(object):
pass
return self._duplicateAdapterForClassOrInterface(TheOriginal)
def test_duplicateAdapterForInterface(self):
"""
Test that attempting to register a second adapter from an interface
raises the appropriate exception.
"""
return self._duplicateAdapterForClassOrInterface(ITest2)
def _duplicateAdapterForClassOrInterfaceAllowed(self, original):
"""
Verify that when C{components.ALLOW_DUPLICATES} is set to C{True}, new
adapter registrations for a particular from-type/interface and
to-interface pair replace older registrations.
"""
firstAdapter = lambda o: False
secondAdapter = lambda o: True
class TheInterface(Interface):
pass
components.registerAdapter(firstAdapter, original, TheInterface)
components.ALLOW_DUPLICATES = True
try:
components.registerAdapter(secondAdapter, original, TheInterface)
self.assertIdentical(
components.getAdapterFactory(original, TheInterface, None),
secondAdapter)
finally:
components.ALLOW_DUPLICATES = False
# It should be rejected again at this point
self.assertRaises(
ValueError,
components.registerAdapter,
firstAdapter, original, TheInterface)
self.assertIdentical(
components.getAdapterFactory(original, TheInterface, None),
secondAdapter)
def test_duplicateAdapterForClassAllowed(self):
"""
Test that when L{components.ALLOW_DUPLICATES} is set to a true
value, duplicate registrations from classes are allowed to override
the original registration.
"""
class TheOriginal(object):
pass
return self._duplicateAdapterForClassOrInterfaceAllowed(TheOriginal)
def test_duplicateAdapterForInterfaceAllowed(self):
"""
Test that when L{components.ALLOW_DUPLICATES} is set to a true
value, duplicate registrations from interfaces are allowed to
override the original registration.
"""
class TheOriginal(Interface):
pass
return self._duplicateAdapterForClassOrInterfaceAllowed(TheOriginal)
def _multipleInterfacesForClassOrInterface(self, original):
"""
Verify that an adapter can be registered for multiple to-interfaces at a
time.
"""
adapter = lambda o: None
components.registerAdapter(adapter, original, ITest, ITest2)
self.assertIdentical(
components.getAdapterFactory(original, ITest, None), adapter)
self.assertIdentical(
components.getAdapterFactory(original, ITest2, None), adapter)
def test_multipleInterfacesForClass(self):
"""
Test the registration of an adapter from a class to several
interfaces at once.
"""
class TheOriginal(object):
pass
return self._multipleInterfacesForClassOrInterface(TheOriginal)
def test_multipleInterfacesForInterface(self):
"""
Test the registration of an adapter from an interface to several
interfaces at once.
"""
return self._multipleInterfacesForClassOrInterface(ITest3)
def _subclassAdapterRegistrationForClassOrInterface(self, original):
"""
Verify that a new adapter can be registered for a particular
to-interface from a subclass of a type or interface which already has an
adapter registered to that interface and that the subclass adapter takes
precedence over the base class adapter.
"""
firstAdapter = lambda o: True
secondAdapter = lambda o: False
class TheSubclass(original):
pass
components.registerAdapter(firstAdapter, original, ITest)
components.registerAdapter(secondAdapter, TheSubclass, ITest)
self.assertIdentical(
components.getAdapterFactory(original, ITest, None),
firstAdapter)
self.assertIdentical(
components.getAdapterFactory(TheSubclass, ITest, None),
secondAdapter)
def test_subclassAdapterRegistrationForClass(self):
"""
Test that an adapter to a particular interface can be registered
from both a class and its subclass.
"""
class TheOriginal(object):
pass
return self._subclassAdapterRegistrationForClassOrInterface(TheOriginal)
def test_subclassAdapterRegistrationForInterface(self):
"""
Test that an adapter to a particular interface can be registered
from both an interface and its subclass.
"""
return self._subclassAdapterRegistrationForClassOrInterface(ITest2)
class IProxiedInterface(Interface):
"""
An interface class for use by L{proxyForInterface}.
"""
ifaceAttribute = Attribute("""
An example declared attribute, which should be proxied.""")
def yay(*a, **kw):
"""
A sample method which should be proxied.
"""
class IProxiedSubInterface(IProxiedInterface):
"""
An interface that derives from another for use with L{proxyForInterface}.
"""
def boo(self):
"""
A different sample method which should be proxied.
"""
@implementer(IProxiedInterface)
class Yayable(object):
"""
A provider of L{IProxiedInterface} which increments a counter for
every call to C{yay}.
@ivar yays: The number of times C{yay} has been called.
"""
def __init__(self):
self.yays = 0
self.yayArgs = []
def yay(self, *a, **kw):
"""
Increment C{self.yays}.
"""
self.yays += 1
self.yayArgs.append((a, kw))
return self.yays
@implementer(IProxiedSubInterface)
class Booable(object):
"""
An implementation of IProxiedSubInterface
"""
yayed = False
booed = False
def yay(self):
"""
Mark the fact that 'yay' has been called.
"""
self.yayed = True
def boo(self):
"""
Mark the fact that 'boo' has been called.
"""
self.booed = True
class IMultipleMethods(Interface):
"""
An interface with multiple methods.
"""
def methodOne():
"""
The first method. Should return 1.
"""
def methodTwo():
"""
The second method. Should return 2.
"""
class MultipleMethodImplementor(object):
"""
A precise implementation of L{IMultipleMethods}.
"""
def methodOne(self):
"""
@return: 1
"""
return 1
def methodTwo(self):
"""
@return: 2
"""
return 2
class ProxyForInterfaceTests(unittest.SynchronousTestCase):
"""
Tests for L{proxyForInterface}.
"""
def test_original(self):
"""
Proxy objects should have an C{original} attribute which refers to the
original object passed to the constructor.
"""
original = object()
proxy = proxyForInterface(IProxiedInterface)(original)
self.assertIdentical(proxy.original, original)
def test_proxyMethod(self):
"""
The class created from L{proxyForInterface} passes methods on an
interface to the object which is passed to its constructor.
"""
klass = proxyForInterface(IProxiedInterface)
yayable = Yayable()
proxy = klass(yayable)
proxy.yay()
self.assertEqual(proxy.yay(), 2)
self.assertEqual(yayable.yays, 2)
def test_proxyAttribute(self):
"""
Proxy objects should proxy declared attributes, but not other
attributes.
"""
yayable = Yayable()
yayable.ifaceAttribute = object()
proxy = proxyForInterface(IProxiedInterface)(yayable)
self.assertIdentical(proxy.ifaceAttribute, yayable.ifaceAttribute)
self.assertRaises(AttributeError, lambda: proxy.yays)
def test_proxySetAttribute(self):
"""
The attributes that proxy objects proxy should be assignable and affect
the original object.
"""
yayable = Yayable()
proxy = proxyForInterface(IProxiedInterface)(yayable)
thingy = object()
proxy.ifaceAttribute = thingy
self.assertIdentical(yayable.ifaceAttribute, thingy)
def test_proxyDeleteAttribute(self):
"""
The attributes that proxy objects proxy should be deletable and affect
the original object.
"""
yayable = Yayable()
yayable.ifaceAttribute = None
proxy = proxyForInterface(IProxiedInterface)(yayable)
del proxy.ifaceAttribute
self.assertFalse(hasattr(yayable, 'ifaceAttribute'))
def test_multipleMethods(self):
"""
[Regression test] The proxy should send its method calls to the correct
method, not the incorrect one.
"""
multi = MultipleMethodImplementor()
proxy = proxyForInterface(IMultipleMethods)(multi)
self.assertEqual(proxy.methodOne(), 1)
self.assertEqual(proxy.methodTwo(), 2)
def test_subclassing(self):
"""
It is possible to subclass the result of L{proxyForInterface}.
"""
class SpecializedProxy(proxyForInterface(IProxiedInterface)):
"""
A specialized proxy which can decrement the number of yays.
"""
def boo(self):
"""
Decrement the number of yays.
"""
self.original.yays -= 1
yayable = Yayable()
special = SpecializedProxy(yayable)
self.assertEqual(yayable.yays, 0)
special.boo()
self.assertEqual(yayable.yays, -1)
def test_proxyName(self):
"""
The name of a proxy class indicates which interface it proxies.
"""
proxy = proxyForInterface(IProxiedInterface)
self.assertEqual(
proxy.__name__,
"(Proxy for "
"twisted.python.test.test_components.IProxiedInterface)")
def test_implements(self):
"""
The resulting proxy implements the interface that it proxies.
"""
proxy = proxyForInterface(IProxiedInterface)
self.assertTrue(IProxiedInterface.implementedBy(proxy))
def test_proxyDescriptorGet(self):
"""
_ProxyDescriptor's __get__ method should return the appropriate
attribute of its argument's 'original' attribute if it is invoked with
an object. If it is invoked with None, it should return a false
class-method emulator instead.
For some reason, Python's documentation recommends to define
descriptors' __get__ methods with the 'type' parameter as optional,
despite the fact that Python itself never actually calls the descriptor
that way. This is probably to support 'foo.__get__(bar)' as an
idiom. Let's make sure that the behavior is correct. Since we don't
actually use the 'type' argument at all, this test calls it the
idiomatic way to ensure that signature works; test_proxyInheritance
verifies the how-Python-actually-calls-it signature.
"""
class Sample:
called = False
def hello(self):
self.called = True
fakeProxy = Sample()
testObject = Sample()
fakeProxy.original = testObject
pd = components._ProxyDescriptor("hello", "original")
self.assertEqual(pd.__get__(fakeProxy), testObject.hello)
fakeClassMethod = pd.__get__(None)
fakeClassMethod(fakeProxy)
self.failUnless(testObject.called)
def test_proxyInheritance(self):
"""
Subclasses of the class returned from L{proxyForInterface} should be
able to upcall methods by reference to their superclass, as any normal
Python class can.
"""
class YayableWrapper(proxyForInterface(IProxiedInterface)):
"""
This class does not override any functionality.
"""
class EnhancedWrapper(YayableWrapper):
"""
This class overrides the 'yay' method.
"""
wrappedYays = 1
def yay(self, *a, **k):
self.wrappedYays += 1
return YayableWrapper.yay(self, *a, **k) + 7
yayable = Yayable()
wrapper = EnhancedWrapper(yayable)
self.assertEqual(wrapper.yay(3, 4, x=5, y=6), 8)
self.assertEqual(yayable.yayArgs,
[((3, 4), dict(x=5, y=6))])
def test_interfaceInheritance(self):
"""
Proxies of subinterfaces generated with proxyForInterface should allow
access to attributes of both the child and the base interfaces.
"""
proxyClass = proxyForInterface(IProxiedSubInterface)
booable = Booable()
proxy = proxyClass(booable)
proxy.yay()
proxy.boo()
self.failUnless(booable.yayed)
self.failUnless(booable.booed)
def test_attributeCustomization(self):
"""
The original attribute name can be customized via the
C{originalAttribute} argument of L{proxyForInterface}: the attribute
should change, but the methods of the original object should still be
callable, and the attributes still accessible.
"""
yayable = Yayable()
yayable.ifaceAttribute = object()
proxy = proxyForInterface(
IProxiedInterface, originalAttribute='foo')(yayable)
self.assertIdentical(proxy.foo, yayable)
# Check the behavior
self.assertEqual(proxy.yay(), 1)
self.assertIdentical(proxy.ifaceAttribute, yayable.ifaceAttribute)
thingy = object()
proxy.ifaceAttribute = thingy
self.assertIdentical(yayable.ifaceAttribute, thingy)
del proxy.ifaceAttribute
self.assertFalse(hasattr(yayable, 'ifaceAttribute'))

File diff suppressed because it is too large

View file

@ -0,0 +1,917 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for Twisted's deprecation framework, L{twisted.python.deprecate}.
"""
from __future__ import division, absolute_import
import sys, types, warnings, inspect
from os.path import normcase
from warnings import simplefilter, catch_warnings
try:
from importlib import invalidate_caches
except ImportError:
invalidate_caches = None
from twisted.python import deprecate
from twisted.python.deprecate import _getDeprecationWarningString
from twisted.python.deprecate import DEPRECATION_WARNING_FORMAT
from twisted.python.deprecate import (
getDeprecationWarningString,
deprecated, _appendToDocstring, _getDeprecationDocstring,
_fullyQualifiedName as fullyQualifiedName,
_passed, _mutuallyExclusiveArguments
)
from twisted.python.versions import Version
from twisted.python.filepath import FilePath
from twisted.python.test import deprecatedattributes
from twisted.python.test.modules_helpers import TwistedModulesMixin
from twisted.trial.unittest import SynchronousTestCase
# Note that various tests in this module require manual encoding of paths to
# utf-8. This can be fixed once FilePath supports Unicode; see #2366, #4736,
# #5203.
class _MockDeprecatedAttribute(object):
"""
Mock of L{twisted.python.deprecate._DeprecatedAttribute}.
@ivar value: The value of the attribute.
"""
def __init__(self, value):
self.value = value
def get(self):
"""
Get a known value.
"""
return self.value
class ModuleProxyTests(SynchronousTestCase):
"""
Tests for L{twisted.python.deprecate._ModuleProxy}, which proxies
access to module-level attributes, intercepting access to deprecated
attributes and passing through access to normal attributes.
"""
def _makeProxy(self, **attrs):
"""
Create a temporary module proxy object.
@param attrs: Attributes to initialise on the temporary module object
@rtype: L{twisted.python.deprecate._ModuleProxy}
"""
mod = types.ModuleType('foo')
for key, value in attrs.items():
setattr(mod, key, value)
return deprecate._ModuleProxy(mod)
def test_getattrPassthrough(self):
"""
Getting a normal attribute on a L{twisted.python.deprecate._ModuleProxy}
retrieves the underlying attribute's value, and raises C{AttributeError}
if a non-existent attribute is accessed.
"""
proxy = self._makeProxy(SOME_ATTRIBUTE='hello')
self.assertIdentical(proxy.SOME_ATTRIBUTE, 'hello')
self.assertRaises(AttributeError, getattr, proxy, 'DOES_NOT_EXIST')
def test_getattrIntercept(self):
"""
Getting an attribute marked as being deprecated on
L{twisted.python.deprecate._ModuleProxy} results in calling the
deprecated wrapper's C{get} method.
"""
proxy = self._makeProxy()
_deprecatedAttributes = object.__getattribute__(
proxy, '_deprecatedAttributes')
_deprecatedAttributes['foo'] = _MockDeprecatedAttribute(42)
self.assertEqual(proxy.foo, 42)
def test_privateAttributes(self):
"""
Private attributes of L{twisted.python.deprecate._ModuleProxy} are
inaccessible when regular attribute access is used.
"""
proxy = self._makeProxy()
self.assertRaises(AttributeError, getattr, proxy, '_module')
self.assertRaises(
AttributeError, getattr, proxy, '_deprecatedAttributes')
def test_setattr(self):
"""
Setting attributes on L{twisted.python.deprecate._ModuleProxy} proxies
them through to the wrapped module.
"""
proxy = self._makeProxy()
proxy._module = 1
self.assertNotEquals(object.__getattribute__(proxy, '_module'), 1)
self.assertEqual(proxy._module, 1)
def test_repr(self):
"""
L{twisted.python.deprecate._ModuleProxy.__repr__} produces a string
containing the proxy type and a representation of the wrapped module
object.
"""
proxy = self._makeProxy()
realModule = object.__getattribute__(proxy, '_module')
self.assertEqual(
repr(proxy), '<%s module=%r>' % (type(proxy).__name__, realModule))
class DeprecatedAttributeTests(SynchronousTestCase):
"""
Tests for L{twisted.python.deprecate._DeprecatedAttribute} and
L{twisted.python.deprecate.deprecatedModuleAttribute}, which issue
warnings for deprecated module-level attributes.
"""
def setUp(self):
self.version = deprecatedattributes.version
self.message = deprecatedattributes.message
self._testModuleName = __name__ + '.foo'
def _getWarningString(self, attr):
"""
Create the warning string used by deprecated attributes.
"""
return _getDeprecationWarningString(
deprecatedattributes.__name__ + '.' + attr,
deprecatedattributes.version,
DEPRECATION_WARNING_FORMAT + ': ' + deprecatedattributes.message)
def test_deprecatedAttributeHelper(self):
"""
L{twisted.python.deprecate._DeprecatedAttribute} correctly sets its
__name__ to match that of the deprecated attribute and emits a warning
when the original attribute value is accessed.
"""
name = 'ANOTHER_DEPRECATED_ATTRIBUTE'
setattr(deprecatedattributes, name, 42)
attr = deprecate._DeprecatedAttribute(
deprecatedattributes, name, self.version, self.message)
self.assertEqual(attr.__name__, name)
# Since we're accessing the value getter directly, as opposed to via
# the module proxy, we need to match the warning's stack level.
def addStackLevel():
attr.get()
# Access the deprecated attribute.
addStackLevel()
warningsShown = self.flushWarnings([
self.test_deprecatedAttributeHelper])
self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
self.assertEqual(
warningsShown[0]['message'],
self._getWarningString(name))
self.assertEqual(len(warningsShown), 1)
def test_deprecatedAttribute(self):
"""
L{twisted.python.deprecate.deprecatedModuleAttribute} wraps a
module-level attribute in an object that emits a deprecation warning
when it is accessed the first time only, while leaving other unrelated
attributes alone.
"""
# Accessing non-deprecated attributes does not issue a warning.
deprecatedattributes.ANOTHER_ATTRIBUTE
warningsShown = self.flushWarnings([self.test_deprecatedAttribute])
self.assertEqual(len(warningsShown), 0)
name = 'DEPRECATED_ATTRIBUTE'
# Access the deprecated attribute. This uses getattr to avoid repeating
# the attribute name.
getattr(deprecatedattributes, name)
warningsShown = self.flushWarnings([self.test_deprecatedAttribute])
self.assertEqual(len(warningsShown), 1)
self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
self.assertEqual(
warningsShown[0]['message'],
self._getWarningString(name))
def test_wrappedModule(self):
"""
Deprecating an attribute in a module replaces and wraps that module
instance, in C{sys.modules}, with a
L{twisted.python.deprecate._ModuleProxy} instance but only if it hasn't
already been wrapped.
"""
sys.modules[self._testModuleName] = mod = types.ModuleType('foo')
self.addCleanup(sys.modules.pop, self._testModuleName)
setattr(mod, 'first', 1)
setattr(mod, 'second', 2)
deprecate.deprecatedModuleAttribute(
Version('Twisted', 8, 0, 0),
'message',
self._testModuleName,
'first')
proxy = sys.modules[self._testModuleName]
self.assertNotEqual(proxy, mod)
deprecate.deprecatedModuleAttribute(
Version('Twisted', 8, 0, 0),
'message',
self._testModuleName,
'second')
self.assertIdentical(proxy, sys.modules[self._testModuleName])
class ImportedModuleAttributeTests(TwistedModulesMixin, SynchronousTestCase):
"""
Tests for L{deprecatedModuleAttribute} which involve loading a module via
'import'.
"""
_packageInit = """\
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.python.versions import Version
deprecatedModuleAttribute(
Version('Package', 1, 2, 3), 'message', __name__, 'module')
"""
def pathEntryTree(self, tree):
"""
Create some files in a hierarchy, based on a dictionary describing those
files. The resulting hierarchy will be placed onto sys.path for the
duration of the test.
@param tree: A dictionary representing a directory structure. Keys are
strings, representing filenames, dictionary values represent
directories, string values represent file contents.
@return: another dictionary similar to the input, with file content
strings replaced with L{FilePath} objects pointing at where those
contents are now stored.
"""
def makeSomeFiles(pathobj, dirdict):
pathdict = {}
for (key, value) in dirdict.items():
child = pathobj.child(key)
if isinstance(value, bytes):
pathdict[key] = child
child.setContent(value)
elif isinstance(value, dict):
child.createDirectory()
pathdict[key] = makeSomeFiles(child, value)
else:
raise ValueError("only strings and dicts allowed as values")
return pathdict
base = FilePath(self.mktemp().encode("utf-8"))
base.makedirs()
result = makeSomeFiles(base, tree)
# On Python 3, sys.path cannot include byte paths:
self.replaceSysPath([base.path.decode("utf-8")] + sys.path)
self.replaceSysModules(sys.modules.copy())
return result
def simpleModuleEntry(self):
"""
Add a sample module and package to the path, returning a L{FilePath}
pointing at the module which will be loadable as C{package.module}.
"""
paths = self.pathEntryTree(
{b"package": {b"__init__.py": self._packageInit.encode("utf-8"),
b"module.py": b""}})
return paths[b'package'][b'module.py']
def checkOneWarning(self, modulePath):
"""
Verification logic for L{test_deprecatedModule}.
"""
from package import module
self.assertEqual(FilePath(module.__file__.encode("utf-8")),
modulePath)
emitted = self.flushWarnings([self.checkOneWarning])
self.assertEqual(len(emitted), 1)
self.assertEqual(emitted[0]['message'],
'package.module was deprecated in Package 1.2.3: '
'message')
self.assertEqual(emitted[0]['category'], DeprecationWarning)
def test_deprecatedModule(self):
"""
If L{deprecatedModuleAttribute} is used to deprecate a module attribute
of a package, only one deprecation warning is emitted when the
deprecated module is imported.
"""
self.checkOneWarning(self.simpleModuleEntry())
def test_deprecatedModuleMultipleTimes(self):
"""
If L{deprecatedModuleAttribute} is used to deprecate a module attribute
of a package, only one deprecation warning is emitted when the
deprecated module is subsequently imported.
"""
mp = self.simpleModuleEntry()
# The first time, the code needs to be loaded.
self.checkOneWarning(mp)
# The second time, things are slightly different; the object's already
# in the namespace.
self.checkOneWarning(mp)
# The third and fourth times, things should all be exactly the
# same, but this is a sanity check to make sure the implementation isn't
# special casing the second time. Also, putting these cases into a loop
# means that the stack will be identical, to make sure that the
# implementation doesn't rely too much on stack-crawling.
for x in range(2):
self.checkOneWarning(mp)
class WarnAboutFunctionTests(SynchronousTestCase):
"""
Tests for L{twisted.python.deprecate.warnAboutFunction} which allows the
callers of a function to issue a C{DeprecationWarning} about that function.
"""
def setUp(self):
"""
Create a file that will have known line numbers when emitting warnings.
"""
self.package = FilePath(self.mktemp().encode("utf-8")
).child(b'twisted_private_helper')
self.package.makedirs()
self.package.child(b'__init__.py').setContent(b'')
self.package.child(b'module.py').setContent(b'''
"A module string"
from twisted.python import deprecate
def testFunction():
"A doc string"
a = 1 + 2
return a
def callTestFunction():
b = testFunction()
if b == 3:
deprecate.warnAboutFunction(testFunction, "A Warning String")
''')
# Python 3 doesn't accept bytes in sys.path:
packagePath = self.package.parent().path.decode("utf-8")
sys.path.insert(0, packagePath)
self.addCleanup(sys.path.remove, packagePath)
modules = sys.modules.copy()
self.addCleanup(
lambda: (sys.modules.clear(), sys.modules.update(modules)))
def test_warning(self):
"""
L{deprecate.warnAboutFunction} emits a warning whose file and line number
point to the beginning of the implementation of the function passed to it.
"""
def aFunc():
pass
deprecate.warnAboutFunction(aFunc, 'A Warning Message')
warningsShown = self.flushWarnings()
filename = __file__
if filename.lower().endswith('.pyc'):
filename = filename[:-1]
self.assertSamePath(
FilePath(warningsShown[0]["filename"]), FilePath(filename))
self.assertEqual(warningsShown[0]["message"], "A Warning Message")
def test_warningLineNumber(self):
"""
L{deprecate.warnAboutFunction} emits a C{DeprecationWarning} with the
number of a line within the implementation of the function passed to it.
"""
from twisted_private_helper import module
module.callTestFunction()
warningsShown = self.flushWarnings()
self.assertSamePath(
FilePath(warningsShown[0]["filename"].encode("utf-8")),
self.package.sibling(b'twisted_private_helper').child(b'module.py'))
# Line number 9 is the last line in the testFunction in the helper
# module.
self.assertEqual(warningsShown[0]["lineno"], 9)
self.assertEqual(warningsShown[0]["message"], "A Warning String")
self.assertEqual(len(warningsShown), 1)
def assertSamePath(self, first, second):
"""
Assert that the two paths are the same, considering case normalization
appropriate for the current platform.
@type first: L{FilePath}
@type second: L{FilePath}
@raise C{self.failureType}: If the paths are not the same.
"""
self.assertTrue(
normcase(first.path) == normcase(second.path),
"%r != %r" % (first, second))
def test_renamedFile(self):
"""
Even if the implementation of a deprecated function is moved around on
the filesystem, the line number in the warning emitted by
L{deprecate.warnAboutFunction} points to a line in the implementation of
the deprecated function.
"""
from twisted_private_helper import module
# Clean up the state resulting from that import; we're not going to use
# this module, so it should go away.
del sys.modules['twisted_private_helper']
del sys.modules[module.__name__]
# Rename the source directory
self.package.moveTo(self.package.sibling(b'twisted_renamed_helper'))
# Make sure importlib notices we've changed importable packages:
if invalidate_caches:
invalidate_caches()
# Import the newly renamed version
from twisted_renamed_helper import module
self.addCleanup(sys.modules.pop, 'twisted_renamed_helper')
self.addCleanup(sys.modules.pop, module.__name__)
module.callTestFunction()
warningsShown = self.flushWarnings()
warnedPath = FilePath(warningsShown[0]["filename"].encode("utf-8"))
expectedPath = self.package.sibling(
b'twisted_renamed_helper').child(b'module.py')
self.assertSamePath(warnedPath, expectedPath)
self.assertEqual(warningsShown[0]["lineno"], 9)
self.assertEqual(warningsShown[0]["message"], "A Warning String")
self.assertEqual(len(warningsShown), 1)
def test_filteredWarning(self):
"""
L{deprecate.warnAboutFunction} emits a warning that will be filtered if
L{warnings.filterwarnings} is called with the module name of the
deprecated function.
"""
# Clean up anything *else* that might spuriously filter out the warning,
# such as the "always" simplefilter set up by unittest._collectWarnings.
# We'll also rely on trial to restore the original filters afterwards.
del warnings.filters[:]
warnings.filterwarnings(
action="ignore", module="twisted_private_helper")
from twisted_private_helper import module
module.callTestFunction()
warningsShown = self.flushWarnings()
self.assertEqual(len(warningsShown), 0)
def test_filteredOnceWarning(self):
"""
L{deprecate.warnAboutFunction} emits a warning that will be filtered
once if L{warnings.filterwarnings} is called with the module name of the
deprecated function and an action of once.
"""
# Clean up anything *else* that might spuriously filter out the warning,
# such as the "always" simplefilter set up by unittest._collectWarnings.
# We'll also rely on trial to restore the original filters afterwards.
del warnings.filters[:]
warnings.filterwarnings(
action="module", module="twisted_private_helper")
from twisted_private_helper import module
module.callTestFunction()
module.callTestFunction()
warningsShown = self.flushWarnings()
self.assertEqual(len(warningsShown), 1)
message = warningsShown[0]['message']
category = warningsShown[0]['category']
filename = warningsShown[0]['filename']
lineno = warningsShown[0]['lineno']
msg = warnings.formatwarning(message, category, filename, lineno)
self.assertTrue(
msg.endswith("module.py:9: DeprecationWarning: A Warning String\n"
" return a\n"),
"Unexpected warning string: %r" % (msg,))
def dummyCallable():
"""
Do nothing.
This is used to test the deprecation decorators.
"""
def dummyReplacementMethod():
"""
Do nothing.
This is used to test the replacement parameter to L{deprecated}.
"""
class TestDeprecationWarnings(SynchronousTestCase):
def test_getDeprecationWarningString(self):
"""
L{getDeprecationWarningString} returns a string that tells us that a
callable was deprecated at a certain released version of Twisted.
"""
version = Version('Twisted', 8, 0, 0)
self.assertEqual(
getDeprecationWarningString(self.test_getDeprecationWarningString,
version),
"%s.TestDeprecationWarnings.test_getDeprecationWarningString "
"was deprecated in Twisted 8.0.0" % (__name__,))
def test_getDeprecationWarningStringWithFormat(self):
"""
L{getDeprecationWarningString} returns a string that tells us that a
callable was deprecated at a certain released version of Twisted, with
a message containing additional information about the deprecation.
"""
version = Version('Twisted', 8, 0, 0)
format = DEPRECATION_WARNING_FORMAT + ': This is a message'
self.assertEqual(
getDeprecationWarningString(self.test_getDeprecationWarningString,
version, format),
'%s.TestDeprecationWarnings.test_getDeprecationWarningString was '
'deprecated in Twisted 8.0.0: This is a message' % (__name__,))
def test_deprecateEmitsWarning(self):
"""
Decorating a callable with L{deprecated} emits a warning.
"""
version = Version('Twisted', 8, 0, 0)
dummy = deprecated(version)(dummyCallable)
def addStackLevel():
dummy()
with catch_warnings(record=True) as caught:
simplefilter("always")
addStackLevel()
self.assertEqual(caught[0].category, DeprecationWarning)
self.assertEqual(str(caught[0].message), getDeprecationWarningString(dummyCallable, version))
# rstrip in case .pyc/.pyo
self.assertEqual(caught[0].filename.rstrip('co'), __file__.rstrip('co'))
def test_deprecatedPreservesName(self):
"""
The decorated function has the same name as the original.
"""
version = Version('Twisted', 8, 0, 0)
dummy = deprecated(version)(dummyCallable)
self.assertEqual(dummyCallable.__name__, dummy.__name__)
self.assertEqual(fullyQualifiedName(dummyCallable),
fullyQualifiedName(dummy))
def test_getDeprecationDocstring(self):
"""
L{_getDeprecationDocstring} returns a note about the deprecation to go
into a docstring.
"""
version = Version('Twisted', 8, 0, 0)
self.assertEqual(
"Deprecated in Twisted 8.0.0.",
_getDeprecationDocstring(version, ''))
def test_deprecatedUpdatesDocstring(self):
"""
The docstring of the deprecated function is appended with information
about the deprecation.
"""
def localDummyCallable():
"""
Do nothing.
This is used to test the deprecation decorators.
"""
version = Version('Twisted', 8, 0, 0)
dummy = deprecated(version)(localDummyCallable)
_appendToDocstring(
localDummyCallable,
_getDeprecationDocstring(version, ''))
self.assertEqual(localDummyCallable.__doc__, dummy.__doc__)
def test_versionMetadata(self):
"""
Deprecating a function adds version information to the decorated
version of that function.
"""
version = Version('Twisted', 8, 0, 0)
dummy = deprecated(version)(dummyCallable)
self.assertEqual(version, dummy.deprecatedVersion)
def test_getDeprecationWarningStringReplacement(self):
"""
L{getDeprecationWarningString} takes an additional replacement parameter
that can be used to add information to the deprecation. If the
replacement parameter is a string, it will be interpolated directly into
the result.
"""
version = Version('Twisted', 8, 0, 0)
warningString = getDeprecationWarningString(
self.test_getDeprecationWarningString, version,
replacement="something.foobar")
self.assertEqual(
warningString,
"%s was deprecated in Twisted 8.0.0; please use something.foobar "
"instead" % (
fullyQualifiedName(self.test_getDeprecationWarningString),))
def test_getDeprecationWarningStringReplacementWithCallable(self):
"""
L{getDeprecationWarningString} takes an additional replacement parameter
that can be used to add information to the deprecation. If the
replacement parameter is a callable, its fully qualified name will be
interpolated into the result.
"""
version = Version('Twisted', 8, 0, 0)
warningString = getDeprecationWarningString(
self.test_getDeprecationWarningString, version,
replacement=dummyReplacementMethod)
self.assertEqual(
warningString,
"%s was deprecated in Twisted 8.0.0; please use "
"%s.dummyReplacementMethod instead" % (
fullyQualifiedName(self.test_getDeprecationWarningString),
__name__))
def test_deprecatedReplacement(self):
"""
L{deprecated} takes an additional replacement parameter that can be used
to indicate the new, non-deprecated method developers should use. If
the replacement parameter is a string, it will be interpolated directly
into the warning message.
"""
version = Version('Twisted', 8, 0, 0)
dummy = deprecated(version, "something.foobar")(dummyCallable)
self.assertEqual(dummy.__doc__,
"\n"
" Do nothing.\n\n"
" This is used to test the deprecation decorators.\n\n"
" Deprecated in Twisted 8.0.0; please use "
"something.foobar"
" instead.\n"
" ")
def test_deprecatedReplacementWithCallable(self):
"""
L{deprecated} takes an additional replacement parameter that can be used
to indicate the new, non-deprecated method developers should use. If
the replacement parameter is a callable, its fully qualified name will
be interpolated into the warning message.
"""
version = Version('Twisted', 8, 0, 0)
decorator = deprecated(version, replacement=dummyReplacementMethod)
dummy = decorator(dummyCallable)
self.assertEqual(dummy.__doc__,
"\n"
" Do nothing.\n\n"
" This is used to test the deprecation decorators.\n\n"
" Deprecated in Twisted 8.0.0; please use "
"%s.dummyReplacementMethod instead.\n"
" " % (__name__,))
class TestAppendToDocstring(SynchronousTestCase):
"""
Test the _appendToDocstring function.
_appendToDocstring is used to add text to a docstring.
"""
def test_appendToEmptyDocstring(self):
"""
Appending to an empty docstring simply replaces the docstring.
"""
def noDocstring():
pass
_appendToDocstring(noDocstring, "Appended text.")
self.assertEqual("Appended text.", noDocstring.__doc__)
def test_appendToSingleLineDocstring(self):
"""
Appending to a single line docstring places the message on a new line,
with a blank line separating it from the rest of the docstring.
The docstring ends with a newline, conforming to Twisted and PEP 8
standards. Unfortunately, the indentation is incorrect, since the
existing docstring doesn't have enough info to help us indent
properly.
"""
def singleLineDocstring():
"""This doesn't comply with standards, but is here for a test."""
_appendToDocstring(singleLineDocstring, "Appended text.")
self.assertEqual(
["This doesn't comply with standards, but is here for a test.",
"",
"Appended text."],
singleLineDocstring.__doc__.splitlines())
self.assertTrue(singleLineDocstring.__doc__.endswith('\n'))
def test_appendToMultilineDocstring(self):
"""
Appending to a multi-line docstring places the message on a new line,
with a blank line separating it from the rest of the docstring.
Because we have multiple lines, we have enough information to do
indentation.
"""
def multiLineDocstring():
"""
This is a multi-line docstring.
"""
def expectedDocstring():
"""
This is a multi-line docstring.
Appended text.
"""
_appendToDocstring(multiLineDocstring, "Appended text.")
self.assertEqual(
expectedDocstring.__doc__, multiLineDocstring.__doc__)
class MutualArgumentExclusionTests(SynchronousTestCase):
"""
Tests for L{mutuallyExclusiveArguments}.
"""
def checkPassed(self, func, *args, **kw):
"""
Test an invocation of L{passed} with the given function, arguments, and
keyword arguments.
@param func: A function whose argspec to pass to L{_passed}.
@type func: A callable.
@param args: The arguments which could be passed to L{func}.
@param kw: The keyword arguments which could be passed to L{func}.
@return: L{_passed}'s return value
@rtype: L{dict}
"""
return _passed(inspect.getargspec(func), args, kw)
def test_passed_simplePositional(self):
"""
L{passed} identifies the arguments passed by a simple
positional test.
"""
def func(a, b):
pass
self.assertEqual(self.checkPassed(func, 1, 2), dict(a=1, b=2))
def test_passed_tooManyArgs(self):
"""
L{passed} raises a L{TypeError} if too many arguments are
passed.
"""
def func(a, b):
pass
self.assertRaises(TypeError, self.checkPassed, func, 1, 2, 3)
def test_passed_doublePassKeyword(self):
"""
L{passed} raises a L{TypeError} if an argument is passed both
positionally and by keyword.
"""
def func(a):
pass
self.assertRaises(TypeError, self.checkPassed, func, 1, a=2)
def test_passed_unspecifiedKeyword(self):
"""
L{passed} raises a L{TypeError} if a keyword argument not
present in the function's declaration is passed.
"""
def func(a):
pass
self.assertRaises(TypeError, self.checkPassed, func, 1, z=2)
def test_passed_star(self):
"""
L{passed} places additional positional arguments into a tuple
under the name of the star argument.
"""
def func(a, *b):
pass
self.assertEqual(self.checkPassed(func, 1, 2, 3),
dict(a=1, b=(2, 3)))
def test_passed_starStar(self):
"""
Additional keyword arguments are passed as a dict to the star star
keyword argument.
"""
def func(a, **b):
pass
self.assertEqual(self.checkPassed(func, 1, x=2, y=3, z=4),
dict(a=1, b=dict(x=2, y=3, z=4)))
def test_passed_noDefaultValues(self):
"""
The results of L{passed} only include arguments explicitly
passed, not default values.
"""
def func(a, b, c=1, d=2, e=3):
pass
self.assertEqual(self.checkPassed(func, 1, 2, e=7),
dict(a=1, b=2, e=7))
def test_mutualExclusionPrimeDirective(self):
"""
L{mutuallyExclusiveArguments} does not interfere in its
decoratee's operation, either its receipt of arguments or its return
value.
"""
@_mutuallyExclusiveArguments([('a', 'b')])
def func(x, y, a=3, b=4):
return x + y + a + b
self.assertEqual(func(1, 2), 10)
self.assertEqual(func(1, 2, 7), 14)
self.assertEqual(func(1, 2, b=7), 13)
def test_mutualExclusionExcludesByKeyword(self):
"""
L{mutuallyExclusiveArguments} raises a L{TypeError} if its
decoratee is passed a pair of mutually exclusive arguments.
"""
@_mutuallyExclusiveArguments([['a', 'b']])
def func(a=3, b=4):
return a + b
self.assertRaises(TypeError, func, a=3, b=4)
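As a usage sketch of the decorator exercised above (the function names are illustrative): calling a callable wrapped with deprecated emits a DeprecationWarning naming the version and, when given, the replacement.

from twisted.python.deprecate import deprecated
from twisted.python.versions import Version

def newThing():
    return 42

@deprecated(Version("Twisted", 13, 0, 0), replacement=newThing)
def oldThing():
    return 42

# Emits a DeprecationWarning of the form
# "...oldThing was deprecated in Twisted 13.0.0; please use ...newThing instead".
oldThing()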

View file

@ -0,0 +1,454 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for parts of our release automation system.
"""
import os
import sys
from distutils.core import Distribution
from twisted.trial.unittest import TestCase
from twisted.python import dist
from twisted.python.dist import (get_setup_args, ConditionalExtension,
build_scripts_twisted)
from twisted.python.filepath import FilePath
class SetupTest(TestCase):
"""
Tests for L{get_setup_args}.
"""
def test_conditionalExtensions(self):
"""
Passing C{conditionalExtensions} as a list of L{ConditionalExtension}
objects to get_setup_args inserts a custom build_ext into the result
which knows how to check whether they should be built.
"""
good_ext = ConditionalExtension("whatever", ["whatever.c"],
condition=lambda b: True)
bad_ext = ConditionalExtension("whatever", ["whatever.c"],
condition=lambda b: False)
args = get_setup_args(conditionalExtensions=[good_ext, bad_ext])
# ext_modules should be set even though it's not used. See comment
# in get_setup_args
self.assertEqual(args["ext_modules"], [good_ext, bad_ext])
cmdclass = args["cmdclass"]
build_ext = cmdclass["build_ext"]
builder = build_ext(Distribution())
builder.prepare_extensions()
self.assertEqual(builder.extensions, [good_ext])
def test_win32Definition(self):
"""
When building on Windows NT, the WIN32 macro will be defined as 1.
"""
ext = ConditionalExtension("whatever", ["whatever.c"],
define_macros=[("whatever", 2)])
args = get_setup_args(conditionalExtensions=[ext])
builder = args["cmdclass"]["build_ext"](Distribution())
self.patch(os, "name", "nt")
builder.prepare_extensions()
self.assertEqual(ext.define_macros, [("whatever", 2), ("WIN32", 1)])
class GetExtensionsTest(TestCase):
"""
Tests for L{dist.getExtensions}.
"""
setupTemplate = (
"from twisted.python.dist import ConditionalExtension\n"
"extensions = [\n"
" ConditionalExtension(\n"
" '%s', ['twisted/some/thing.c'],\n"
" condition=lambda builder: True)\n"
" ]\n")
def setUp(self):
self.basedir = FilePath(self.mktemp()).child("twisted")
self.basedir.makedirs()
self.addCleanup(os.chdir, os.getcwd())
os.chdir(self.basedir.parent().path)
def writeSetup(self, name, *path):
"""
Write out a C{setup.py} file to a location determined by
L{self.basedir} and L{path}. L{self.setupTemplate} is used to
generate its contents.
"""
outdir = self.basedir.descendant(path)
outdir.makedirs()
setup = outdir.child("setup.py")
setup.setContent(self.setupTemplate % (name,))
def writeEmptySetup(self, *path):
"""
Write out an empty C{setup.py} file to a location determined by
L{self.basedir} and L{path}.
"""
outdir = self.basedir.descendant(path)
outdir.makedirs()
outdir.child("setup.py").setContent("")
def assertExtensions(self, expected):
"""
Assert that the given names match the (sorted) names of discovered
extensions.
"""
extensions = dist.getExtensions()
names = [extension.name for extension in extensions]
self.assertEqual(sorted(names), expected)
def test_getExtensions(self):
"""
Files named I{setup.py} in I{twisted/topfiles} and I{twisted/*/topfiles}
are executed with L{execfile} in order to discover the extensions they
declare.
"""
self.writeSetup("twisted.transmutate", "topfiles")
self.writeSetup("twisted.tele.port", "tele", "topfiles")
self.assertExtensions(["twisted.tele.port", "twisted.transmutate"])
def test_getExtensionsTooDeep(self):
"""
Files named I{setup.py} in I{topfiles} directories are not considered if
they are too deep in the directory hierarchy.
"""
self.writeSetup("twisted.trans.mog.rify", "trans", "mog", "topfiles")
self.assertExtensions([])
def test_getExtensionsNotTopfiles(self):
"""
The folder in which I{setup.py} is discovered must be called I{topfiles}
otherwise it is ignored.
"""
self.writeSetup("twisted.metamorphosis", "notfiles")
self.assertExtensions([])
def test_getExtensionsNotSupportedOnJava(self):
"""
Extensions are not supported on Java-based platforms.
"""
self.addCleanup(setattr, sys, "platform", sys.platform)
sys.platform = "java"
self.writeSetup("twisted.sorcery", "topfiles")
self.assertExtensions([])
def test_getExtensionsExtensionsLocalIsOptional(self):
"""
It is acceptable for extensions to not define the C{extensions} local
variable.
"""
self.writeEmptySetup("twisted.necromancy", "topfiles")
self.assertExtensions([])
class GetVersionTest(TestCase):
"""
Tests for L{dist.getVersion}.
"""
def setUp(self):
self.dirname = self.mktemp()
os.mkdir(self.dirname)
def test_getVersionCore(self):
"""
Test that getting the version of core reads from the
[base]/_version.py file.
"""
f = open(os.path.join(self.dirname, "_version.py"), "w")
f.write("""
from twisted.python import versions
version = versions.Version("twisted", 0, 1, 2)
""")
f.close()
self.assertEqual(dist.getVersion("core", base=self.dirname), "0.1.2")
def test_getVersionOther(self):
"""
Test that getting the version of a non-core project reads from
the [base]/[projname]/_version.py file.
"""
os.mkdir(os.path.join(self.dirname, "blat"))
f = open(os.path.join(self.dirname, "blat", "_version.py"), "w")
f.write("""
from twisted.python import versions
version = versions.Version("twisted.blat", 9, 8, 10)
""")
f.close()
self.assertEqual(dist.getVersion("blat", base=self.dirname), "9.8.10")
class GetScriptsTest(TestCase):
"""
Tests for L{dist.getScripts} which returns the scripts which should be
included in the distribution of a project.
"""
def test_scriptsInSVN(self):
"""
getScripts should return the scripts associated with a project
in the context of Twisted SVN.
"""
basedir = self.mktemp()
os.mkdir(basedir)
os.mkdir(os.path.join(basedir, 'bin'))
os.mkdir(os.path.join(basedir, 'bin', 'proj'))
f = open(os.path.join(basedir, 'bin', 'proj', 'exy'), 'w')
f.write('yay')
f.close()
scripts = dist.getScripts('proj', basedir=basedir)
self.assertEqual(len(scripts), 1)
self.assertEqual(os.path.basename(scripts[0]), 'exy')
def test_excludedPreamble(self):
"""
L{dist.getScripts} includes neither C{"_preamble.py"} nor
C{"_preamble.pyc"}.
"""
basedir = FilePath(self.mktemp())
bin = basedir.child('bin')
bin.makedirs()
bin.child('_preamble.py').setContent('some preamble code\n')
bin.child('_preamble.pyc').setContent('some preamble byte code\n')
bin.child('program').setContent('good program code\n')
scripts = dist.getScripts("", basedir=basedir.path)
self.assertEqual(scripts, [bin.child('program').path])
def test_scriptsInRelease(self):
"""
getScripts should return the scripts associated with a project
in the context of a released subproject tarball.
"""
basedir = self.mktemp()
os.mkdir(basedir)
os.mkdir(os.path.join(basedir, 'bin'))
f = open(os.path.join(basedir, 'bin', 'exy'), 'w')
f.write('yay')
f.close()
scripts = dist.getScripts('proj', basedir=basedir)
self.assertEqual(len(scripts), 1)
self.assertEqual(os.path.basename(scripts[0]), 'exy')
def test_noScriptsInSVN(self):
"""
When calling getScripts for a project which doesn't actually
have any scripts, in the context of an SVN checkout, an
empty list should be returned.
"""
basedir = self.mktemp()
os.mkdir(basedir)
os.mkdir(os.path.join(basedir, 'bin'))
os.mkdir(os.path.join(basedir, 'bin', 'otherproj'))
scripts = dist.getScripts('noscripts', basedir=basedir)
self.assertEqual(scripts, [])
def test_getScriptsTopLevel(self):
"""
Passing the empty string to getScripts returns scripts that are (only)
in the top level bin directory.
"""
basedir = FilePath(self.mktemp())
basedir.createDirectory()
bindir = basedir.child("bin")
bindir.createDirectory()
included = bindir.child("included")
included.setContent("yay included")
subdir = bindir.child("subdir")
subdir.createDirectory()
subdir.child("not-included").setContent("not included")
scripts = dist.getScripts("", basedir=basedir.path)
self.assertEqual(scripts, [included.path])
def test_noScriptsInSubproject(self):
"""
When calling getScripts for a project which doesn't actually
have any scripts in the context of that project's individual
project structure, an empty list should be returned.
"""
basedir = self.mktemp()
os.mkdir(basedir)
scripts = dist.getScripts('noscripts', basedir=basedir)
self.assertEqual(scripts, [])
class DummyCommand:
"""
A fake Command.
"""
def __init__(self, **kwargs):
for kw, val in kwargs.items():
setattr(self, kw, val)
def ensure_finalized(self):
pass
class BuildScriptsTest(TestCase):
"""
Tests for L{dist.build_scripts_twisted}.
"""
def setUp(self):
self.source = FilePath(self.mktemp())
self.target = FilePath(self.mktemp())
self.source.makedirs()
self.addCleanup(os.chdir, os.getcwd())
os.chdir(self.source.path)
def buildScripts(self):
"""
Write 3 types of scripts and run the L{build_scripts_twisted}
command.
"""
self.writeScript(self.source, "script1",
("#! /usr/bin/env python2.7\n"
"# bogus script w/ Python sh-bang\n"
"pass\n"))
self.writeScript(self.source, "script2.py",
("#!/usr/bin/python\n"
"# bogus script w/ Python sh-bang\n"
"pass\n"))
self.writeScript(self.source, "shell.sh",
("#!/bin/sh\n"
"# bogus shell script w/ sh-bang\n"
"exit 0\n"))
expected = ['script1', 'script2.py', 'shell.sh']
cmd = self.getBuildScriptsCmd(self.target,
[self.source.child(fn).path
for fn in expected])
cmd.finalize_options()
cmd.run()
return self.target.listdir()
def getBuildScriptsCmd(self, target, scripts):
"""
Create a distutils L{Distribution} with a L{DummyCommand} and wrap it
in L{build_scripts_twisted}.
@type target: L{FilePath}
"""
dist = Distribution()
dist.scripts = scripts
dist.command_obj["build"] = DummyCommand(
build_scripts = target.path,
force = 1,
executable = sys.executable
)
return build_scripts_twisted(dist)
def writeScript(self, dir, name, text):
"""
Write the script to disk.
"""
with open(dir.child(name).path, "w") as f:
f.write(text)
def test_notWindows(self):
"""
L{build_scripts_twisted} does not rename scripts on non-Windows
platforms.
"""
self.patch(os, "name", "twisted")
built = self.buildScripts()
for name in ['script1', 'script2.py', 'shell.sh']:
self.assertTrue(name in built)
def test_windows(self):
"""
L{build_scripts_twisted} renames scripts so they end with '.py' on
the Windows platform.
"""
self.patch(os, "name", "nt")
built = self.buildScripts()
for name in ['script1.py', 'script2.py', 'shell.sh.py']:
self.assertTrue(name in built)
class FakeModule(object):
"""
A fake module, suitable for dependency injection in testing.
"""
def __init__(self, attrs):
"""
Initializes a fake module.
@param attrs: The attrs that will be accessible on the module.
@type attrs: C{dict} of C{str} (Python names) to objects
"""
self._attrs = attrs
def __getattr__(self, name):
"""
Gets an attribute of this fake module from its attrs.
@raise AttributeError: When the requested attribute is missing.
"""
try:
return self._attrs[name]
except KeyError:
raise AttributeError()
fakeCPythonPlatform = FakeModule({"python_implementation": lambda: "CPython"})
fakeOtherPlatform = FakeModule({"python_implementation": lambda: "lvhpy"})
class WithPlatformTests(TestCase):
"""
Tests for L{_checkCPython} when used with a (fake) C{platform} module.
"""
def test_cpython(self):
"""
L{_checkCPython} returns C{True} when C{platform.python_implementation}
says we're running on CPython.
"""
self.assertTrue(dist._checkCPython(platform=fakeCPythonPlatform))
def test_other(self):
"""
L{_checkCPython} returns C{False} when C{platform.python_implementation}
says we're not running on CPython.
"""
self.assertFalse(dist._checkCPython(platform=fakeOtherPlatform))
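A sketch of the topfiles/setup.py shape that dist.getExtensions discovers, following the setupTemplate used in GetExtensionsTest above; the module and source file names are placeholders.

from twisted.python.dist import ConditionalExtension

extensions = [
    ConditionalExtension(
        'twisted.example._speedups', ['twisted/example/_speedups.c'],
        # The condition receives the build_ext command object and decides
        # whether this extension should be compiled on the current platform.
        condition=lambda builder: True),
    ]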

View file

@ -0,0 +1,42 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.dist3}.
"""
from __future__ import division
import os
from twisted.trial.unittest import TestCase
from twisted.python.dist3 import modulesToInstall
class ModulesToInstallTests(TestCase):
"""
Tests for L{modulesToInstall}.
"""
def test_sanityCheck(self):
"""
L{modulesToInstall} includes some obvious module names.
"""
self.assertIn("twisted.internet.reactor", modulesToInstall)
self.assertIn("twisted.python.test.test_dist3", modulesToInstall)
def test_exist(self):
"""
All modules listed in L{modulesToInstall} exist.
"""
import twisted
root = os.path.dirname(os.path.dirname(twisted.__file__))
for module in modulesToInstall:
segments = module.split(".")
segments[-1] += ".py"
path = os.path.join(root, *segments)
alternateSegments = module.split(".") + ["__init__.py"]
packagePath = os.path.join(root, *alternateSegments)
self.assertTrue(os.path.exists(path) or
os.path.exists(packagePath),
"Module {0} does not exist".format(module))

View file

@ -0,0 +1,414 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.fakepwd}.
"""
try:
import pwd
except ImportError:
pwd = None
try:
import spwd
except ImportError:
spwd = None
import os
from operator import getitem
from twisted.trial.unittest import TestCase
from twisted.python.fakepwd import UserDatabase, ShadowDatabase
SYSTEM_UID_MAX = 999
def findInvalidUID():
"""
By convention, UIDs less than 1000 are reserved for the system. A system
which allocated every single one of those UIDs would likely have practical
problems with allocating new ones, so let's assume that we'll be able to
find one. (If we don't, this will wrap around to negative values and
I{eventually} find something.)
@return: a user ID which does not exist on the local system. Or, on
systems without a L{pwd} module, return C{SYSTEM_UID_MAX}.
"""
guess = SYSTEM_UID_MAX
if pwd is not None:
while True:
try:
pwd.getpwuid(guess)
except KeyError:
break
else:
guess -= 1
return guess
INVALID_UID = findInvalidUID()
class UserDatabaseTestsMixin:
"""
L{UserDatabaseTestsMixin} defines tests which apply to any user database
implementation. Subclasses should mix it in, implement C{setUp} to create
C{self.database} bound to a user database instance, and implement
C{getExistingUserInfo} to return information about a user (such information
should be unique per test method).
"""
def test_getpwuid(self):
"""
I{getpwuid} accepts a uid and returns the user record associated with
it.
"""
for i in range(2):
# Get some user which exists in the database.
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
# Now try to look it up and make sure the result is correct.
entry = self.database.getpwuid(uid)
self.assertEqual(entry.pw_name, username)
self.assertEqual(entry.pw_passwd, password)
self.assertEqual(entry.pw_uid, uid)
self.assertEqual(entry.pw_gid, gid)
self.assertEqual(entry.pw_gecos, gecos)
self.assertEqual(entry.pw_dir, dir)
self.assertEqual(entry.pw_shell, shell)
def test_noSuchUID(self):
"""
I{getpwuid} raises L{KeyError} when passed a uid which does not exist
in the user database.
"""
self.assertRaises(KeyError, self.database.getpwuid, INVALID_UID)
def test_getpwnam(self):
"""
I{getpwnam} accepts a username and returns the user record associated
with it.
"""
for i in range(2):
# Get some user which exists in the database.
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
# Now try to look it up and make sure the result is correct.
entry = self.database.getpwnam(username)
self.assertEqual(entry.pw_name, username)
self.assertEqual(entry.pw_passwd, password)
self.assertEqual(entry.pw_uid, uid)
self.assertEqual(entry.pw_gid, gid)
self.assertEqual(entry.pw_gecos, gecos)
self.assertEqual(entry.pw_dir, dir)
self.assertEqual(entry.pw_shell, shell)
def test_noSuchName(self):
"""
I{getpwnam} raises L{KeyError} when passed a username which does not
exist in the user database.
"""
self.assertRaises(
KeyError, self.database.getpwnam,
'no' 'such' 'user' 'exists' 'the' 'name' 'is' 'too' 'long' 'and' 'has'
'\1' 'in' 'it' 'too')
def test_recordLength(self):
"""
The user record returned by I{getpwuid}, I{getpwnam}, and I{getpwall}
has a length.
"""
db = self.database
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
for entry in [db.getpwuid(uid), db.getpwnam(username), db.getpwall()[0]]:
self.assertIsInstance(len(entry), int)
self.assertEqual(len(entry), 7)
def test_recordIndexable(self):
"""
The user record returned by I{getpwuid}, I{getpwnam}, and I{getpwall}
is indexable, with successive indexes starting from 0 corresponding to
the values of the C{pw_name}, C{pw_passwd}, C{pw_uid}, C{pw_gid},
C{pw_gecos}, C{pw_dir}, and C{pw_shell} attributes, respectively.
"""
db = self.database
username, password, uid, gid, gecos, dir, shell = self.getExistingUserInfo()
for entry in [db.getpwuid(uid), db.getpwnam(username), db.getpwall()[0]]:
self.assertEqual(entry[0], username)
self.assertEqual(entry[1], password)
self.assertEqual(entry[2], uid)
self.assertEqual(entry[3], gid)
self.assertEqual(entry[4], gecos)
self.assertEqual(entry[5], dir)
self.assertEqual(entry[6], shell)
self.assertEqual(len(entry), len(list(entry)))
self.assertRaises(IndexError, getitem, entry, 7)
class UserDatabaseTests(TestCase, UserDatabaseTestsMixin):
"""
Tests for L{UserDatabase}.
"""
def setUp(self):
"""
Create a L{UserDatabase} with no user data in it.
"""
self.database = UserDatabase()
self._counter = SYSTEM_UID_MAX + 1
def getExistingUserInfo(self):
"""
Add a new user to C{self.database} and return its information.
"""
self._counter += 1
suffix = '_' + str(self._counter)
username = 'username' + suffix
password = 'password' + suffix
uid = self._counter
gid = self._counter + 1000
gecos = 'gecos' + suffix
dir = 'dir' + suffix
shell = 'shell' + suffix
self.database.addUser(username, password, uid, gid, gecos, dir, shell)
return (username, password, uid, gid, gecos, dir, shell)
def test_addUser(self):
"""
L{UserDatabase.addUser} accepts seven arguments, one for each field of
a L{pwd.struct_passwd}, and makes the new record available via
L{UserDatabase.getpwuid}, L{UserDatabase.getpwnam}, and
L{UserDatabase.getpwall}.
"""
username = 'alice'
password = 'secr3t'
uid = 123
gid = 456
gecos = 'Alice,,,'
home = '/users/alice'
shell = '/usr/bin/foosh'
db = self.database
db.addUser(username, password, uid, gid, gecos, home, shell)
for [entry] in [[db.getpwuid(uid)], [db.getpwnam(username)],
db.getpwall()]:
self.assertEqual(entry.pw_name, username)
self.assertEqual(entry.pw_passwd, password)
self.assertEqual(entry.pw_uid, uid)
self.assertEqual(entry.pw_gid, gid)
self.assertEqual(entry.pw_gecos, gecos)
self.assertEqual(entry.pw_dir, home)
self.assertEqual(entry.pw_shell, shell)
class PwdModuleTests(TestCase, UserDatabaseTestsMixin):
"""
L{PwdModuleTests} runs the tests defined by L{UserDatabaseTestsMixin}
against the built-in C{pwd} module. This serves to verify that
L{UserDatabase} is really a fake of that API.
"""
if pwd is None:
skip = "Cannot verify UserDatabase against pwd without pwd"
else:
database = pwd
def setUp(self):
self._users = iter(self.database.getpwall())
self._uids = set()
def getExistingUserInfo(self):
"""
Read and return the next record from C{self._users}, filtering out
any records with previously seen uid values (as these cannot be
found with C{getpwuid} and only cause trouble).
"""
while True:
entry = next(self._users)
uid = entry.pw_uid
if uid not in self._uids:
self._uids.add(uid)
return entry
class ShadowDatabaseTestsMixin:
"""
L{ShadowDatabaseTestsMixin} defines tests which apply to any shadow user
database implementation. Subclasses should mix it in, implement C{setUp} to
create C{self.database} bound to a shadow user database instance, and
implement C{getExistingUserInfo} to return information about a user (such
information should be unique per test method).
"""
def test_getspnam(self):
"""
L{getspnam} accepts a username and returns the user record associated
with it.
"""
for i in range(2):
# Get some user which exists in the database.
(username, password, lastChange, min, max, warn, inact, expire,
flag) = self.getExistingUserInfo()
entry = self.database.getspnam(username)
self.assertEqual(entry.sp_nam, username)
self.assertEqual(entry.sp_pwd, password)
self.assertEqual(entry.sp_lstchg, lastChange)
self.assertEqual(entry.sp_min, min)
self.assertEqual(entry.sp_max, max)
self.assertEqual(entry.sp_warn, warn)
self.assertEqual(entry.sp_inact, inact)
self.assertEqual(entry.sp_expire, expire)
self.assertEqual(entry.sp_flag, flag)
def test_noSuchName(self):
"""
I{getspnam} raises L{KeyError} when passed a username which does not
exist in the user database.
"""
self.assertRaises(KeyError, self.database.getspnam, "alice")
def test_recordLength(self):
"""
The shadow user record returned by I{getspnam} and I{getspall} has a
length.
"""
db = self.database
username = self.getExistingUserInfo()[0]
for entry in [db.getspnam(username), db.getspall()[0]]:
self.assertIsInstance(len(entry), int)
self.assertEqual(len(entry), 9)
def test_recordIndexable(self):
"""
The shadow user record returned by I{getspnam} and I{getspall} is
indexable, with successive indexes starting from 0 corresponding to the
values of the C{sp_nam}, C{sp_pwd}, C{sp_lstchg}, C{sp_min}, C{sp_max},
C{sp_warn}, C{sp_inact}, C{sp_expire}, and C{sp_flag} attributes,
respectively.
"""
db = self.database
(username, password, lastChange, min, max, warn, inact, expire,
flag) = self.getExistingUserInfo()
for entry in [db.getspnam(username), db.getspall()[0]]:
self.assertEqual(entry[0], username)
self.assertEqual(entry[1], password)
self.assertEqual(entry[2], lastChange)
self.assertEqual(entry[3], min)
self.assertEqual(entry[4], max)
self.assertEqual(entry[5], warn)
self.assertEqual(entry[6], inact)
self.assertEqual(entry[7], expire)
self.assertEqual(entry[8], flag)
self.assertEqual(len(entry), len(list(entry)))
self.assertRaises(IndexError, getitem, entry, 9)
class ShadowDatabaseTests(TestCase, ShadowDatabaseTestsMixin):
"""
Tests for L{ShadowDatabase}.
"""
def setUp(self):
"""
Create a L{ShadowDatabase} with no user data in it.
"""
self.database = ShadowDatabase()
self._counter = 0
def getExistingUserInfo(self):
"""
Add a new user to C{self.database} and return its information.
"""
self._counter += 1
suffix = '_' + str(self._counter)
username = 'username' + suffix
password = 'password' + suffix
lastChange = self._counter + 1
min = self._counter + 2
max = self._counter + 3
warn = self._counter + 4
inact = self._counter + 5
expire = self._counter + 6
flag = self._counter + 7
self.database.addUser(username, password, lastChange, min, max, warn,
inact, expire, flag)
return (username, password, lastChange, min, max, warn, inact,
expire, flag)
def test_addUser(self):
"""
L{ShadowDatabase.addUser} accepts nine arguments, one for each field of
a L{spwd.struct_spwd}, and makes the new record available via
L{ShadowDatabase.getspnam} and L{ShadowDatabase.getspall}.
"""
username = 'alice'
password = 'secr3t'
lastChange = 17
min = 42
max = 105
warn = 12
inact = 3
expire = 400
flag = 3
db = self.database
db.addUser(username, password, lastChange, min, max, warn, inact,
expire, flag)
for [entry] in [[db.getspnam(username)], db.getspall()]:
self.assertEqual(entry.sp_nam, username)
self.assertEqual(entry.sp_pwd, password)
self.assertEqual(entry.sp_lstchg, lastChange)
self.assertEqual(entry.sp_min, min)
self.assertEqual(entry.sp_max, max)
self.assertEqual(entry.sp_warn, warn)
self.assertEqual(entry.sp_inact, inact)
self.assertEqual(entry.sp_expire, expire)
self.assertEqual(entry.sp_flag, flag)
class SPwdModuleTests(TestCase, ShadowDatabaseTestsMixin):
"""
L{SPwdModuleTests} runs the tests defined by L{ShadowDatabaseTestsMixin}
against the built-in C{spwd} module. This serves to verify that
L{ShadowDatabase} is really a fake of that API.
"""
if spwd is None:
skip = "Cannot verify ShadowDatabase against spwd without spwd"
elif os.getuid() != 0:
skip = "Cannot access shadow user database except as root"
else:
database = spwd
def setUp(self):
self._users = iter(self.database.getspall())
def getExistingUserInfo(self):
"""
Read and return the next record from C{self._users}.
"""
return next(self._users)
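A minimal sketch of UserDatabase as a drop-in stand-in for the pwd module, using the seven struct_passwd fields exercised above; the user values are illustrative.

from twisted.python.fakepwd import UserDatabase

db = UserDatabase()
db.addUser('alice', 'secr3t', 1000, 1000, 'Alice,,,', '/home/alice', '/bin/sh')
# Records come back as struct_passwd-like entries.
assert db.getpwnam('alice').pw_uid == 1000
assert db.getpwuid(1000).pw_dir == '/home/alice'
assert len(db.getpwall()) == 1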

View file

@ -0,0 +1,112 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.hashlib}
"""
from twisted.trial.unittest import TestCase
from twisted.trial import util
class HashObjectTests(TestCase):
"""
Tests for the hash object APIs presented by L{hashlib}, C{md5} and C{sha1}.
"""
def test_deprecation(self):
"""
Ensure the deprecation of L{twisted.python.hashlib} is working.
"""
from twisted.python import hashlib
warnings = self.flushWarnings(
offendingFunctions=[self.test_deprecation])
self.assertIdentical(warnings[0]['category'], DeprecationWarning)
self.assertEqual(len(warnings), 1)
self.assertEqual(warnings[0]['message'],
"twisted.python.hashlib was deprecated in "
"Twisted 13.1.0: Please use hashlib from stdlib.")
def test_md5(self):
"""
L{hashlib.md5} returns an object which can be used to compute an MD5
hash as defined by U{RFC 1321<http://www.ietf.org/rfc/rfc1321.txt>}.
"""
from twisted.python.hashlib import md5
# Test the result using values from section A.5 of the RFC.
self.assertEqual(
md5().hexdigest(), "d41d8cd98f00b204e9800998ecf8427e")
self.assertEqual(
md5("a").hexdigest(), "0cc175b9c0f1b6a831c399e269772661")
self.assertEqual(
md5("abc").hexdigest(), "900150983cd24fb0d6963f7d28e17f72")
self.assertEqual(
md5("message digest").hexdigest(),
"f96b697d7cb7938d525a2f31aaf161d0")
self.assertEqual(
md5("abcdefghijklmnopqrstuvwxyz").hexdigest(),
"c3fcd3d76192e4007dfb496cca67e13b")
self.assertEqual(
md5("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
"0123456789").hexdigest(),
"d174ab98d277d9f5a5611c2c9f419d9f")
self.assertEqual(
md5("1234567890123456789012345678901234567890123456789012345678901"
"2345678901234567890").hexdigest(),
"57edf4a22be3c955ac49da2e2107b67a")
# It should have digest and update methods, too.
self.assertEqual(
md5().digest().encode('hex'),
"d41d8cd98f00b204e9800998ecf8427e")
hash = md5()
hash.update("a")
self.assertEqual(
hash.digest().encode('hex'),
"0cc175b9c0f1b6a831c399e269772661")
# Instances of it should have a digest_size attribute
self.assertEqual(md5().digest_size, 16)
test_md5.suppress = [util.suppress(message="twisted.python.hashlib "
"was deprecated in Twisted 13.1.0: Please use hashlib from stdlib.")]
def test_sha1(self):
"""
L{hashlib.sha1} returns an object which can be used to compute a SHA1
hash as defined by U{RFC 3174<http://tools.ietf.org/rfc/rfc3174.txt>}.
"""
from twisted.python.hashlib import sha1
def format(s):
return ''.join(s.split()).lower()
# Test the result using values from section 7.3 of the RFC.
self.assertEqual(
sha1("abc").hexdigest(),
format(
"A9 99 3E 36 47 06 81 6A BA 3E 25 71 78 50 C2 6C 9C D0 D8 9D"))
self.assertEqual(
sha1("abcdbcdecdefdefgefghfghighijhi"
"jkijkljklmklmnlmnomnopnopq").hexdigest(),
format(
"84 98 3E 44 1C 3B D2 6E BA AE 4A A1 F9 51 29 E5 E5 46 70 F1"))
# It should have digest and update methods, too.
self.assertEqual(
sha1("abc").digest().encode('hex'),
format(
"A9 99 3E 36 47 06 81 6A BA 3E 25 71 78 50 C2 6C 9C D0 D8 9D"))
hash = sha1()
hash.update("abc")
self.assertEqual(
hash.digest().encode('hex'),
format(
"A9 99 3E 36 47 06 81 6A BA 3E 25 71 78 50 C2 6C 9C D0 D8 9D"))
# Instances of it should have a digest_size attribute.
self.assertEqual(
sha1().digest_size, 20)
test_sha1.suppress = [util.suppress(message="twisted.python.hashlib "
"was deprecated in Twisted 13.1.0: Please use hashlib from stdlib.")]

View file

@ -0,0 +1,41 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.htmlizer}.
"""
from StringIO import StringIO
from twisted.trial.unittest import TestCase
from twisted.python.htmlizer import filter
class FilterTests(TestCase):
"""
Tests for L{twisted.python.htmlizer.filter}.
"""
def test_empty(self):
"""
If passed an empty input file, L{filter} writes a I{pre} tag containing
only an end marker to the output file.
"""
input = StringIO("")
output = StringIO()
filter(input, output)
self.assertEqual(output.getvalue(), '<pre><span class="py-src-endmarker"></span></pre>\n')
def test_variable(self):
"""
If passed an input file containing a variable access, L{filter} writes
a I{pre} tag containing a I{py-src-variable} span containing the
variable.
"""
input = StringIO("foo\n")
output = StringIO()
filter(input, output)
self.assertEqual(
output.getvalue(),
'<pre><span class="py-src-variable">foo</span><span class="py-src-newline">\n'
'</span><span class="py-src-endmarker"></span></pre>\n')
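A short usage sketch of htmlizer.filter, mirroring the tests above: it reads Python source from one file-like object and writes the colourized HTML to another.

from StringIO import StringIO
from twisted.python.htmlizer import filter

source = StringIO("x = 1\n")
html = StringIO()
filter(source, html)
# html.getvalue() is a <pre> block of py-src-* spans, as asserted above.
print(html.getvalue())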

View file

@ -0,0 +1,120 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python._inotify}.
"""
from twisted.trial.unittest import TestCase
from twisted.python.runtime import platform
if platform.supportsINotify():
from ctypes import c_int, c_uint32, c_char_p
from twisted.python import _inotify
from twisted.python._inotify import (
INotifyError, initializeModule, init, add)
else:
_inotify = None
class INotifyTests(TestCase):
"""
Tests for L{twisted.python._inotify}.
"""
if _inotify is None:
skip = "This platform doesn't support INotify."
def test_missingInit(self):
"""
If the I{libc} object passed to L{initializeModule} has no
C{inotify_init} attribute, L{ImportError} is raised.
"""
class libc:
def inotify_add_watch(self):
pass
def inotify_rm_watch(self):
pass
self.assertRaises(ImportError, initializeModule, libc())
def test_missingAdd(self):
"""
If the I{libc} object passed to L{initializeModule} has no
C{inotify_add_watch} attribute, L{ImportError} is raised.
"""
class libc:
def inotify_init(self):
pass
def inotify_rm_watch(self):
pass
self.assertRaises(ImportError, initializeModule, libc())
def test_missingRemove(self):
"""
If the I{libc} object passed to L{initializeModule} has no
C{inotify_rm_watch} attribute, L{ImportError} is raised.
"""
class libc:
def inotify_init(self):
pass
def inotify_add_watch(self):
pass
self.assertRaises(ImportError, initializeModule, libc())
def test_setTypes(self):
"""
If the I{libc} object passed to L{initializeModule} has all of the
necessary attributes, it sets the C{argtypes} and C{restype} attributes
of the three ctypes methods used from libc.
"""
class libc:
def inotify_init(self):
pass
inotify_init = staticmethod(inotify_init)
def inotify_rm_watch(self):
pass
inotify_rm_watch = staticmethod(inotify_rm_watch)
def inotify_add_watch(self):
pass
inotify_add_watch = staticmethod(inotify_add_watch)
c = libc()
initializeModule(c)
self.assertEqual(c.inotify_init.argtypes, [])
self.assertEqual(c.inotify_init.restype, c_int)
self.assertEqual(c.inotify_rm_watch.argtypes, [c_int, c_int])
self.assertEqual(c.inotify_rm_watch.restype, c_int)
self.assertEqual(
c.inotify_add_watch.argtypes, [c_int, c_char_p, c_uint32])
self.assertEqual(c.inotify_add_watch.restype, c_int)
def test_failedInit(self):
"""
If C{inotify_init} returns a negative number, L{init} raises
L{INotifyError}.
"""
class libc:
def inotify_init(self):
return -1
self.patch(_inotify, 'libc', libc())
self.assertRaises(INotifyError, init)
def test_failedAddWatch(self):
"""
If C{inotify_add_watch} returns a negative number, L{add}
raises L{INotifyError}.
"""
class libc:
def inotify_add_watch(self, fd, path, mask):
return -1
self.patch(_inotify, 'libc', libc())
self.assertRaises(INotifyError, add, 3, '/foo', 0)
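A minimal sketch of the low-level calls exercised above, on a platform with inotify support: init() returns an inotify file descriptor and add() registers a watch on it. The mask is the raw inotify bitmask; 0x2 (IN_MODIFY on Linux) is used here purely as an illustration.

from twisted.python._inotify import init, add

fd = init()                  # raises INotifyError if inotify_init fails
wd = add(fd, '/tmp', 0x2)    # raises INotifyError if inotify_add_watch fails
print(wd)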

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,165 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.runtime}.
"""
from __future__ import division, absolute_import
import sys
from twisted.trial.util import suppress as SUPPRESS
from twisted.trial.unittest import SynchronousTestCase
from twisted.python.runtime import Platform, shortPythonVersion
class PythonVersionTests(SynchronousTestCase):
"""
Tests the shortPythonVersion method.
"""
def test_shortPythonVersion(self):
"""
Verify that the Python version is returned correctly.
"""
ver = shortPythonVersion().split('.')
for i in range(3):
self.assertEqual(int(ver[i]), sys.version_info[i])
class PlatformTests(SynchronousTestCase):
"""
Tests for the default L{Platform} initializer.
"""
isWinNTDeprecationMessage = ('twisted.python.runtime.Platform.isWinNT was '
'deprecated in Twisted 13.0. Use Platform.isWindows instead.')
def test_isKnown(self):
"""
L{Platform.isKnown} returns a boolean indicating whether this is one of
the L{runtime.knownPlatforms}.
"""
platform = Platform()
self.assertTrue(platform.isKnown())
def test_isVistaConsistency(self):
"""
Verify consistency of L{Platform.isVista}: it can only be C{True} if
L{Platform.isWinNT} and L{Platform.isWindows} are C{True}.
"""
platform = Platform()
if platform.isVista():
self.assertTrue(platform.isWinNT())
self.assertTrue(platform.isWindows())
self.assertFalse(platform.isMacOSX())
def test_isMacOSXConsistency(self):
"""
L{Platform.isMacOSX} can only return C{True} if L{Platform.getType}
returns C{'posix'}.
"""
platform = Platform()
if platform.isMacOSX():
self.assertEqual(platform.getType(), 'posix')
def test_isLinuxConsistency(self):
"""
L{Platform.isLinux} can only return C{True} if L{Platform.getType}
returns C{'posix'} and L{sys.platform} starts with C{"linux"}.
"""
platform = Platform()
if platform.isLinux():
self.assertTrue(sys.platform.startswith("linux"))
def test_isWinNT(self):
"""
L{Platform.isWinNT} can return only C{False} or C{True}, and cannot
return C{True} if L{Platform.getType} is not C{"win32"}.
"""
platform = Platform()
isWinNT = platform.isWinNT()
self.assertIn(isWinNT, (False, True))
if platform.getType() != "win32":
self.assertEqual(isWinNT, False)
test_isWinNT.suppress = [SUPPRESS(category=DeprecationWarning,
message=isWinNTDeprecationMessage)]
def test_isWinNTDeprecated(self):
"""
L{Platform.isWinNT} is deprecated in favor of L{platform.isWindows}.
"""
platform = Platform()
result = platform.isWinNT()
warnings = self.flushWarnings([self.test_isWinNTDeprecated])
self.assertEqual(len(warnings), 1)
self.assertEqual(
warnings[0]['message'], self.isWinNTDeprecationMessage)
def test_supportsThreads(self):
"""
L{Platform.supportsThreads} returns C{True} if threads can be created in
this runtime, C{False} otherwise.
"""
# It's difficult to test both cases of this without faking the threading
# module. Perhaps an adequate test is to just test the behavior with
# the current runtime, whatever that happens to be.
try:
import threading
except ImportError:
self.assertFalse(Platform().supportsThreads())
else:
self.assertTrue(Platform().supportsThreads())
class ForeignPlatformTests(SynchronousTestCase):
"""
Tests for L{Platform} based overridden initializer values.
"""
def test_getType(self):
"""
If an operating system name is supplied to L{Platform}'s initializer,
L{Platform.getType} returns the platform type which corresponds to that
name.
"""
self.assertEqual(Platform('nt').getType(), 'win32')
self.assertEqual(Platform('ce').getType(), 'win32')
self.assertEqual(Platform('posix').getType(), 'posix')
self.assertEqual(Platform('java').getType(), 'java')
def test_isMacOSX(self):
"""
If a system platform name is supplied to L{Platform}'s initializer, it
is used to determine the result of L{Platform.isMacOSX}, which returns
C{True} for C{"darwin"}, C{False} otherwise.
"""
self.assertTrue(Platform(None, 'darwin').isMacOSX())
self.assertFalse(Platform(None, 'linux2').isMacOSX())
self.assertFalse(Platform(None, 'win32').isMacOSX())
def test_isLinux(self):
"""
If a system platform name is supplied to L{Platform}'s initializer, it
is used to determine the result of L{Platform.isLinux}, which returns
C{True} for values beginning with C{"linux"}, C{False} otherwise.
"""
self.assertFalse(Platform(None, 'darwin').isLinux())
self.assertTrue(Platform(None, 'linux').isLinux())
self.assertTrue(Platform(None, 'linux2').isLinux())
self.assertTrue(Platform(None, 'linux3').isLinux())
self.assertFalse(Platform(None, 'win32').isLinux())
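# Illustrative sketch, not part of the original test module: a small helper
# that classifies the current runtime using the same predicates the tests
# above exercise.  The labels returned here are arbitrary.
def _platformSketch():
    runtimePlatform = Platform()
    if runtimePlatform.isWindows():
        return 'windows'
    if runtimePlatform.isMacOSX():
        return 'mac os x'
    if runtimePlatform.isLinux():
        return 'linux'
    return runtimePlatform.getType()   # 'posix', 'win32' or 'java'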

View file

@@ -0,0 +1,543 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.sendmsg}.
"""
import sys
import errno
from socket import SOL_SOCKET, AF_INET, AF_INET6, socket, error
try:
from socket import AF_UNIX, socketpair
except ImportError:
nonUNIXSkip = "Platform does not support AF_UNIX sockets"
else:
nonUNIXSkip = None
from struct import pack
from os import devnull, pipe, read, close, environ
from twisted.internet.defer import Deferred
from twisted.internet.error import ProcessDone
from twisted.trial.unittest import TestCase
from twisted.internet.defer import inlineCallbacks
from twisted.internet import reactor
from twisted.python.filepath import FilePath
from twisted.python.runtime import platform
from twisted.internet.protocol import ProcessProtocol
if platform.isLinux():
from socket import MSG_DONTWAIT
dontWaitSkip = None
else:
# It would be nice to be able to test flags on more platforms, but finding a
# flag that works *at all* is somewhat challenging.
dontWaitSkip = "MSG_DONTWAIT is only known to work as intended on Linux"
try:
from twisted.python.sendmsg import SCM_RIGHTS, send1msg, recv1msg, getsockfam
except ImportError:
importSkip = "Cannot import twisted.python.sendmsg"
else:
importSkip = None
class ExitedWithStderr(Exception):
"""
A process exited with some stderr.
"""
def __str__(self):
"""
Dump the errors in a pretty way in the event of a subprocess traceback.
"""
return '\n'.join([''] + list(self.args))
class StartStopProcessProtocol(ProcessProtocol):
"""
An L{IProcessProtocol} with a Deferred for events where the subprocess
starts and stops.
@ivar started: A L{Deferred} which fires with this protocol's
L{IProcessTransport} provider when it is connected to one.
@ivar stopped: A L{Deferred} which fires with the process output or a
failure if the process produces output on standard error.
@ivar output: A C{str} used to accumulate standard output.
@ivar errors: A C{str} used to accumulate standard error.
"""
def __init__(self):
self.started = Deferred()
self.stopped = Deferred()
self.output = ''
self.errors = ''
def connectionMade(self):
self.started.callback(self.transport)
def outReceived(self, data):
self.output += data
def errReceived(self, data):
self.errors += data
def processEnded(self, reason):
if reason.check(ProcessDone):
self.stopped.callback(self.output)
else:
self.stopped.errback(ExitedWithStderr(
self.errors, self.output))
class BadList(list):
"""
A list which cannot be iterated sometimes.
This is a C{list} subclass to get past the type check in L{send1msg}, not as
an example of how real programs might want to interact with L{send1msg} (or
anything else). A custom C{list} subclass makes it easier to trigger
certain error cases in the implementation.
@ivar iterate: A flag which indicates whether an instance of L{BadList} will
allow iteration over itself or not. If C{False}, an attempt to iterate
over the instance will raise an exception.
"""
iterate = True
def __iter__(self):
"""
Allow normal list iteration, or raise an exception.
If C{self.iterate} is C{True}, it will be flipped to C{False} and then
normal iteration will proceed. If C{self.iterate} is C{False},
L{RuntimeError} is raised instead.
"""
if self.iterate:
self.iterate = False
return super(BadList, self).__iter__()
raise RuntimeError("Something bad happened")
class WorseList(list):
"""
A list which at first gives the appearance of being iterable, but then
raises an exception.
See L{BadList} for a warning about not writing code like this.
"""
def __iter__(self):
"""
Return an iterator which will raise an exception as soon as C{next} is
called on it.
"""
class BadIterator(object):
def next(self):
raise RuntimeError("This is a really bad case.")
return BadIterator()
class SendmsgTestCase(TestCase):
"""
Tests for sendmsg extension module and associated file-descriptor sending
functionality.
"""
if nonUNIXSkip is not None:
skip = nonUNIXSkip
elif importSkip is not None:
skip = importSkip
def setUp(self):
"""
Create a pair of UNIX sockets.
"""
self.input, self.output = socketpair(AF_UNIX)
def tearDown(self):
"""
Close the sockets opened by setUp.
"""
self.input.close()
self.output.close()
def test_sendmsgBadArguments(self):
"""
The argument types accepted by L{send1msg} are:
1. C{int}
2. read-only character buffer
3. C{int}
4. sequence
The 3rd and 4th arguments are optional. If fewer than two arguments or
more than four arguments are passed, or if any of the arguments passed
are not compatible with these types, L{TypeError} is raised.
"""
# Exercise the wrong number of arguments cases
self.assertRaises(TypeError, send1msg)
self.assertRaises(TypeError, send1msg, 1)
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [], object())
# Exercise the wrong type of arguments cases
self.assertRaises(TypeError, send1msg, object(), "hello world", 2, [])
self.assertRaises(TypeError, send1msg, 1, object(), 2, [])
self.assertRaises(TypeError, send1msg, 1, "hello world", object(), [])
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, object())
def test_badAncillaryIter(self):
"""
If iteration over the ancillary data list fails (at the point of the
C{__iter__} call), the exception with which it fails is propagated to
the caller of L{send1msg}.
"""
badList = BadList()
badList.append((1, 2, "hello world"))
badList.iterate = False
self.assertRaises(RuntimeError, send1msg, 1, "hello world", 2, badList)
# Hit the second iteration
badList.iterate = True
self.assertRaises(RuntimeError, send1msg, 1, "hello world", 2, badList)
def test_badAncillaryNext(self):
"""
If iteration over the ancillary data list fails (at the point of a
C{next} call), the exception with which it fails is propagated to the
caller of L{send1msg}.
"""
worseList = WorseList()
self.assertRaises(RuntimeError, send1msg, 1, "hello world", 2, worseList)
def test_sendmsgBadAncillaryItem(self):
"""
The ancillary data list contains three-tuples with element types of:
1. C{int}
2. C{int}
3. read-only character buffer
If a tuple in the ancillary data list does not contain elements of these
types, L{TypeError} is raised.
"""
# Exercise the wrong number of arguments cases
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [()])
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [(1,)])
self.assertRaises(TypeError, send1msg, 1, "hello world", 2, [(1, 2)])
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(1, 2, "goodbye", object())])
# Exercise the wrong type of arguments cases
exc = self.assertRaises(
TypeError, send1msg, 1, "hello world", 2, [object()])
self.assertEqual(
"send1msg argument 3 expected list of tuple, "
"got list containing object",
str(exc))
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(object(), 1, "goodbye")])
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(1, object(), "goodbye")])
self.assertRaises(
TypeError,
send1msg, 1, "hello world", 2, [(1, 1, object())])
def test_syscallError(self):
"""
If the underlying C{sendmsg} call fails, L{send1msg} raises
L{socket.error} with its errno set to the underlying errno value.
"""
probe = file(devnull)
fd = probe.fileno()
probe.close()
exc = self.assertRaises(error, send1msg, fd, "hello, world")
self.assertEqual(exc.args[0], errno.EBADF)
def test_syscallErrorWithControlMessage(self):
"""
The behavior when the underlying C{sendmsg} call fails is the same
whether L{send1msg} is passed ancillary data or not.
"""
probe = file(devnull)
fd = probe.fileno()
probe.close()
exc = self.assertRaises(
error, send1msg, fd, "hello, world", 0, [(0, 0, "0123")])
self.assertEqual(exc.args[0], errno.EBADF)
def test_roundtrip(self):
"""
L{recv1msg} will retrieve a message sent via L{send1msg}.
"""
message = "hello, world!"
self.assertEqual(
len(message),
send1msg(self.input.fileno(), message, 0))
result = recv1msg(fd=self.output.fileno())
self.assertEqual(result, (message, 0, []))
def test_shortsend(self):
"""
L{send1msg} returns the number of bytes which it was able to send.
"""
message = "x" * 1024 * 1024
self.input.setblocking(False)
sent = send1msg(self.input.fileno(), message)
# Sanity check - make sure we did fill the send buffer and then some
self.assertTrue(sent < len(message))
received = recv1msg(self.output.fileno(), 0, len(message))
self.assertEqual(len(received[0]), sent)
def test_roundtripEmptyAncillary(self):
"""
L{send1msg} treats an empty ancillary data list the same way it treats
receiving no argument for the ancillary parameter at all.
"""
send1msg(self.input.fileno(), "hello, world!", 0, [])
result = recv1msg(fd=self.output.fileno())
self.assertEqual(result, ("hello, world!", 0, []))
def test_flags(self):
"""
The C{flags} argument to L{send1msg} is passed on to the underlying
C{sendmsg} call, to affect it in whatever way is defined by those flags.
"""
# Just exercise one flag with simple, well-known behavior. MSG_DONTWAIT
# makes the send a non-blocking call, even if the socket is in blocking
# mode. See also test_flags in RecvmsgTestCase
for i in range(1024):
try:
send1msg(self.input.fileno(), "x" * 1024, MSG_DONTWAIT)
except error, e:
self.assertEqual(e.args[0], errno.EAGAIN)
break
else:
self.fail(
"Failed to fill up the send buffer, "
"or maybe send1msg blocked for a while")
if dontWaitSkip is not None:
test_flags.skip = dontWaitSkip
def test_wrongTypeAncillary(self):
"""
L{send1msg} will show a helpful exception message when given the wrong
type of object for the 'ancillary' argument.
"""
error = self.assertRaises(TypeError,
send1msg, self.input.fileno(),
"hello, world!", 0, 4321)
self.assertEqual(str(error),
"send1msg argument 3 expected list, got int")
def spawn(self, script):
"""
Start a script that is a peer of this test as a subprocess.
@param script: the module name of the script in this directory (no
package prefix, no '.py')
@type script: C{str}
@rtype: L{StartStopProcessProtocol}
"""
sspp = StartStopProcessProtocol()
reactor.spawnProcess(
sspp, sys.executable, [
sys.executable,
FilePath(__file__).sibling(script + ".py").path,
str(self.output.fileno()),
],
environ,
childFDs={0: "w", 1: "r", 2: "r",
self.output.fileno(): self.output.fileno()}
)
return sspp
@inlineCallbacks
def test_sendSubProcessFD(self):
"""
Calling L{send1msg} with SOL_SOCKET, SCM_RIGHTS, and a platform-endian
packed file descriptor number should send that file descriptor to a
different process, where it can be retrieved by using L{recv1msg}.
"""
sspp = self.spawn("pullpipe")
yield sspp.started
pipeOut, pipeIn = pipe()
self.addCleanup(close, pipeOut)
send1msg(
self.input.fileno(), "blonk", 0,
[(SOL_SOCKET, SCM_RIGHTS, pack("i", pipeIn))])
close(pipeIn)
yield sspp.stopped
self.assertEqual(read(pipeOut, 1024), "Test fixture data: blonk.\n")
# Make sure that the pipe is actually closed now.
self.assertEqual(read(pipeOut, 1024), "")
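# Illustrative sketch, not part of the original test module: passing a file
# descriptor between two ends of an AF_UNIX socketpair with send1msg and
# recv1msg, mirroring what test_sendSubProcessFD above does across a process
# boundary.  It assumes the sendmsg extension and AF_UNIX sockets are both
# available (i.e. importSkip and nonUNIXSkip are None).
def _fdPassingSketch():
    from struct import unpack, calcsize
    left, right = socketpair(AF_UNIX)
    readEnd, writeEnd = pipe()
    try:
        # Send one byte of ordinary data along with the pipe's write end.
        send1msg(left.fileno(), "x", 0,
                 [(SOL_SOCKET, SCM_RIGHTS, pack("i", writeEnd))])
        data, flags, ancillary = recv1msg(right.fileno())
        level, cmsgType, packedFD = ancillary[0]
        # The control message may be padded; unpack just the leading int.
        duplicatedFD = unpack("i", packedFD[:calcsize("i")])[0]
        close(duplicatedFD)
        return data
    finally:
        left.close()
        right.close()
        close(readEnd)
        close(writeEnd)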
class RecvmsgTestCase(TestCase):
"""
Tests for L{recv1msg} (primarily error handling cases).
"""
if importSkip is not None:
skip = importSkip
def test_badArguments(self):
"""
The argument types accepted by L{recv1msg} are:
1. C{int}
2. C{int}
3. C{int}
4. C{int}
The 2nd, 3rd, and 4th arguments are optional. If fewer than one
argument or more than four arguments are passed, or if any of the
arguments passed are not compatible with these types, L{TypeError} is
raised.
"""
# Exercise the wrong number of arguments cases
self.assertRaises(TypeError, recv1msg)
self.assertRaises(TypeError, recv1msg, 1, 2, 3, 4, object())
# Exercise the wrong type of arguments cases
self.assertRaises(TypeError, recv1msg, object(), 2, 3, 4)
self.assertRaises(TypeError, recv1msg, 1, object(), 3, 4)
self.assertRaises(TypeError, recv1msg, 1, 2, object(), 4)
self.assertRaises(TypeError, recv1msg, 1, 2, 3, object())
def test_cmsgSpaceOverflow(self):
"""
L{recv1msg} raises L{OverflowError} if passed a value for the
C{cmsg_size} argument which exceeds C{SOCKLEN_MAX}.
"""
self.assertRaises(OverflowError, recv1msg, 0, 0, 0, 0x7FFFFFFF)
def test_syscallError(self):
"""
If the underlying C{recvmsg} call fails, L{recv1msg} raises
L{socket.error} with its errno set to the underlying errno value.
"""
probe = file(devnull)
fd = probe.fileno()
probe.close()
exc = self.assertRaises(error, recv1msg, fd)
self.assertEqual(exc.args[0], errno.EBADF)
def test_flags(self):
"""
The C{flags} argument to L{recv1msg} is passed on to the underlying
C{recvmsg} call, to affect it in whatever way is defined by those flags.
"""
# See test_flags in SendmsgTestCase
reader, writer = socketpair(AF_UNIX)
exc = self.assertRaises(
error, recv1msg, reader.fileno(), MSG_DONTWAIT)
self.assertEqual(exc.args[0], errno.EAGAIN)
if dontWaitSkip is not None:
test_flags.skip = dontWaitSkip
class GetSocketFamilyTests(TestCase):
"""
Tests for L{getsockfam}, a helper which reveals the address family of an
arbitrary socket.
"""
if importSkip is not None:
skip = importSkip
def _socket(self, addressFamily):
"""
Create a new socket using the given address family and return that
socket's file descriptor. The socket will automatically be closed when
the test is torn down.
"""
s = socket(addressFamily)
self.addCleanup(s.close)
return s.fileno()
def test_badArguments(self):
"""
L{getsockfam} accepts a single C{int} argument. If it is called in some
other way, L{TypeError} is raised.
"""
self.assertRaises(TypeError, getsockfam)
self.assertRaises(TypeError, getsockfam, 1, 2)
self.assertRaises(TypeError, getsockfam, object())
def test_syscallError(self):
"""
If the underlying C{getsockname} call fails, L{getsockfam} raises
L{socket.error} with its errno set to the underlying errno value.
"""
probe = file(devnull)
fd = probe.fileno()
probe.close()
exc = self.assertRaises(error, getsockfam, fd)
self.assertEqual(errno.EBADF, exc.args[0])
def test_inet(self):
"""
When passed the file descriptor of a socket created with the C{AF_INET}
address family, L{getsockfam} returns C{AF_INET}.
"""
self.assertEqual(AF_INET, getsockfam(self._socket(AF_INET)))
def test_inet6(self):
"""
When passed the file descriptor of a socket created with the C{AF_INET6}
address family, L{getsockfam} returns C{AF_INET6}.
"""
self.assertEqual(AF_INET6, getsockfam(self._socket(AF_INET6)))
def test_unix(self):
"""
When passed the file descriptor of a socket created with the C{AF_UNIX}
address family, L{getsockfam} returns C{AF_UNIX}.
"""
self.assertEqual(AF_UNIX, getsockfam(self._socket(AF_UNIX)))
if nonUNIXSkip is not None:
test_unix.skip = nonUNIXSkip

View file

@@ -0,0 +1,623 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for twisted.python._shellcomp
"""
import sys
from cStringIO import StringIO
from twisted.trial import unittest
from twisted.python import _shellcomp, usage, reflect
from twisted.python.usage import Completions, Completer, CompleteFiles
from twisted.python.usage import CompleteList
class ZshScriptTestMeta(type):
"""
Metaclass of ZshScriptTestMixin.
"""
def __new__(cls, name, bases, attrs):
def makeTest(cmdName, optionsFQPN):
def runTest(self):
return test_genZshFunction(self, cmdName, optionsFQPN)
return runTest
# add test_ methods to the class for each script
# we are testing.
if 'generateFor' in attrs:
for cmdName, optionsFQPN in attrs['generateFor']:
test = makeTest(cmdName, optionsFQPN)
attrs['test_genZshFunction_' + cmdName] = test
return type.__new__(cls, name, bases, attrs)
class ZshScriptTestMixin(object):
"""
Integration test helper to show that C{usage.Options} classes can have zsh
completion functions generated for them without raising errors.
In your subclasses set a class variable like so:
# | cmd name | Fully Qualified Python Name of Options class |
#
generateFor = [('conch', 'twisted.conch.scripts.conch.ClientOptions'),
('twistd', 'twisted.scripts.twistd.ServerOptions'),
]
Each package that contains Twisted scripts should contain one TestCase
subclass which also inherits from this mixin, and contains a C{generateFor}
list appropriate for the scripts in that package.
"""
__metaclass__ = ZshScriptTestMeta
def test_genZshFunction(self, cmdName, optionsFQPN):
"""
Generate completion functions for the given Twisted command; no errors
should be raised.
@type cmdName: C{str}
@param cmdName: The name of the command-line utility e.g. 'twistd'
@type optionsFQPN: C{str}
@param optionsFQPN: The Fully Qualified Python Name of the C{Options}
class to be tested.
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
# some scripts won't import or instantiate because of missing
# dependencies (PyCrypto, etc) so we have to skip them.
try:
o = reflect.namedAny(optionsFQPN)()
except Exception, e:
raise unittest.SkipTest("Couldn't import or instantiate "
"Options class: %s" % (e,))
try:
o.parseOptions(["", "--_shell-completion", "zsh:2"])
except ImportError, e:
# this can happen for commands which don't have all
# the necessary dependencies installed. skip test.
# skip
raise unittest.SkipTest("ImportError calling parseOptions(): %s", (e,))
except SystemExit:
pass # expected
else:
self.fail('SystemExit not raised')
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
outputFile.seek(0)
outputFile.truncate()
# now, if it has sub commands, we have to test those too
if hasattr(o, 'subCommands'):
for (cmd, short, parser, doc) in o.subCommands:
try:
o.parseOptions([cmd, "", "--_shell-completion",
"zsh:3"])
except ImportError, e:
# this can happen for commands which don't have all
# the necessary dependencies installed. skip test.
raise unittest.SkipTest("ImportError calling parseOptions() "
"on subcommand: %s", (e,))
except SystemExit:
pass # expected
else:
self.fail('SystemExit not raised')
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
outputFile.seek(0)
outputFile.truncate()
# flushed because we don't want DeprecationWarnings to be printed when
# running these test cases.
self.flushWarnings()
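# Illustrative sketch, not part of the original test module: how a package's
# test module is expected to use ZshScriptTestMixin.  In a real package test
# this class would also inherit from unittest.TestCase so that the generated
# test_genZshFunction_* methods are collected; it is left as a plain class
# here so the sketch has no side effects.  The Options path is the example
# given in the mixin's docstring, not a new claim about this package.
class _ExampleScriptCompletionTests(ZshScriptTestMixin):
    generateFor = [('twistd', 'twisted.scripts.twistd.ServerOptions')]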
class ZshTestCase(unittest.TestCase):
"""
Tests for zsh completion code
"""
def test_accumulateMetadata(self):
"""
C{compData} attributes placed on Options classes are picked up correctly.
"""
opts = FighterAceExtendedOptions()
ag = _shellcomp.ZshArgumentsGenerator(opts, 'ace', 'dummy_value')
descriptions = FighterAceOptions.compData.descriptions.copy()
descriptions.update(FighterAceExtendedOptions.compData.descriptions)
self.assertEqual(ag.descriptions, descriptions)
self.assertEqual(ag.multiUse,
set(FighterAceOptions.compData.multiUse))
self.assertEqual(ag.mutuallyExclusive,
FighterAceOptions.compData.mutuallyExclusive)
optActions = FighterAceOptions.compData.optActions.copy()
optActions.update(FighterAceExtendedOptions.compData.optActions)
self.assertEqual(ag.optActions, optActions)
self.assertEqual(ag.extraActions,
FighterAceOptions.compData.extraActions)
def test_mutuallyExclusiveCornerCase(self):
"""
Exercise a corner-case of ZshArgumentsGenerator.makeExcludesDict()
where the long option name already exists in the `excludes` dict being
built.
"""
class OddFighterAceOptions(FighterAceExtendedOptions):
# since "fokker", etc, are already defined as mutually-
# exclusive on the super-class, defining them again here forces
# the corner-case to be exercised.
optFlags = [['anatra', None,
'Select the Anatra DS as your dogfighter aircraft']]
compData = Completions(
mutuallyExclusive=[['anatra', 'fokker', 'albatros',
'spad', 'bristol']])
opts = OddFighterAceOptions()
ag = _shellcomp.ZshArgumentsGenerator(opts, 'ace', 'dummy_value')
expected = {
'albatros': set(['anatra', 'b', 'bristol', 'f',
'fokker', 's', 'spad']),
'anatra': set(['a', 'albatros', 'b', 'bristol',
'f', 'fokker', 's', 'spad']),
'bristol': set(['a', 'albatros', 'anatra', 'f',
'fokker', 's', 'spad']),
'fokker': set(['a', 'albatros', 'anatra', 'b',
'bristol', 's', 'spad']),
'spad': set(['a', 'albatros', 'anatra', 'b',
'bristol', 'f', 'fokker'])}
self.assertEqual(ag.excludes, expected)
def test_accumulateAdditionalOptions(self):
"""
We pick up options that are only defined by having an
appropriately named method on your Options class,
e.g. def opt_foo(self, foo)
"""
opts = FighterAceExtendedOptions()
ag = _shellcomp.ZshArgumentsGenerator(opts, 'ace', 'dummy_value')
self.assertIn('nocrash', ag.flagNameToDefinition)
self.assertIn('nocrash', ag.allOptionsNameToDefinition)
self.assertIn('difficulty', ag.paramNameToDefinition)
self.assertIn('difficulty', ag.allOptionsNameToDefinition)
def test_verifyZshNames(self):
"""
Using a parameter/flag name that doesn't exist
will raise an error
"""
class TmpOptions(FighterAceExtendedOptions):
# Note typo of detail
compData = Completions(optActions={'detaill' : None})
self.assertRaises(ValueError, _shellcomp.ZshArgumentsGenerator,
TmpOptions(), 'ace', 'dummy_value')
class TmpOptions2(FighterAceExtendedOptions):
# Note that 'foo' and 'bar' are not real option
# names defined in this class
compData = Completions(
mutuallyExclusive=[("foo", "bar")])
self.assertRaises(ValueError, _shellcomp.ZshArgumentsGenerator,
TmpOptions2(), 'ace', 'dummy_value')
def test_zshCode(self):
"""
Generate a completion function, and test the textual output
against a known correct output
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
self.patch(sys, 'argv', ["silly", "", "--_shell-completion", "zsh:2"])
opts = SimpleProgOptions()
self.assertRaises(SystemExit, opts.parseOptions)
self.assertEqual(testOutput1, outputFile.getvalue())
def test_zshCodeWithSubs(self):
"""
Generate a completion function with subcommands,
and test the textual output against a known correct output
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
self.patch(sys, 'argv', ["silly2", "", "--_shell-completion", "zsh:2"])
opts = SimpleProgWithSubcommands()
self.assertRaises(SystemExit, opts.parseOptions)
self.assertEqual(testOutput2, outputFile.getvalue())
def test_incompleteCommandLine(self):
"""
Completion still happens even if a command-line is given
that would normally throw UsageError.
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--fokker", "server", "--unknown-option",
"--unknown-option2",
"--_shell-completion", "zsh:5"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
def test_incompleteCommandLine_case2(self):
"""
Completion still happens even if a command-line is given
that would normally throw UsageError.
The existence of --unknown-option prior to the subcommand
will break subcommand detection... but we complete anyway
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--fokker", "--unknown-option", "server",
"--list-server", "--_shell-completion", "zsh:5"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
outputFile.seek(0)
outputFile.truncate()
def test_incompleteCommandLine_case3(self):
"""
Completion still happens even if a command-line is given
that would normally throw UsageError.
Break subcommand detection in a different way by providing
an invalid subcommand name.
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--fokker", "unknown-subcommand",
"--list-server", "--_shell-completion", "zsh:4"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
def test_skipSubcommandList(self):
"""
Ensure the optimization which skips building the subcommand list
under certain conditions isn't broken.
"""
outputFile = StringIO()
self.patch(usage.Options, '_shellCompFile', outputFile)
opts = FighterAceOptions()
self.assertRaises(SystemExit, opts.parseOptions,
["--alba", "--_shell-completion", "zsh:2"])
outputFile.seek(0)
# test that we got some output
self.assertEqual(1, len(outputFile.read(1)))
def test_poorlyDescribedOptMethod(self):
"""
Test corner case fetching an option description from a method docstring
"""
opts = FighterAceOptions()
argGen = _shellcomp.ZshArgumentsGenerator(opts, 'ace', None)
descr = argGen.getDescription('silly')
# docstring for opt_silly is useless so it should just use the
# option name as the description
self.assertEqual(descr, 'silly')
def test_brokenActions(self):
"""
A C{Completer} with repeat=True may only be used as the
last item in the extraActions list.
"""
class BrokenActions(usage.Options):
compData = usage.Completions(
extraActions=[usage.Completer(repeat=True),
usage.Completer()]
)
outputFile = StringIO()
opts = BrokenActions()
self.patch(opts, '_shellCompFile', outputFile)
self.assertRaises(ValueError, opts.parseOptions,
["", "--_shell-completion", "zsh:2"])
def test_optMethodsDontOverride(self):
"""
opt_* methods on Options classes should not override the
data provided in optFlags or optParameters.
"""
class Options(usage.Options):
optFlags = [['flag', 'f', 'A flag']]
optParameters = [['param', 'p', None, 'A param']]
def opt_flag(self):
""" junk description """
def opt_param(self, param):
""" junk description """
opts = Options()
argGen = _shellcomp.ZshArgumentsGenerator(opts, 'ace', None)
self.assertEqual(argGen.getDescription('flag'), 'A flag')
self.assertEqual(argGen.getDescription('param'), 'A param')
class EscapeTestCase(unittest.TestCase):
def test_escape(self):
"""
Verify _shellcomp.escape() function
"""
esc = _shellcomp.escape
test = "$"
self.assertEqual(esc(test), "'$'")
test = 'A--\'$"\\`--B'
self.assertEqual(esc(test), '"A--\'\\$\\"\\\\\\`--B"')
class CompleterNotImplementedTestCase(unittest.TestCase):
"""
Test that using an unknown shell constant with SubcommandAction
raises NotImplementedError
The other Completer() subclasses are tested in test_usage.py
"""
def test_unknownShell(self):
"""
Using an unknown shellType should raise NotImplementedError
"""
action = _shellcomp.SubcommandAction()
self.assertRaises(NotImplementedError, action._shellCode,
None, "bad_shell_type")
class FighterAceServerOptions(usage.Options):
"""
Options for FighterAce 'server' subcommand
"""
optFlags = [['list-server', None,
'List this server with the online FighterAce network']]
optParameters = [['packets-per-second', None,
'Number of update packets to send per second', '20']]
class FighterAceOptions(usage.Options):
"""
Command-line options for an imaginary `Fighter Ace` game
"""
optFlags = [['fokker', 'f',
'Select the Fokker Dr.I as your dogfighter aircraft'],
['albatros', 'a',
'Select the Albatros D-III as your dogfighter aircraft'],
['spad', 's',
'Select the SPAD S.VII as your dogfighter aircraft'],
['bristol', 'b',
'Select the Bristol Scout as your dogfighter aircraft'],
['physics', 'p',
'Enable secret Twisted physics engine'],
['jam', 'j',
'Enable a small chance that your machine guns will jam!'],
['verbose', 'v',
'Verbose logging (may be specified more than once)'],
]
optParameters = [['pilot-name', None, "What's your name, Ace?",
'Manfred von Richthofen'],
['detail', 'd',
'Select the level of rendering detail (1-5)', '3'],
]
subCommands = [['server', None, FighterAceServerOptions,
'Start FighterAce game-server.'],
]
compData = Completions(
descriptions={'physics' : 'Twisted-Physics',
'detail' : 'Rendering detail level'},
multiUse=['verbose'],
mutuallyExclusive=[['fokker', 'albatros', 'spad',
'bristol']],
optActions={'detail' : CompleteList(['1', '2', '3',
'4', '5'])},
extraActions=[CompleteFiles(descr='saved game file to load')]
)
def opt_silly(self):
# A silly option which nobody can explain
""" """
class FighterAceExtendedOptions(FighterAceOptions):
"""
Extend the options and zsh metadata provided by FighterAceOptions.
_shellcomp must accumulate options and metadata from all classes in the
hierarchy so this is important to test.
"""
optFlags = [['no-stalls', None,
'Turn off the ability to stall your aircraft']]
optParameters = [['reality-level', None,
'Select the level of physics reality (1-5)', '5']]
compData = Completions(
descriptions={'no-stalls' : 'Can\'t stall your plane'},
optActions={'reality-level' :
Completer(descr='Physics reality level')}
)
def opt_nocrash(self):
"""
Select that you can't crash your plane
"""
def opt_difficulty(self, difficulty):
"""
How tough are you? (1-10)
"""
def _accuracyAction():
# add tick marks just to exercise quoting
return CompleteList(['1', '2', '3'], descr='Accuracy\'`?')
class SimpleProgOptions(usage.Options):
"""
Command-line options for a `Silly` imaginary program
"""
optFlags = [['color', 'c', 'Turn on color output'],
['gray', 'g', 'Turn on gray-scale output'],
['verbose', 'v',
'Verbose logging (may be specified more than once)'],
]
optParameters = [['optimization', None, '5',
'Select the level of optimization (1-5)'],
['accuracy', 'a', '3',
'Select the level of accuracy (1-3)'],
]
compData = Completions(
descriptions={'color' : 'Color on',
'optimization' : 'Optimization level'},
multiUse=['verbose'],
mutuallyExclusive=[['color', 'gray']],
optActions={'optimization' : CompleteList(['1', '2', '3', '4', '5'],
descr='Optimization?'),
'accuracy' : _accuracyAction},
extraActions=[CompleteFiles(descr='output file')]
)
def opt_X(self):
"""
usage.Options does not recognize single-letter opt_ methods
"""
class SimpleProgSub1(usage.Options):
optFlags = [['sub-opt', 's', 'Sub Opt One']]
class SimpleProgSub2(usage.Options):
optFlags = [['sub-opt', 's', 'Sub Opt Two']]
class SimpleProgWithSubcommands(SimpleProgOptions):
optFlags = [['some-option'],
['other-option', 'o']]
optParameters = [['some-param'],
['other-param', 'p'],
['another-param', 'P', 'Yet Another Param']]
subCommands = [ ['sub1', None, SimpleProgSub1, 'Sub Command 1'],
['sub2', None, SimpleProgSub2, 'Sub Command 2']]
testOutput1 = """#compdef silly
_arguments -s -A "-*" \\
':output file (*):_files -g "*"' \\
"(--accuracy)-a[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
"(-a)--accuracy=[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
'(--color --gray -g)-c[Color on]' \\
'(--gray -c -g)--color[Color on]' \\
'(--color --gray -c)-g[Turn on gray-scale output]' \\
'(--color -c -g)--gray[Turn on gray-scale output]' \\
'--help[Display this help and exit.]' \\
'--optimization=[Optimization level]:Optimization?:(1 2 3 4 5)' \\
'*-v[Verbose logging (may be specified more than once)]' \\
'*--verbose[Verbose logging (may be specified more than once)]' \\
'--version[Display Twisted version and exit.]' \\
&& return 0
"""
# with sub-commands
testOutput2 = """#compdef silly2
_arguments -s -A "-*" \\
'*::subcmd:->subcmd' \\
':output file (*):_files -g "*"' \\
"(--accuracy)-a[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
"(-a)--accuracy=[Select the level of accuracy (1-3)]:Accuracy'\`?:(1 2 3)" \\
'(--another-param)-P[another-param]:another-param:_files' \\
'(-P)--another-param=[another-param]:another-param:_files' \\
'(--color --gray -g)-c[Color on]' \\
'(--gray -c -g)--color[Color on]' \\
'(--color --gray -c)-g[Turn on gray-scale output]' \\
'(--color -c -g)--gray[Turn on gray-scale output]' \\
'--help[Display this help and exit.]' \\
'--optimization=[Optimization level]:Optimization?:(1 2 3 4 5)' \\
'(--other-option)-o[other-option]' \\
'(-o)--other-option[other-option]' \\
'(--other-param)-p[other-param]:other-param:_files' \\
'(-p)--other-param=[other-param]:other-param:_files' \\
'--some-option[some-option]' \\
'--some-param=[some-param]:some-param:_files' \\
'*-v[Verbose logging (may be specified more than once)]' \\
'*--verbose[Verbose logging (may be specified more than once)]' \\
'--version[Display Twisted version and exit.]' \\
&& return 0
local _zsh_subcmds_array
_zsh_subcmds_array=(
"sub1:Sub Command 1"
"sub2:Sub Command 2"
)
_describe "sub-command" _zsh_subcmds_array
"""

View file

@@ -0,0 +1,151 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.trial.unittest import TestCase
from twisted.python.failure import Failure
try:
import syslog as stdsyslog
except ImportError:
stdsyslog = None
else:
from twisted.python import syslog
class SyslogObserverTests(TestCase):
"""
Tests for L{SyslogObserver} which sends Twisted log events to the syslog.
"""
events = None
if stdsyslog is None:
skip = "syslog is not supported on this platform"
def setUp(self):
self.patch(syslog.SyslogObserver, 'openlog', self.openlog)
self.patch(syslog.SyslogObserver, 'syslog', self.syslog)
self.observer = syslog.SyslogObserver('SyslogObserverTests')
def openlog(self, prefix, options, facility):
self.logOpened = (prefix, options, facility)
self.events = []
def syslog(self, options, message):
self.events.append((options, message))
def test_emitWithoutMessage(self):
"""
L{SyslogObserver.emit} ignores events with an empty value for the
C{'message'} key.
"""
self.observer.emit({'message': (), 'isError': False, 'system': '-'})
self.assertEqual(self.events, [])
def test_emitCustomPriority(self):
"""
L{SyslogObserver.emit} uses the value of the C{'syslogPriority'} as the
syslog priority, if that key is present in the event dictionary.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': False, 'system': '-',
'syslogPriority': stdsyslog.LOG_DEBUG})
self.assertEqual(
self.events,
[(stdsyslog.LOG_DEBUG, '[-] hello, world')])
def test_emitErrorPriority(self):
"""
L{SyslogObserver.emit} uses C{LOG_ALERT} if the event represents an
error.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': True, 'system': '-',
'failure': Failure(Exception("foo"))})
self.assertEqual(
self.events,
[(stdsyslog.LOG_ALERT, '[-] hello, world')])
def test_emitCustomPriorityOverridesError(self):
"""
L{SyslogObserver.emit} uses the value of the C{'syslogPriority'} key if
it is specified even if the event dictionary represents an error.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': True, 'system': '-',
'syslogPriority': stdsyslog.LOG_NOTICE,
'failure': Failure(Exception("bar"))})
self.assertEqual(
self.events,
[(stdsyslog.LOG_NOTICE, '[-] hello, world')])
def test_emitCustomFacility(self):
"""
L{SyslogObserver.emit} uses the value of the C{'syslogFacility'} key as the
syslog facility, if that key is present in the event dictionary.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': False, 'system': '-',
'syslogFacility': stdsyslog.LOG_CRON})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO | stdsyslog.LOG_CRON, '[-] hello, world')])
def test_emitCustomSystem(self):
"""
L{SyslogObserver.emit} uses the value of the C{'system'} key to prefix
the logged message.
"""
self.observer.emit({'message': ('hello, world',), 'isError': False,
'system': 'nonDefaultSystem'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, "[nonDefaultSystem] hello, world")])
def test_emitMessage(self):
"""
L{SyslogObserver.emit} logs the value of the C{'message'} key of the
event dictionary it is passed to the syslog.
"""
self.observer.emit({
'message': ('hello, world',), 'isError': False,
'system': '-'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, "[-] hello, world")])
def test_emitMultilineMessage(self):
"""
Each line of a multiline message is emitted separately to the syslog.
"""
self.observer.emit({
'message': ('hello,\nworld',), 'isError': False,
'system': '-'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, '[-] hello,'),
(stdsyslog.LOG_INFO, '[-] \tworld')])
def test_emitStripsTrailingEmptyLines(self):
"""
Trailing empty lines of a multiline message are omitted from the
messages sent to the syslog.
"""
self.observer.emit({
'message': ('hello,\nworld\n\n',), 'isError': False,
'system': '-'})
self.assertEqual(
self.events,
[(stdsyslog.LOG_INFO, '[-] hello,'),
(stdsyslog.LOG_INFO, '[-] \tworld')])
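# Illustrative sketch, not part of the original test module: constructing a
# real SyslogObserver and handing it one event of the same shape the tests
# above build by hand.  Guarded because the stdlib syslog module (and hence
# twisted.python.syslog) is unavailable on some platforms.
def _syslogSketch():
    if stdsyslog is None:
        return
    observer = syslog.SyslogObserver('example-prefix')
    observer.emit({'message': ('hello, world',),
                   'isError': False,
                   'system': '-'})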

View file

@@ -0,0 +1,173 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.systemd}.
"""
import os
from twisted.trial.unittest import TestCase
from twisted.python.systemd import ListenFDs
class InheritedDescriptorsMixin(object):
"""
Mixin for a L{TestCase} subclass which defines test methods for some kind of
systemd sd-daemon class. In particular, it defines tests for a
C{inheritedDescriptors} method.
"""
def test_inheritedDescriptors(self):
"""
C{inheritedDescriptors} returns a list of integers giving the file
descriptors which were inherited from systemd.
"""
sddaemon = self.getDaemon(7, 3)
self.assertEqual([7, 8, 9], sddaemon.inheritedDescriptors())
def test_repeated(self):
"""
Any subsequent calls to C{inheritedDescriptors} return the same list.
"""
sddaemon = self.getDaemon(7, 3)
self.assertEqual(
sddaemon.inheritedDescriptors(),
sddaemon.inheritedDescriptors())
class MemoryOnlyMixin(object):
"""
Mixin for a L{TestCase} subclass which creates a fake, in-memory
implementation of C{inheritedDescriptors}. This provides verification that
the fake behaves in a compatible way with the real implementation.
"""
def getDaemon(self, start, count):
"""
Invent C{count} new I{file descriptors} (actually integers, attached to
no real file description), starting at C{start}. Construct and return a
new L{ListenFDs} which will claim those integers represent inherited
file descriptors.
"""
return ListenFDs(range(start, start + count))
class EnvironmentMixin(object):
"""
Mixin for a L{TestCase} subclass which creates a real implementation of
C{inheritedDescriptors} which is based on the environment variables set by
systemd. To facilitate testing, this mixin will also create a fake
environment dictionary and add keys to it to make it look as if some
descriptors have been inherited.
"""
def initializeEnvironment(self, count, pid):
"""
Create a copy of the process environment and add I{LISTEN_FDS} and
I{LISTEN_PID} (the environment variables set by systemd) to it.
"""
result = os.environ.copy()
result['LISTEN_FDS'] = str(count)
result['LISTEN_PID'] = str(pid)
return result
def getDaemon(self, start, count):
"""
Create a new L{ListenFDs} instance, initialized with a fake environment
dictionary which will be set up as systemd would have set it up if
C{count} descriptors were being inherited. The descriptors will also
start at C{start}.
"""
fakeEnvironment = self.initializeEnvironment(count, os.getpid())
return ListenFDs.fromEnvironment(environ=fakeEnvironment, start=start)
class MemoryOnlyTests(MemoryOnlyMixin, InheritedDescriptorsMixin, TestCase):
"""
Apply tests to L{ListenFDs}, explicitly constructed with some fake file
descriptors.
"""
class EnvironmentTests(EnvironmentMixin, InheritedDescriptorsMixin, TestCase):
"""
Apply tests to L{ListenFDs}, constructed based on an environment dictionary.
"""
def test_secondEnvironment(self):
"""
Only a single L{ListenFDs} can extract inherited file descriptors.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid())
first = ListenFDs.fromEnvironment(environ=fakeEnvironment)
second = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual(range(3, 6), first.inheritedDescriptors())
self.assertEqual([], second.inheritedDescriptors())
def test_mismatchedPID(self):
"""
If the current process PID does not match the PID in the environment, no
inherited descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid() + 1)
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_missingPIDVariable(self):
"""
If the I{LISTEN_PID} environment variable is not present, no inherited
descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid())
del fakeEnvironment['LISTEN_PID']
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_nonIntegerPIDVariable(self):
"""
If the I{LISTEN_PID} environment variable is set to a string that cannot
be parsed as an integer, no inherited descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, "hello, world")
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_missingFDSVariable(self):
"""
If the I{LISTEN_FDS} environment variable is not present, no inherited
descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment(3, os.getpid())
del fakeEnvironment['LISTEN_FDS']
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_nonIntegerFDSVariable(self):
"""
If the I{LISTEN_FDS} environment variable is set to a string that cannot
be parsed as an integer, no inherited descriptors are reported.
"""
fakeEnvironment = self.initializeEnvironment("hello, world", os.getpid())
sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
self.assertEqual([], sddaemon.inheritedDescriptors())
def test_defaultEnviron(self):
"""
If the process environment is not explicitly passed to
L{ListenFDs.fromEnvironment}, the real process environment dictionary is
used.
"""
self.patch(os, 'environ', {
'LISTEN_PID': str(os.getpid()),
'LISTEN_FDS': '5'})
sddaemon = ListenFDs.fromEnvironment()
self.assertEqual(range(3, 3 + 5), sddaemon.inheritedDescriptors())
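# Illustrative sketch, not part of the original test module: reading
# systemd-style socket-activation variables the same way the tests above
# fake them.  Under real systemd the manager sets LISTEN_FDS and LISTEN_PID;
# here they are filled in explicitly so the sketch is self-contained.
def _listenFDsSketch():
    fakeEnvironment = os.environ.copy()
    fakeEnvironment['LISTEN_PID'] = str(os.getpid())
    fakeEnvironment['LISTEN_FDS'] = '2'
    sddaemon = ListenFDs.fromEnvironment(environ=fakeEnvironment)
    return sddaemon.inheritedDescriptors()   # [3, 4] with the default start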

View file

@@ -0,0 +1,27 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.textattributes}.
"""
from twisted.trial import unittest
from twisted.python._textattributes import DefaultFormattingState
class DefaultFormattingStateTests(unittest.TestCase):
"""
Tests for L{twisted.python._textattributes.DefaultFormattingState}.
"""
def test_equality(self):
"""
L{DefaultFormattingState}s are always equal to other
L{DefaultFormattingState}s.
"""
self.assertEqual(
DefaultFormattingState(),
DefaultFormattingState())
self.assertNotEquals(
DefaultFormattingState(),
'hello')

View file

@@ -0,0 +1,44 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.urlpath}.
"""
from twisted.trial import unittest
from twisted.python import urlpath
class URLPathTestCase(unittest.TestCase):
def setUp(self):
self.path = urlpath.URLPath.fromString("http://example.com/foo/bar?yes=no&no=yes#footer")
def testStringConversion(self):
self.assertEqual(str(self.path), "http://example.com/foo/bar?yes=no&no=yes#footer")
def testChildString(self):
self.assertEqual(str(self.path.child('hello')), "http://example.com/foo/bar/hello")
self.assertEqual(str(self.path.child('hello').child('')), "http://example.com/foo/bar/hello/")
def testSiblingString(self):
self.assertEqual(str(self.path.sibling('baz')), 'http://example.com/foo/baz')
# The sibling of http://example.com/foo/bar/
# is http://example.com/foo/bar/baz
# because really we are constructing a sibling of
# http://example.com/foo/bar/index.html
self.assertEqual(str(self.path.child('').sibling('baz')), 'http://example.com/foo/bar/baz')
def testParentString(self):
# parent should be equivalent to '..'
# 'foo' is the current directory, '/' is the parent directory
self.assertEqual(str(self.path.parent()), 'http://example.com/')
self.assertEqual(str(self.path.child('').parent()), 'http://example.com/foo/')
self.assertEqual(str(self.path.child('baz').parent()), 'http://example.com/foo/')
self.assertEqual(str(self.path.parent().parent().parent().parent().parent()), 'http://example.com/')
def testHereString(self):
# here should be equivalent to '.'
self.assertEqual(str(self.path.here()), 'http://example.com/foo/')
self.assertEqual(str(self.path.child('').here()), 'http://example.com/foo/bar/')
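# Illustrative sketch, not part of the original test module: the same
# child/sibling/parent/here relationships asserted above, walked through for
# a single URL.  The expected strings follow the assertions in the tests.
def _urlpathSketch():
    path = urlpath.URLPath.fromString("http://example.com/foo/bar")
    assert str(path.child('baz')) == "http://example.com/foo/bar/baz"
    assert str(path.sibling('baz')) == "http://example.com/foo/baz"
    assert str(path.parent()) == "http://example.com/"
    assert str(path.here()) == "http://example.com/foo/"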

File diff suppressed because it is too large

View file

@@ -0,0 +1,361 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.versions}.
"""
from __future__ import division, absolute_import
import sys
import operator
from io import BytesIO
from twisted.python.versions import getVersionString, IncomparableVersions
from twisted.python.versions import Version, _inf
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase as TestCase
VERSION_4_ENTRIES = b"""\
<?xml version="1.0" encoding="utf-8"?>
<wc-entries
xmlns="svn:">
<entry
committed-rev="18210"
name=""
committed-date="2006-09-21T04:43:09.542953Z"
url="svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk/twisted"
last-author="exarkun"
kind="dir"
uuid="bbbe8e31-12d6-0310-92fd-ac37d47ddeeb"
repos="svn+ssh://svn.twistedmatrix.com/svn/Twisted"
revision="18211"/>
</wc-entries>
"""
VERSION_8_ENTRIES = b"""\
8
dir
22715
svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk
"""
VERSION_9_ENTRIES = b"""\
9
dir
22715
svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk
"""
VERSION_10_ENTRIES = b"""\
10
dir
22715
svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk
"""
class VersionsTest(TestCase):
def test_versionComparison(self):
"""
Versions can be compared for equality and order.
"""
va = Version("dummy", 1, 0, 0)
vb = Version("dummy", 0, 1, 0)
self.assertTrue(va > vb)
self.assertTrue(vb < va)
self.assertTrue(va >= vb)
self.assertTrue(vb <= va)
self.assertTrue(va != vb)
self.assertTrue(vb == Version("dummy", 0, 1, 0))
self.assertTrue(vb == vb)
def test_comparingPrereleasesWithReleases(self):
"""
Prereleases are always less than versions without prereleases.
"""
va = Version("whatever", 1, 0, 0, prerelease=1)
vb = Version("whatever", 1, 0, 0)
self.assertTrue(va < vb)
self.assertFalse(va > vb)
self.assertNotEquals(vb, va)
def test_comparingPrereleases(self):
"""
The value specified as the prerelease is used in version comparisons.
"""
va = Version("whatever", 1, 0, 0, prerelease=1)
vb = Version("whatever", 1, 0, 0, prerelease=2)
self.assertTrue(va < vb)
self.assertTrue(vb > va)
self.assertTrue(va <= vb)
self.assertTrue(vb >= va)
self.assertTrue(va != vb)
self.assertTrue(vb == Version("whatever", 1, 0, 0, prerelease=2))
self.assertTrue(va == va)
def test_infComparison(self):
"""
L{_inf} is equal to L{_inf}.
This is a regression test.
"""
self.assertEqual(_inf, _inf)
def test_disallowBuggyComparisons(self):
"""
The package names of the Version objects need to be the same; comparing
L{Version}s of different packages raises L{IncomparableVersions}.
"""
self.assertRaises(IncomparableVersions,
operator.eq,
Version("dummy", 1, 0, 0),
Version("dumym", 1, 0, 0))
def test_notImplementedComparisons(self):
"""
Comparing a L{Version} to some other object type results in
C{NotImplemented}.
"""
va = Version("dummy", 1, 0, 0)
vb = ("dummy", 1, 0, 0) # a tuple is not a Version object
self.assertEqual(va.__cmp__(vb), NotImplemented)
def test_repr(self):
"""
Calling C{repr} on a version returns a human-readable string
representation of the version.
"""
self.assertEqual(repr(Version("dummy", 1, 2, 3)),
"Version('dummy', 1, 2, 3)")
def test_reprWithPrerelease(self):
"""
Calling C{repr} on a version with a prerelease returns a human-readable
string representation of the version including the prerelease.
"""
self.assertEqual(repr(Version("dummy", 1, 2, 3, prerelease=4)),
"Version('dummy', 1, 2, 3, prerelease=4)")
def test_str(self):
"""
Calling C{str} on a version returns a human-readable string
representation of the version.
"""
self.assertEqual(str(Version("dummy", 1, 2, 3)),
"[dummy, version 1.2.3]")
def test_strWithPrerelease(self):
"""
Calling C{str} on a version with a prerelease includes the prerelease.
"""
self.assertEqual(str(Version("dummy", 1, 0, 0, prerelease=1)),
"[dummy, version 1.0.0pre1]")
def testShort(self):
self.assertEqual(Version('dummy', 1, 2, 3).short(), '1.2.3')
def test_goodSVNEntries_4(self):
"""
Version should be able to parse an SVN format 4 entries file.
"""
version = Version("dummy", 1, 0, 0)
self.assertEqual(
version._parseSVNEntries_4(BytesIO(VERSION_4_ENTRIES)), b'18211')
def test_goodSVNEntries_8(self):
"""
Version should be able to parse an SVN format 8 entries file.
"""
version = Version("dummy", 1, 0, 0)
self.assertEqual(
version._parseSVNEntries_8(BytesIO(VERSION_8_ENTRIES)), b'22715')
def test_goodSVNEntries_9(self):
"""
Version should be able to parse an SVN format 9 entries file.
"""
version = Version("dummy", 1, 0, 0)
self.assertEqual(
version._parseSVNEntries_9(BytesIO(VERSION_9_ENTRIES)), b'22715')
def test_goodSVNEntriesTenPlus(self):
"""
Version should be able to parse an SVN format 10 entries file.
"""
version = Version("dummy", 1, 0, 0)
self.assertEqual(
version._parseSVNEntriesTenPlus(BytesIO(VERSION_10_ENTRIES)), b'22715')
def test_getVersionString(self):
"""
L{getVersionString} returns a string with the package name and the
short version number.
"""
self.assertEqual(
'Twisted 8.0.0', getVersionString(Version('Twisted', 8, 0, 0)))
def test_getVersionStringWithPrerelease(self):
"""
L{getVersionString} includes the prerelease, if any.
"""
self.assertEqual(
getVersionString(Version("whatever", 8, 0, 0, prerelease=1)),
"whatever 8.0.0pre1")
def test_base(self):
"""
The L{base} method returns a very simple representation of the version.
"""
self.assertEqual(Version("foo", 1, 0, 0).base(), "1.0.0")
def test_baseWithPrerelease(self):
"""
The base version includes 'preX' for versions with prereleases.
"""
self.assertEqual(Version("foo", 1, 0, 0, prerelease=8).base(),
"1.0.0pre8")
class FormatDiscoveryTests(TestCase):
"""
Tests which discover the parsing method based on the imported module name.
"""
def mktemp(self):
return TestCase.mktemp(self).encode("utf-8")
def setUp(self):
"""
Create a temporary directory with a package structure in it.
"""
self.entry = FilePath(self.mktemp())
self.preTestModules = sys.modules.copy()
sys.path.append(self.entry.path.decode('utf-8'))
pkg = self.entry.child(b"twisted_python_versions_package")
pkg.makedirs()
pkg.child(b"__init__.py").setContent(
b"from twisted.python.versions import Version\n"
b"version = Version('twisted_python_versions_package', 1, 0, 0)\n")
self.svnEntries = pkg.child(b".svn")
self.svnEntries.makedirs()
def tearDown(self):
"""
Remove the imported modules and sys.path modifications.
"""
sys.modules.clear()
sys.modules.update(self.preTestModules)
sys.path.remove(self.entry.path.decode('utf-8'))
def checkSVNFormat(self, formatVersion, entriesText, expectedRevision):
"""
Check for the given revision being detected after setting the SVN
entries text and format version of the test directory structure.
"""
self.svnEntries.child(b"format").setContent(formatVersion + b"\n")
self.svnEntries.child(b"entries").setContent(entriesText)
self.assertEqual(self.getVersion()._getSVNVersion(), expectedRevision)
def getVersion(self):
"""
Import and retrieve the Version object from our dynamically created
package.
"""
import twisted_python_versions_package
return twisted_python_versions_package.version
def test_detectVersion4(self):
"""
Verify that version 4 format file will be properly detected and parsed.
"""
self.checkSVNFormat(b"4", VERSION_4_ENTRIES, b'18211')
def test_detectVersion8(self):
"""
Verify that version 8 format files will be properly detected and
parsed.
"""
self.checkSVNFormat(b"8", VERSION_8_ENTRIES, b'22715')
def test_detectVersion9(self):
"""
Verify that version 9 format files will be properly detected and
parsed.
"""
self.checkSVNFormat(b"9", VERSION_9_ENTRIES, b'22715')
def test_unparseableEntries(self):
"""
Verify that the result is C{b"Unknown"} for an apparently supported
version for which parsing of the entries file fails.
"""
self.checkSVNFormat(b"4", b"some unsupported stuff", b"Unknown")
def test_detectVersion10(self):
"""
Verify that version 10 format files will be properly detected and
parsed.
Differing from previous formats, the version 10 format lacks a
I{format} file and B{only} has the version information on the first
line of the I{entries} file.
"""
self.svnEntries.child(b"entries").setContent(VERSION_10_ENTRIES)
self.assertEqual(self.getVersion()._getSVNVersion(), b'22715')
def test_detectUnknownVersion(self):
"""
Verify that a new version of SVN will result in the revision 'Unknown'.
"""
self.checkSVNFormat(b"some-random-new-version", b"ooga booga!", b'Unknown')
def test_getVersionStringWithRevision(self):
"""
L{getVersionString} includes the discovered revision number.
"""
self.svnEntries.child(b"format").setContent(b"9\n")
self.svnEntries.child(b"entries").setContent(VERSION_10_ENTRIES)
version = getVersionString(self.getVersion())
self.assertEqual(
"twisted_python_versions_package 1.0.0+r22715",
version)
self.assertTrue(isinstance(version, type("")))

View file

@@ -0,0 +1,70 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.win32}.
"""
from twisted.trial import unittest
from twisted.python.runtime import platform
from twisted.python import win32
class CommandLineQuotingTests(unittest.TestCase):
"""
Tests for L{cmdLineQuote}.
"""
def test_argWithoutSpaces(self):
"""
Calling C{cmdLineQuote} with an argument with no spaces should
return the argument unchanged.
"""
self.assertEqual(win32.cmdLineQuote('an_argument'), 'an_argument')
def test_argWithSpaces(self):
"""
Calling C{cmdLineQuote} with an argument containing spaces should
return the argument surrounded by quotes.
"""
self.assertEqual(win32.cmdLineQuote('An Argument'), '"An Argument"')
def test_emptyStringArg(self):
"""
Calling C{cmdLineQuote} with an empty string should return a
quoted empty string.
"""
self.assertEqual(win32.cmdLineQuote(''), '""')
class ProgramPathsTests(unittest.TestCase):
"""
Tests for L{getProgramsMenuPath} and L{getProgramFilesPath}.
"""
def test_getProgramsMenuPath(self):
"""
L{getProgramsMenuPath} guesses the programs menu path on non-win32
platforms. On win32 it will try to figure out the path by
examining the registry.
"""
if not platform.isWindows():
self.assertEqual(win32.getProgramsMenuPath(),
"C:\\Windows\\Start Menu\\Programs")
else:
self.assertIsInstance(win32.getProgramsMenuPath(), str)
def test_getProgramFilesPath(self):
"""
L{getProgramFilesPath} returns the "program files" path on win32.
"""
self.assertIsInstance(win32.getProgramFilesPath(), str)
if not platform.isWindows():
test_getProgramFilesPath.skip = (
"Cannot figure out the program files path on non-win32 platform")

View file

@ -0,0 +1,101 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases covering L{twisted.python.zippath}.
"""
import os, zipfile
from twisted.test.test_paths import AbstractFilePathTestCase
from twisted.python.zippath import ZipArchive
def zipit(dirname, zfname):
"""
Create a zipfile at zfname, containing the contents of dirname.
"""
zf = zipfile.ZipFile(zfname, "w")
for root, ignored, files in os.walk(dirname):
for fname in files:
fspath = os.path.join(root, fname)
arcpath = os.path.join(root, fname)[len(dirname)+1:]
# print fspath, '=>', arcpath
zf.write(fspath, arcpath)
zf.close()
class ZipFilePathTestCase(AbstractFilePathTestCase):
"""
Test various L{ZipPath} path manipulations as well as reprs for L{ZipPath}
and L{ZipArchive}.
"""
def setUp(self):
AbstractFilePathTestCase.setUp(self)
zipit(self.cmn, self.cmn + '.zip')
self.path = ZipArchive(self.cmn + '.zip')
self.root = self.path
self.all = [x.replace(self.cmn, self.cmn + '.zip') for x in self.all]
def test_zipPathRepr(self):
"""
Make sure that invoking ZipPath's repr prints the correct class name
and an absolute path to the zip file.
"""
child = self.path.child("foo")
pathRepr = "ZipPath(%r)" % (
os.path.abspath(self.cmn + ".zip" + os.sep + 'foo'),)
# Check for an absolute path
self.assertEqual(repr(child), pathRepr)
# Create a path to the file rooted in the current working directory
relativeCommon = self.cmn.replace(os.getcwd() + os.sep, "", 1) + ".zip"
relpath = ZipArchive(relativeCommon)
child = relpath.child("foo")
# Check using a path without the cwd prepended
self.assertEqual(repr(child), pathRepr)
def test_zipPathReprParentDirSegment(self):
"""
The repr of a ZipPath with C{".."} in the internal part of its path
includes the C{".."} rather than applying the usual parent directory
meaning.
"""
child = self.path.child("foo").child("..").child("bar")
pathRepr = "ZipPath(%r)" % (
self.cmn + ".zip" + os.sep.join(["", "foo", "..", "bar"]))
self.assertEqual(repr(child), pathRepr)
def test_zipPathReprEscaping(self):
"""
Bytes in the ZipPath path which have special meaning in Python
string literals are escaped in the ZipPath repr.
"""
child = self.path.child("'")
path = self.cmn + ".zip" + os.sep.join(["", "'"])
pathRepr = "ZipPath('%s')" % (path.encode('string-escape'),)
self.assertEqual(repr(child), pathRepr)
def test_zipArchiveRepr(self):
"""
Make sure that invoking ZipArchive's repr prints the correct class
name and an absolute path to the zip file.
"""
pathRepr = 'ZipArchive(%r)' % (os.path.abspath(self.cmn + '.zip'),)
# Check for an absolute path
self.assertEqual(repr(self.path), pathRepr)
# Create a path to the file rooted in the current working directory
relativeCommon = self.cmn.replace(os.getcwd() + os.sep, "", 1) + ".zip"
relpath = ZipArchive(relativeCommon)
# Check using a path without the cwd prepended
self.assertEqual(repr(relpath), pathRepr)
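
An illustrative sketch of the L{ZipArchive} traversal these tests exercise; the archive name and entry are hypothetical.

import zipfile
from twisted.python.zippath import ZipArchive

# Build a tiny archive to browse.
zf = zipfile.ZipFile('example.zip', 'w')
zf.writestr('foo/bar.txt', 'hello')
zf.close()

archive = ZipArchive('example.zip')
child = archive.child('foo').child('bar.txt')
print(repr(archive))  # ZipArchive('/abs/path/example.zip')
print(repr(child))    # ZipPath('/abs/path/example.zip/foo/bar.txt')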

View file

@ -0,0 +1,355 @@
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.zipstream}
"""
import random
import zipfile
from hashlib import md5
from twisted.python import zipstream, filepath
from twisted.trial import unittest
class FileEntryMixin:
"""
File entry classes should behave as file-like objects
"""
def getFileEntry(self, contents):
"""
Return an appropriate zip file entry
"""
filename = self.mktemp()
z = zipfile.ZipFile(filename, 'w', self.compression)
z.writestr('content', contents)
z.close()
z = zipstream.ChunkingZipFile(filename, 'r')
return z.readfile('content')
def test_isatty(self):
"""
zip files should not be ttys, so isatty() should be false
"""
self.assertEqual(self.getFileEntry('').isatty(), False)
def test_closed(self):
"""
The C{closed} attribute should reflect whether C{close()} has been
called.
"""
fileEntry = self.getFileEntry('')
self.assertEqual(fileEntry.closed, False)
fileEntry.close()
self.assertEqual(fileEntry.closed, True)
def test_readline(self):
"""
C{readline()} should mirror L{file.readline} and return up to a single
delimiter.
"""
fileEntry = self.getFileEntry('hoho\nho')
self.assertEqual(fileEntry.readline(), 'hoho\n')
self.assertEqual(fileEntry.readline(), 'ho')
self.assertEqual(fileEntry.readline(), '')
def test_next(self):
"""
Zip file entries should implement the iterator protocol as files do.
"""
fileEntry = self.getFileEntry('ho\nhoho')
self.assertEqual(fileEntry.next(), 'ho\n')
self.assertEqual(fileEntry.next(), 'hoho')
self.assertRaises(StopIteration, fileEntry.next)
def test_readlines(self):
"""
C{readlines()} should return a list of all the lines.
"""
fileEntry = self.getFileEntry('ho\nho\nho')
self.assertEqual(fileEntry.readlines(), ['ho\n', 'ho\n', 'ho'])
def test_iteration(self):
"""
C{__iter__()} and C{xreadlines()} should return C{self}.
"""
fileEntry = self.getFileEntry('')
self.assertIdentical(iter(fileEntry), fileEntry)
self.assertIdentical(fileEntry.xreadlines(), fileEntry)
def test_readWhole(self):
"""
C{.read()} should read the entire file.
"""
contents = "Hello, world!"
entry = self.getFileEntry(contents)
self.assertEqual(entry.read(), contents)
def test_readPartial(self):
"""
C{.read(num)} should read num bytes from the file.
"""
contents = "0123456789"
entry = self.getFileEntry(contents)
one = entry.read(4)
two = entry.read(200)
self.assertEqual(one, "0123")
self.assertEqual(two, "456789")
def test_tell(self):
"""
C{.tell()} should return the number of bytes that have been read so
far.
"""
contents = "x" * 100
entry = self.getFileEntry(contents)
entry.read(2)
self.assertEqual(entry.tell(), 2)
entry.read(4)
self.assertEqual(entry.tell(), 6)
class DeflatedZipFileEntryTest(FileEntryMixin, unittest.TestCase):
"""
DeflatedZipFileEntry should be file-like
"""
compression = zipfile.ZIP_DEFLATED
class ZipFileEntryTest(FileEntryMixin, unittest.TestCase):
"""
ZipFileEntry should be file-like
"""
compression = zipfile.ZIP_STORED
class ZipstreamTest(unittest.TestCase):
"""
Tests for twisted.python.zipstream
"""
def setUp(self):
"""
Creates junk data that can be compressed and a test directory for any
files that will be created
"""
self.testdir = filepath.FilePath(self.mktemp())
self.testdir.makedirs()
self.unzipdir = self.testdir.child('unzipped')
self.unzipdir.makedirs()
def makeZipFile(self, contents, directory=''):
"""
Makes a zip file archive containing len(contents) files. Contents
should be a list of strings, each string being the content of one file.
"""
zpfilename = self.testdir.child('zipfile.zip').path
zpfile = zipfile.ZipFile(zpfilename, 'w')
for i, content in enumerate(contents):
filename = str(i)
if directory:
filename = directory + "/" + filename
zpfile.writestr(filename, content)
zpfile.close()
return zpfilename
def test_invalidMode(self):
"""
A ChunkingZipFile opened in write-mode should not allow .readfile(),
and should raise a RuntimeError instead.
"""
czf = zipstream.ChunkingZipFile(self.mktemp(), "w")
self.assertRaises(RuntimeError, czf.readfile, "something")
def test_closedArchive(self):
"""
A closed ChunkingZipFile should raise a L{RuntimeError} when
.readfile() is invoked.
"""
czf = zipstream.ChunkingZipFile(self.makeZipFile(["something"]), "r")
czf.close()
self.assertRaises(RuntimeError, czf.readfile, "something")
def test_invalidHeader(self):
"""
A zipfile entry with the wrong magic number should raise BadZipfile for
readfile(), but that should not affect other files in the archive.
"""
fn = self.makeZipFile(["test contents",
"more contents"])
zf = zipfile.ZipFile(fn, "r")
zeroOffset = zf.getinfo("0").header_offset
zf.close()
# Zero out just the one header.
scribble = file(fn, "r+b")
scribble.seek(zeroOffset, 0)
scribble.write(chr(0) * 4)
scribble.close()
czf = zipstream.ChunkingZipFile(fn)
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
self.assertEqual(czf.readfile("1").read(), "more contents")
def test_filenameMismatch(self):
"""
A zipfile entry with a different filename than is found in the central
directory should raise BadZipfile.
"""
fn = self.makeZipFile(["test contents",
"more contents"])
zf = zipfile.ZipFile(fn, "r")
info = zf.getinfo("0")
info.filename = "not zero"
zf.close()
scribble = file(fn, "r+b")
scribble.seek(info.header_offset, 0)
scribble.write(info.FileHeader())
scribble.close()
czf = zipstream.ChunkingZipFile(fn)
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
self.assertEqual(czf.readfile("1").read(), "more contents")
def test_unsupportedCompression(self):
"""
A zipfile which describes an unsupported compression mechanism should
raise BadZipfile.
"""
fn = self.mktemp()
zf = zipfile.ZipFile(fn, "w")
zi = zipfile.ZipInfo("0")
zf.writestr(zi, "some data")
# Mangle its compression type in the central directory; can't do this
# before the writestr call or zipfile will (correctly) tell us not to
# pass bad compression types :)
zi.compress_type = 1234
zf.close()
czf = zipstream.ChunkingZipFile(fn)
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
def test_extraData(self):
"""
readfile() should skip over 'extra' data present in the zip metadata.
"""
fn = self.mktemp()
zf = zipfile.ZipFile(fn, 'w')
zi = zipfile.ZipInfo("0")
zi.extra = "hello, extra"
zf.writestr(zi, "the real data")
zf.close()
czf = zipstream.ChunkingZipFile(fn)
self.assertEqual(czf.readfile("0").read(), "the real data")
def test_unzipIterChunky(self):
"""
L{twisted.python.zipstream.unzipIterChunky} returns an iterator which
must be exhausted to completely unzip the input archive.
"""
numfiles = 10
contents = ['This is test file %d!' % i for i in range(numfiles)]
zpfilename = self.makeZipFile(contents)
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
self.assertEqual(
set(self.unzipdir.listdir()),
set(map(str, range(numfiles))))
for child in self.unzipdir.children():
num = int(child.basename())
self.assertEqual(child.getContent(), contents[num])
def test_unzipIterChunkyDirectory(self):
"""
The path to which a file is extracted by L{zipstream.unzipIterChunky}
is determined by joining the C{directory} argument to C{unzipIterChunky} with the
path within the archive of the file being extracted.
"""
numfiles = 10
contents = ['This is test file %d!' % i for i in range(numfiles)]
zpfilename = self.makeZipFile(contents, 'foo')
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
self.assertEqual(
set(self.unzipdir.child('foo').listdir()),
set(map(str, range(numfiles))))
for child in self.unzipdir.child('foo').children():
num = int(child.basename())
self.assertEqual(child.getContent(), contents[num])
# XXX these tests are kind of gross and old, but I think unzipIterChunky is
# kind of a gross function anyway. We should really write an abstract
# copyTo/moveTo that operates on FilePath and make sure ZipPath can support
# it, then just deprecate / remove this stuff.
def _unzipIterChunkyTest(self, compression, chunksize, lower, upper):
"""
unzipIterChunky should unzip the given number of bytes per iteration.
"""
junk = ' '.join([str(random.random()) for n in xrange(1000)])
junkmd5 = md5(junk).hexdigest()
tempdir = filepath.FilePath(self.mktemp())
tempdir.makedirs()
zfpath = tempdir.child('bigfile.zip').path
self._makebigfile(zfpath, compression, junk)
uziter = zipstream.unzipIterChunky(zfpath, tempdir.path,
chunksize=chunksize)
r = uziter.next()
# test that the number of chunks is in the right ballpark;
# this could theoretically be any number but statistically it
# should always be in this range
approx = lower < r < upper
self.failUnless(approx)
for r in uziter:
pass
self.assertEqual(r, 0)
newmd5 = md5(
tempdir.child("zipstreamjunk").open().read()).hexdigest()
self.assertEqual(newmd5, junkmd5)
def test_unzipIterChunkyStored(self):
"""
unzipIterChunky should unzip the given number of bytes per iteration on
a stored archive.
"""
self._unzipIterChunkyTest(zipfile.ZIP_STORED, 500, 35, 45)
def test_chunkyDeflated(self):
"""
unzipIterChunky should unzip the given number of bytes per iteration on
a deflated archive.
"""
self._unzipIterChunkyTest(zipfile.ZIP_DEFLATED, 972, 23, 27)
def _makebigfile(self, filename, compression, junk):
"""
Create a zip file with the given file name and compression scheme.
"""
zf = zipfile.ZipFile(filename, 'w', compression)
for i in range(10):
fn = 'zipstream%d' % i
zf.writestr(fn, "")
zf.writestr('zipstreamjunk', junk)
zf.close()
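
A short sketch of the incremental extraction pattern these tests describe: L{unzipIterChunky} yields the approximate number of chunks still to go and must be exhausted to finish unzipping. The archive and directory names are hypothetical.

from twisted.python import zipstream

# Extract 'archive.zip' into 'outdir' a chunk at a time, reporting progress.
for remaining in zipstream.unzipIterChunky('archive.zip', 'outdir',
                                           chunksize=4096):
    print('roughly %d chunks left' % (remaining,))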

View file

@ -0,0 +1,208 @@
# -*- test-case-name: twisted.test.test_text -*-
#
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Miscellany of text-munging functions.
"""
def stringyString(object, indentation=''):
"""
Expansive string formatting for sequence types.
C{list.__str__} and C{dict.__str__} use C{repr()} to display their
elements. This function also turns these sequence types
into strings, but uses C{str()} on their elements instead.
Sequence elements are also displayed on separate lines, and nested
sequences have nested indentation.
"""
braces = ''
sl = []
if type(object) is dict:
braces = '{}'
for key, value in object.items():
value = stringyString(value, indentation + ' ')
if isMultiline(value):
if endsInNewline(value):
value = value[:-len('\n')]
sl.append("%s %s:\n%s" % (indentation, key, value))
else:
# Oops. Will have to move that indentation.
sl.append("%s %s: %s" % (indentation, key,
value[len(indentation) + 3:]))
elif type(object) is tuple or type(object) is list:
if type(object) is tuple:
braces = '()'
else:
braces = '[]'
for element in object:
element = stringyString(element, indentation + ' ')
sl.append(element.rstrip() + ',')
else:
sl[:] = map(lambda s, i=indentation: i + s,
str(object).split('\n'))
if not sl:
sl.append(indentation)
if braces:
sl[0] = indentation + braces[0] + sl[0][len(indentation) + 1:]
sl[-1] = sl[-1] + braces[-1]
s = "\n".join(sl)
if isMultiline(s) and not endsInNewline(s):
s = s + '\n'
return s
def isMultiline(s):
"""
Returns C{True} if this string has a newline in it.
"""
return (s.find('\n') != -1)
def endsInNewline(s):
"""
Returns C{True} if this string ends in a newline.
"""
return (s[-len('\n'):] == '\n')
def greedyWrap(inString, width=80):
"""
Given a string and a column width, return a list of lines.
Caveat: I'm using a stupid greedy word-wrapping
algorithm. I won't put two spaces at the end
of a sentence. I don't do full justification.
And no, I've never even *heard* of hyphenation.
"""
outLines = []
#eww, evil hacks to allow paragraphs delimited by two \ns :(
if inString.find('\n\n') >= 0:
paragraphs = inString.split('\n\n')
for para in paragraphs:
outLines.extend(greedyWrap(para, width) + [''])
return outLines
inWords = inString.split()
column = 0
ptr_line = 0
while inWords:
column = column + len(inWords[ptr_line])
ptr_line = ptr_line + 1
if (column > width):
if ptr_line == 1:
# This single word is too long, it will be the whole line.
pass
else:
# We've gone too far, stop the line one word back.
ptr_line = ptr_line - 1
(l, inWords) = (inWords[0:ptr_line], inWords[ptr_line:])
outLines.append(' '.join(l))
ptr_line = 0
column = 0
elif not (len(inWords) > ptr_line):
# Clean up the last bit.
outLines.append(' '.join(inWords))
del inWords[:]
else:
# Space
column = column + 1
# next word
return outLines
wordWrap = greedyWrap
def removeLeadingBlanks(lines):
ret = []
for line in lines:
if ret or line.strip():
ret.append(line)
return ret
def removeLeadingTrailingBlanks(s):
lines = removeLeadingBlanks(s.split('\n'))
lines.reverse()
lines = removeLeadingBlanks(lines)
lines.reverse()
return '\n'.join(lines)+'\n'
def splitQuoted(s):
"""
Like a string split, but don't break substrings inside quotes.
>>> splitQuoted('the "hairy monkey" likes pie')
['the', 'hairy monkey', 'likes', 'pie']
Another one of those "someone must have a better solution for
this" things. This implementation is a VERY DUMB hack done too
quickly.
"""
out = []
quot = None
phrase = None
for word in s.split():
if phrase is None:
if word and (word[0] in ("\"", "'")):
quot = word[0]
word = word[1:]
phrase = []
if phrase is None:
out.append(word)
else:
if word and (word[-1] == quot):
word = word[:-1]
phrase.append(word)
out.append(" ".join(phrase))
phrase = None
else:
phrase.append(word)
return out
def strFile(p, f, caseSensitive=True):
"""
Find whether string C{p} occurs in a read()able object C{f}.
@rtype: C{bool}
"""
buf = ""
buf_len = max(len(p), 2**2**2**2)
if not caseSensitive:
p = p.lower()
while 1:
r = f.read(buf_len-len(p))
if not caseSensitive:
r = r.lower()
bytes_read = len(r)
if bytes_read == 0:
return False
l = len(buf)+bytes_read-buf_len
if l <= 0:
buf = buf + r
else:
buf = buf[l:] + r
if buf.find(p) != -1:
return True
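
A brief sketch of the helpers above in use; the expected results are shown as comments.

from twisted.python import text

# greedyWrap splits a string into lines no wider than the given width.
lines = text.greedyWrap("the quick brown fox jumps over the lazy dog", width=20)
# ['the quick brown fox', 'jumps over the lazy', 'dog']

# splitQuoted keeps quoted substrings together.
words = text.splitQuoted('the "hairy monkey" likes pie')
# ['the', 'hairy monkey', 'likes', 'pie']

# stringyString renders nested sequences with str() and indentation.
print(text.stringyString({'spam': ['eggs', 'ham']}))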

View file

@ -0,0 +1,139 @@
# -*- test-case-name: twisted.python.test_threadable -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A module to provide some very basic threading primitives, such as
synchronization.
"""
from __future__ import division, absolute_import
from functools import wraps
class DummyLock(object):
"""
Hack to allow locks to be unpickled on an unthreaded system.
"""
def __reduce__(self):
return (unpickle_lock, ())
def unpickle_lock():
if threadingmodule is not None:
return XLock()
else:
return DummyLock()
unpickle_lock.__safe_for_unpickling__ = True
def _synchPre(self):
if '_threadable_lock' not in self.__dict__:
_synchLockCreator.acquire()
if '_threadable_lock' not in self.__dict__:
self.__dict__['_threadable_lock'] = XLock()
_synchLockCreator.release()
self._threadable_lock.acquire()
def _synchPost(self):
self._threadable_lock.release()
def _sync(klass, function):
@wraps(function)
def sync(self, *args, **kwargs):
_synchPre(self)
try:
return function(self, *args, **kwargs)
finally:
_synchPost(self)
return sync
def synchronize(*klasses):
"""
Make all methods listed in each class' synchronized attribute synchronized.
The synchronized attribute should be a list of strings, consisting of the
names of methods that must be synchronized. If we are running in threaded
mode these methods will be wrapped with a lock.
"""
if threadingmodule is not None:
for klass in klasses:
for methodName in klass.synchronized:
sync = _sync(klass, klass.__dict__[methodName])
setattr(klass, methodName, sync)
def init(with_threads=1):
"""Initialize threading.
Don't bother calling this. If it needs to happen, it will happen.
"""
global threaded, _synchLockCreator, XLock
if with_threads:
if not threaded:
if threadingmodule is not None:
threaded = True
class XLock(threadingmodule._RLock, object):
def __reduce__(self):
return (unpickle_lock, ())
_synchLockCreator = XLock()
else:
raise RuntimeError("Cannot initialize threading, platform lacks thread support")
else:
if threaded:
raise RuntimeError("Cannot uninitialize threads")
else:
pass
_dummyID = object()
def getThreadID():
if threadingmodule is None:
return _dummyID
return threadingmodule.currentThread().ident
def isInIOThread():
"""Are we in the thread responsable for I/O requests (the event loop)?
"""
return ioThread == getThreadID()
def registerAsIOThread():
"""Mark the current thread as responsable for I/O requests.
"""
global ioThread
ioThread = getThreadID()
ioThread = None
threaded = False
try:
import threading as threadingmodule
except ImportError:
threadingmodule = None
else:
init(True)
__all__ = ['isInIOThread', 'registerAsIOThread', 'getThreadID', 'XLock']
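
A small sketch of the C{synchronized} convention that L{synchronize} implements; the class and method names are hypothetical.

from twisted.python import threadable

class Counter(object):
    # Method names listed here are wrapped with a per-instance lock
    # when a threading module is available.
    synchronized = ['increment']

    def __init__(self):
        self.value = 0

    def increment(self):
        self.value += 1

threadable.synchronize(Counter)
# Counter().increment() may now be called safely from several threads.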

View file

@ -0,0 +1,267 @@
# -*- test-case-name: twisted.test.test_threadpool -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
twisted.python.threadpool: a pool of threads to which we dispatch tasks.
In most cases you can just use C{reactor.callInThread} and friends
instead of creating a thread pool directly.
"""
from __future__ import division, absolute_import
try:
from Queue import Queue
except ImportError:
from queue import Queue
import contextlib
import threading
import copy
from twisted.python import log, context, failure
WorkerStop = object()
class ThreadPool:
"""
This class (hopefully) generalizes the functionality of a pool of
threads to which work can be dispatched.
L{callInThread} and L{stop} should only be called from
a single thread, unless you make a subclass where L{stop} and
L{_startSomeWorkers} are synchronized.
@ivar started: Whether or not the thread pool is currently running.
@type started: L{bool}
@ivar threads: List of workers currently running in this thread pool.
@type threads: L{list}
"""
min = 5
max = 20
joined = False
started = False
workers = 0
name = None
threadFactory = threading.Thread
currentThread = staticmethod(threading.currentThread)
def __init__(self, minthreads=5, maxthreads=20, name=None):
"""
Create a new threadpool.
@param minthreads: minimum number of threads in the pool
@param maxthreads: maximum number of threads in the pool
"""
assert minthreads >= 0, 'minimum is negative'
assert minthreads <= maxthreads, 'minimum is greater than maximum'
self.q = Queue(0)
self.min = minthreads
self.max = maxthreads
self.name = name
self.waiters = []
self.threads = []
self.working = []
def start(self):
"""
Start the threadpool.
"""
self.joined = False
self.started = True
# Start some threads.
self.adjustPoolsize()
def startAWorker(self):
self.workers += 1
name = "PoolThread-%s-%s" % (self.name or id(self), self.workers)
newThread = self.threadFactory(target=self._worker, name=name)
self.threads.append(newThread)
newThread.start()
def stopAWorker(self):
self.q.put(WorkerStop)
self.workers -= 1
def __setstate__(self, state):
self.__dict__ = state
ThreadPool.__init__(self, self.min, self.max)
def __getstate__(self):
state = {}
state['min'] = self.min
state['max'] = self.max
return state
def _startSomeWorkers(self):
neededSize = self.q.qsize() + len(self.working)
# Create enough, but not too many
while self.workers < min(self.max, neededSize):
self.startAWorker()
def callInThread(self, func, *args, **kw):
"""
Call a callable object in a separate thread.
@param func: callable object to be called in separate thread
@param *args: positional arguments to be passed to C{func}
@param **kw: keyword args to be passed to C{func}
"""
self.callInThreadWithCallback(None, func, *args, **kw)
def callInThreadWithCallback(self, onResult, func, *args, **kw):
"""
Call a callable object in a separate thread and call C{onResult}
with the return value, or a L{twisted.python.failure.Failure}
if the callable raises an exception.
The callable is allowed to block, but the C{onResult} function
must not block and should perform as little work as possible.
A typical action for C{onResult} for a threadpool used with a
Twisted reactor would be to schedule a
L{twisted.internet.defer.Deferred} to fire in the main
reactor thread using C{.callFromThread}. Note that C{onResult}
is called inside the separate thread, not inside the reactor thread.
@param onResult: a callable with the signature C{(success, result)}.
If the callable returns normally, C{onResult} is called with
C{(True, result)} where C{result} is the return value of the
callable. If the callable throws an exception, C{onResult} is
called with C{(False, failure)}.
Optionally, C{onResult} may be C{None}, in which case it is not
called at all.
@param func: callable object to be called in separate thread
@param *args: positional arguments to be passed to C{func}
@param **kw: keyword arguments to be passed to C{func}
"""
if self.joined:
return
ctx = context.theContextTracker.currentContext().contexts[-1]
o = (ctx, func, args, kw, onResult)
self.q.put(o)
if self.started:
self._startSomeWorkers()
@contextlib.contextmanager
def _workerState(self, stateList, workerThread):
"""
Manages adding and removing this worker from a list of workers
in a particular state.
@param stateList: the list managing workers in this state
@param workerThread: the thread the worker is running in, used to
represent the worker in stateList
"""
stateList.append(workerThread)
try:
yield
finally:
stateList.remove(workerThread)
def _worker(self):
"""
Method used as target of the created threads: retrieve a task to run
from the threadpool, run it, and proceed to the next task until
threadpool is stopped.
"""
ct = self.currentThread()
o = self.q.get()
while o is not WorkerStop:
with self._workerState(self.working, ct):
ctx, function, args, kwargs, onResult = o
del o
try:
result = context.call(ctx, function, *args, **kwargs)
success = True
except:
success = False
if onResult is None:
context.call(ctx, log.err)
result = None
else:
result = failure.Failure()
del function, args, kwargs
if onResult is not None:
try:
context.call(ctx, onResult, success, result)
except:
context.call(ctx, log.err)
del ctx, onResult, result
with self._workerState(self.waiters, ct):
o = self.q.get()
self.threads.remove(ct)
def stop(self):
"""
Shut down the threads in the threadpool.
"""
self.joined = True
self.started = False
threads = copy.copy(self.threads)
while self.workers:
self.q.put(WorkerStop)
self.workers -= 1
# and let's just make sure
# FIXME: threads that have died before calling stop() are not joined.
for thread in threads:
thread.join()
def adjustPoolsize(self, minthreads=None, maxthreads=None):
if minthreads is None:
minthreads = self.min
if maxthreads is None:
maxthreads = self.max
assert minthreads >= 0, 'minimum is negative'
assert minthreads <= maxthreads, 'minimum is greater than maximum'
self.min = minthreads
self.max = maxthreads
if not self.started:
return
# Kill off some threads if we have too many.
while self.workers > self.max:
self.stopAWorker()
# Start some threads if we have too few.
while self.workers < self.min:
self.startAWorker()
# Start some threads if there is a need.
self._startSomeWorkers()
def dumpStats(self):
log.msg('queue: %s' % self.q.queue)
log.msg('waiters: %s' % self.waiters)
log.msg('workers: %s' % self.working)
log.msg('total: %s' % self.threads)
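
A compact sketch of using the pool outside the reactor, along the lines the docstrings above describe; the worker function and names are hypothetical.

from twisted.python.threadpool import ThreadPool

def work(x, y):
    return x + y

def report(success, result):
    # Called in the worker thread with (True, return value) on success,
    # or (False, Failure) if the callable raised.
    print(success, result)

pool = ThreadPool(minthreads=1, maxthreads=4, name='example')
pool.start()
pool.callInThreadWithCallback(report, work, 2, 3)
pool.stop()  # queues WorkerStop and joins the workers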

View file

@ -0,0 +1,33 @@
#compdef twistd trial conch cftp tapconvert ckeygen lore pyhtmlizer tap2deb tkconch manhole tap2rpm
#
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked all
# over the user's terminal if completing options for a deprecated command.
# Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,122 @@
# -*- test-case-name: twisted.test.test_paths -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
#
import urlparse
import urllib
class URLPath:
def __init__(self, scheme='', netloc='localhost', path='',
query='', fragment=''):
self.scheme = scheme or 'http'
self.netloc = netloc
self.path = path or '/'
self.query = query
self.fragment = fragment
_qpathlist = None
_uqpathlist = None
def pathList(self, unquote=0, copy=1):
if self._qpathlist is None:
self._qpathlist = self.path.split('/')
self._uqpathlist = map(urllib.unquote, self._qpathlist)
if unquote:
result = self._uqpathlist
else:
result = self._qpathlist
if copy:
return result[:]
else:
return result
def fromString(klass, st):
t = urlparse.urlsplit(st)
u = klass(*t)
return u
fromString = classmethod(fromString)
def fromRequest(klass, request):
return klass.fromString(request.prePathURL())
fromRequest = classmethod(fromRequest)
def _pathMod(self, newpathsegs, keepQuery):
if keepQuery:
query = self.query
else:
query = ''
return URLPath(self.scheme,
self.netloc,
'/'.join(newpathsegs),
query)
def sibling(self, path, keepQuery=0):
l = self.pathList()
l[-1] = path
return self._pathMod(l, keepQuery)
def child(self, path, keepQuery=0):
l = self.pathList()
if l[-1] == '':
l[-1] = path
else:
l.append(path)
return self._pathMod(l, keepQuery)
def parent(self, keepQuery=0):
l = self.pathList()
if l[-1] == '':
del l[-2]
else:
# We are a file, such as http://example.com/foo/bar
# our parent directory is http://example.com/
l.pop()
l[-1] = ''
return self._pathMod(l, keepQuery)
def here(self, keepQuery=0):
l = self.pathList()
if l[-1] != '':
l[-1] = ''
return self._pathMod(l, keepQuery)
def click(self, st):
"""Return a path which is the URL where a browser would presumably take
you if you clicked on a link with an HREF as given.
"""
scheme, netloc, path, query, fragment = urlparse.urlsplit(st)
if not scheme:
scheme = self.scheme
if not netloc:
netloc = self.netloc
if not path:
path = self.path
if not query:
query = self.query
elif path[0] != '/':
l = self.pathList()
l[-1] = path
path = '/'.join(l)
return URLPath(scheme,
netloc,
path,
query,
fragment)
def __str__(self):
x = urlparse.urlunsplit((
self.scheme, self.netloc, self.path,
self.query, self.fragment))
return x
def __repr__(self):
return ('URLPath(scheme=%r, netloc=%r, path=%r, query=%r, fragment=%r)'
% (self.scheme, self.netloc, self.path, self.query, self.fragment))
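
A short sketch of the path manipulations above, assuming this module is importable as C{twisted.python.urlpath}; the URLs are hypothetical.

from twisted.python.urlpath import URLPath

url = URLPath.fromString('http://example.com/foo/bar?q=1')
print(url.sibling('baz'))    # http://example.com/foo/baz
print(url.child('quux'))     # http://example.com/foo/bar/quux
print(url.parent())          # http://example.com/
print(url.click('baz?x=2'))  # http://example.com/foo/baz?x=2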

View file

@ -0,0 +1,973 @@
# -*- test-case-name: twisted.test.test_usage -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
twisted.python.usage is a module for parsing/handling the
command line of your program.
For information on how to use it, see
U{http://twistedmatrix.com/projects/core/documentation/howto/options.html},
or doc/core/howto/options.xhtml in your Twisted directory.
"""
# System Imports
import os
import sys
import getopt
from os import path
# Sibling Imports
from twisted.python import reflect, text, util
class UsageError(Exception):
pass
error = UsageError
class CoerceParameter(object):
"""
Utility class that can coerce a parameter before storing it.
"""
def __init__(self, options, coerce):
"""
@param options: parent Options object
@param coerce: callable used to coerce the value.
"""
self.options = options
self.coerce = coerce
self.doc = getattr(self.coerce, 'coerceDoc', '')
def dispatch(self, parameterName, value):
"""
When called via dispatch, coerce C{value} and save the
returned value.
"""
if value is None:
raise UsageError("Parameter '%s' requires an argument."
% (parameterName,))
try:
value = self.coerce(value)
except ValueError, e:
raise UsageError("Parameter type enforcement failed: %s" % (e,))
self.options.opts[parameterName] = value
class Options(dict):
"""
An option list parser class
C{optFlags} and C{optParameters} are lists of available parameters
which your program can handle. The difference between the two
is that 'flags' have an on(1) or off(0) state (off by default)
whereas 'parameters' have an assigned value, with an optional
default. (Compare '--verbose' and '--verbosity=2')
optFlags is assigned a list of lists. Each list represents
a flag parameter, as so::
| optFlags = [['verbose', 'v', 'Makes it tell you what it doing.'],
| ['quiet', 'q', 'Be vewy vewy quiet.']]
As you can see, the first item is the long option name
(prefixed with '--' on the command line), followed by the
short option name (prefixed with '-'), and the description.
The description is used for the built-in handling of the
--help switch, which prints a usage summary.
C{optParameters} is much the same, except the list also contains
a default value::
| optParameters = [['outfile', 'O', 'outfile.log', 'Description...']]
A coerce function can also be specified as the last element: it will be
called with the argument and should return the value that will be stored
for the option. This function can have a C{coerceDoc} attribute which
will be appended to the documentation of the option.
subCommands is a list of 4-tuples of (command name, command shortcut,
parser class, documentation). If the first non-option argument found is
one of the given command names, an instance of the given parser class is
instantiated and given the remainder of the arguments to parse and
self.opts[command] is set to the command name. For example::
| subCommands = [
| ['inquisition', 'inquest', InquisitionOptions,
| 'Perform an inquisition'],
| ['holyquest', 'quest', HolyQuestOptions,
| 'Embark upon a holy quest']
| ]
In this case, C{"<program> holyquest --horseback --for-grail"} will cause
C{HolyQuestOptions} to be instantiated and asked to parse
C{['--horseback', '--for-grail']}. Currently, only the first sub-command
is parsed, and all options following it are passed to its parser. If a
subcommand is found, the subCommand attribute is set to its name and the
subOptions attribute is set to the Option instance that parses the
remaining options. If a subcommand is not given to parseOptions,
the subCommand attribute will be None. You can also mark one of
the subCommands to be the default.
| defaultSubCommand = 'holyquest'
In this case, the subCommand attribute will never be None, and
the subOptions attribute will always be set.
If you want to handle your own options, define a method named
C{opt_paramname} that takes C{(self, option)} as arguments. C{option}
will be whatever immediately follows the parameter on the
command line. Options fully supports the mapping interface, so you
can do things like C{'self["option"] = val'} in these methods.
Shell tab-completion is supported by this class, for zsh only at present.
Zsh ships with a stub file ("completion function") which, for Twisted
commands, performs tab-completion on-the-fly using the support provided
by this class. The stub file lives in our tree at
C{twisted/python/twisted-completion.zsh}, and in the Zsh tree at
C{Completion/Unix/Command/_twisted}.
Tab-completion is based upon the contents of the optFlags and optParameters
lists. And, optionally, additional metadata may be provided by assigning a
special attribute, C{compData}, which should be an instance of
C{Completions}. See that class for details of what can and should be
included - and see the howto for additional help using these features -
including how third-parties may take advantage of tab-completion for their
own commands.
Advanced functionality is covered in the howto documentation,
available at
U{http://twistedmatrix.com/projects/core/documentation/howto/options.html},
or doc/core/howto/options.xhtml in your Twisted directory.
"""
subCommand = None
defaultSubCommand = None
parent = None
completionData = None
_shellCompFile = sys.stdout # file to use if shell completion is requested
def __init__(self):
super(Options, self).__init__()
self.opts = self
self.defaults = {}
# These are strings/lists we will pass to getopt
self.longOpt = []
self.shortOpt = ''
self.docs = {}
self.synonyms = {}
self._dispatch = {}
collectors = [
self._gather_flags,
self._gather_parameters,
self._gather_handlers,
]
for c in collectors:
(longOpt, shortOpt, docs, settings, synonyms, dispatch) = c()
self.longOpt.extend(longOpt)
self.shortOpt = self.shortOpt + shortOpt
self.docs.update(docs)
self.opts.update(settings)
self.defaults.update(settings)
self.synonyms.update(synonyms)
self._dispatch.update(dispatch)
def __hash__(self):
"""
Define a custom hash function so that Options instances can be used
as dictionary keys. This is an internal feature used to implement
the parser. Do not rely on it in application code.
"""
return int(id(self) % sys.maxint)
def opt_help(self):
"""
Display this help and exit.
"""
print self.__str__()
sys.exit(0)
def opt_version(self):
"""
Display Twisted version and exit.
"""
from twisted import copyright
print "Twisted version:", copyright.version
sys.exit(0)
#opt_h = opt_help # this conflicted with existing 'host' options.
def parseOptions(self, options=None):
"""
The guts of the command-line parser.
"""
if options is None:
options = sys.argv[1:]
# we really do need to place the shell completion check here, because
# if we used an opt_shell_completion method then it would be possible
# for other opt_* methods to be run first, and they could possibly
# raise validation errors which would result in error output on the
# terminal of the user performing shell completion. Validation errors
# would occur quite frequently, in fact, because users often initiate
# tab-completion while they are editing an unfinished command-line.
if len(options) > 1 and options[-2] == "--_shell-completion":
from twisted.python import _shellcomp
cmdName = path.basename(sys.argv[0])
_shellcomp.shellComplete(self, cmdName, options,
self._shellCompFile)
sys.exit(0)
try:
opts, args = getopt.getopt(options,
self.shortOpt, self.longOpt)
except getopt.error, e:
raise UsageError(str(e))
for opt, arg in opts:
if opt[1] == '-':
opt = opt[2:]
else:
opt = opt[1:]
optMangled = opt
if optMangled not in self.synonyms:
optMangled = opt.replace("-", "_")
if optMangled not in self.synonyms:
raise UsageError("No such option '%s'" % (opt,))
optMangled = self.synonyms[optMangled]
if isinstance(self._dispatch[optMangled], CoerceParameter):
self._dispatch[optMangled].dispatch(optMangled, arg)
else:
self._dispatch[optMangled](optMangled, arg)
if (getattr(self, 'subCommands', None)
and (args or self.defaultSubCommand is not None)):
if not args:
args = [self.defaultSubCommand]
sub, rest = args[0], args[1:]
for (cmd, short, parser, doc) in self.subCommands:
if sub == cmd or sub == short:
self.subCommand = cmd
self.subOptions = parser()
self.subOptions.parent = self
self.subOptions.parseOptions(rest)
break
else:
raise UsageError("Unknown command: %s" % sub)
else:
try:
self.parseArgs(*args)
except TypeError:
raise UsageError("Wrong number of arguments.")
self.postOptions()
def postOptions(self):
"""
I am called after the options are parsed.
Override this method in your subclass to do something after
the options have been parsed and assigned, like validate that
all options are sane.
"""
def parseArgs(self):
"""
I am called with any leftover arguments which were not options.
Override me to do something with the remaining arguments on
the command line, those which were not flags or options. e.g.
interpret them as a list of files to operate on.
Note that if there are more arguments on the command line
than this method accepts, parseArgs will blow up with
a getopt.error. This means if you don't override me,
parseArgs will blow up if I am passed any arguments at
all!
"""
def _generic_flag(self, flagName, value=None):
if value not in ('', None):
raise UsageError("Flag '%s' takes no argument."
" Not even \"%s\"." % (flagName, value))
self.opts[flagName] = 1
def _gather_flags(self):
"""
Gather up boolean (flag) options.
"""
longOpt, shortOpt = [], ''
docs, settings, synonyms, dispatch = {}, {}, {}, {}
flags = []
reflect.accumulateClassList(self.__class__, 'optFlags', flags)
for flag in flags:
long, short, doc = util.padTo(3, flag)
if not long:
raise ValueError("A flag cannot be without a name.")
docs[long] = doc
settings[long] = 0
if short:
shortOpt = shortOpt + short
synonyms[short] = long
longOpt.append(long)
synonyms[long] = long
dispatch[long] = self._generic_flag
return longOpt, shortOpt, docs, settings, synonyms, dispatch
def _gather_parameters(self):
"""
Gather options which take a value.
"""
longOpt, shortOpt = [], ''
docs, settings, synonyms, dispatch = {}, {}, {}, {}
parameters = []
reflect.accumulateClassList(self.__class__, 'optParameters',
parameters)
synonyms = {}
for parameter in parameters:
long, short, default, doc, paramType = util.padTo(5, parameter)
if not long:
raise ValueError("A parameter cannot be without a name.")
docs[long] = doc
settings[long] = default
if short:
shortOpt = shortOpt + short + ':'
synonyms[short] = long
longOpt.append(long + '=')
synonyms[long] = long
if paramType is not None:
dispatch[long] = CoerceParameter(self, paramType)
else:
dispatch[long] = CoerceParameter(self, str)
return longOpt, shortOpt, docs, settings, synonyms, dispatch
def _gather_handlers(self):
"""
Gather up options with their own handler methods.
This returns a tuple of many values. Amongst those values is a
synonyms dictionary, mapping all of the possible aliases (C{str})
for an option to the longest spelling of that option's name
(C{str}).
Another element is a dispatch dictionary, mapping each user-facing
option name (with - substituted for _) to a callable to handle that
option.
"""
longOpt, shortOpt = [], ''
docs, settings, synonyms, dispatch = {}, {}, {}, {}
dct = {}
reflect.addMethodNamesToDict(self.__class__, dct, "opt_")
for name in dct.keys():
method = getattr(self, 'opt_'+name)
takesArg = not flagFunction(method, name)
prettyName = name.replace('_', '-')
doc = getattr(method, '__doc__', None)
if doc:
## Only use the first line.
#docs[name] = doc.split('\n')[0]
docs[prettyName] = doc
else:
docs[prettyName] = self.docs.get(prettyName)
synonyms[prettyName] = prettyName
# A little sleight-of-hand here makes dispatching much easier
# in parseOptions, as it makes all option-methods have the
# same signature.
if takesArg:
fn = lambda name, value, m=method: m(value)
else:
# XXX: This won't raise a TypeError if it's called
# with a value when it shouldn't be.
fn = lambda name, value=None, m=method: m()
dispatch[prettyName] = fn
if len(name) == 1:
shortOpt = shortOpt + name
if takesArg:
shortOpt = shortOpt + ':'
else:
if takesArg:
prettyName = prettyName + '='
longOpt.append(prettyName)
reverse_dct = {}
# Map synonyms
for name in dct.keys():
method = getattr(self, 'opt_' + name)
if method not in reverse_dct:
reverse_dct[method] = []
reverse_dct[method].append(name.replace('_', '-'))
cmpLength = lambda a, b: cmp(len(a), len(b))
for method, names in reverse_dct.items():
if len(names) < 2:
continue
names_ = names[:]
names_.sort(cmpLength)
longest = names_.pop()
for name in names_:
synonyms[name] = longest
return longOpt, shortOpt, docs, settings, synonyms, dispatch
def __str__(self):
return self.getSynopsis() + '\n' + self.getUsage(width=None)
def getSynopsis(self):
"""
Returns a string containing a description of these options and how to
pass them to the executed file.
"""
default = "%s%s" % (path.basename(sys.argv[0]),
(self.longOpt and " [options]") or '')
if self.parent is None:
default = "Usage: %s%s" % (path.basename(sys.argv[0]),
(self.longOpt and " [options]") or '')
else:
default = '%s' % ((self.longOpt and "[options]") or '')
synopsis = getattr(self, "synopsis", default)
synopsis = synopsis.rstrip()
if self.parent is not None:
synopsis = ' '.join((self.parent.getSynopsis(),
self.parent.subCommand, synopsis))
return synopsis
def getUsage(self, width=None):
# If subOptions exists by now, then there was probably an error while
# parsing its options.
if hasattr(self, 'subOptions'):
return self.subOptions.getUsage(width=width)
if not width:
width = int(os.environ.get('COLUMNS', '80'))
if hasattr(self, 'subCommands'):
cmdDicts = []
for (cmd, short, parser, desc) in self.subCommands:
cmdDicts.append(
{'long': cmd,
'short': short,
'doc': desc,
'optType': 'command',
'default': None
})
chunks = docMakeChunks(cmdDicts, width)
commands = 'Commands:\n' + ''.join(chunks)
else:
commands = ''
longToShort = {}
for key, value in self.synonyms.items():
longname = value
if (key != longname) and (len(key) == 1):
longToShort[longname] = key
else:
if longname not in longToShort:
longToShort[longname] = None
else:
pass
optDicts = []
for opt in self.longOpt:
if opt[-1] == '=':
optType = 'parameter'
opt = opt[:-1]
else:
optType = 'flag'
optDicts.append(
{'long': opt,
'short': longToShort[opt],
'doc': self.docs[opt],
'optType': optType,
'default': self.defaults.get(opt, None),
'dispatch': self._dispatch.get(opt, None)
})
if not (getattr(self, "longdesc", None) is None):
longdesc = self.longdesc
else:
import __main__
if getattr(__main__, '__doc__', None):
longdesc = __main__.__doc__
else:
longdesc = ''
if longdesc:
longdesc = ('\n' +
'\n'.join(text.wordWrap(longdesc, width)).strip()
+ '\n')
if optDicts:
chunks = docMakeChunks(optDicts, width)
s = "Options:\n%s" % (''.join(chunks))
else:
s = "Options: None\n"
return s + longdesc + commands
#def __repr__(self):
# XXX: It'd be cool if we could return a succinct representation
# of which flags and options are set here.
_ZSH = 'zsh'
_BASH = 'bash'
class Completer(object):
"""
A completion "action" - provides completion possibilities for a particular
command-line option. For example we might provide the user a fixed list of
choices, or files/dirs according to a glob.
This class produces no completion matches itself - see the various
subclasses for specific completion functionality.
"""
_descr = None
def __init__(self, descr=None, repeat=False):
"""
@type descr: C{str}
@param descr: An optional descriptive string displayed above matches.
@type repeat: C{bool}
@param repeat: A flag, defaulting to False, indicating whether this
C{Completer} should repeat - that is, be used to complete more
than one command-line word. This may ONLY be set to True for
actions in the C{extraActions} keyword argument to C{Completions}.
And ONLY if it is the LAST (or only) action in the C{extraActions}
list.
"""
if descr is not None:
self._descr = descr
self._repeat = repeat
def _getRepeatFlag(self):
if self._repeat:
return "*"
else:
return ""
_repeatFlag = property(_getRepeatFlag)
def _description(self, optName):
if self._descr is not None:
return self._descr
else:
return optName
def _shellCode(self, optName, shellType):
"""
Fetch a fragment of shell code representing this action which is
suitable for use by the completion system in _shellcomp.py
@type optName: C{str}
@param optName: The long name of the option this action is being
used for.
@type shellType: C{str}
@param shellType: One of the supported shell constants e.g.
C{twisted.python.usage._ZSH}
"""
if shellType == _ZSH:
return "%s:%s:" % (self._repeatFlag,
self._description(optName))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteFiles(Completer):
"""
Completes file names based on a glob pattern
"""
def __init__(self, globPattern='*', **kw):
Completer.__init__(self, **kw)
self._globPattern = globPattern
def _description(self, optName):
if self._descr is not None:
return "%s (%s)" % (self._descr, self._globPattern)
else:
return "%s (%s)" % (optName, self._globPattern)
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_files -g \"%s\"" % (self._repeatFlag,
self._description(optName),
self._globPattern,)
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteDirs(Completer):
"""
Completes directory names
"""
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_directories" % (self._repeatFlag,
self._description(optName))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteList(Completer):
"""
Completes based on a fixed list of words
"""
def __init__(self, items, **kw):
Completer.__init__(self, **kw)
self._items = items
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:(%s)" % (self._repeatFlag,
self._description(optName),
" ".join(self._items,))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteMultiList(Completer):
"""
Completes multiple comma-separated items based on a fixed list of words
"""
def __init__(self, items, **kw):
Completer.__init__(self, **kw)
self._items = items
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_values -s , '%s' %s" % (self._repeatFlag,
self._description(optName),
self._description(optName),
" ".join(self._items))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteUsernames(Completer):
"""
Complete usernames
"""
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_users" % (self._repeatFlag,
self._description(optName))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteGroups(Completer):
"""
Complete system group names
"""
_descr = 'group'
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_groups" % (self._repeatFlag,
self._description(optName))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteHostnames(Completer):
"""
Complete hostnames
"""
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_hosts" % (self._repeatFlag,
self._description(optName))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteUserAtHost(Completer):
"""
A completion action which produces matches in any of these forms::
<username>
<hostname>
<username>@<hostname>
"""
_descr = 'host | user@host'
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
# Yes this looks insane but it does work. For bonus points
# add code to grep 'Hostname' lines from ~/.ssh/config
return ('%s:%s:{_ssh;if compset -P "*@"; '
'then _wanted hosts expl "remote host name" _ssh_hosts '
'&& ret=0 elif compset -S "@*"; then _wanted users '
'expl "login name" _ssh_users -S "" && ret=0 '
'else if (( $+opt_args[-l] )); then tmp=() '
'else tmp=( "users:login name:_ssh_users -qS@" ) fi; '
'_alternative "hosts:remote host name:_ssh_hosts" "$tmp[@]"'
' && ret=0 fi}' % (self._repeatFlag,
self._description(optName)))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class CompleteNetInterfaces(Completer):
"""
Complete network interface names
"""
def _shellCode(self, optName, shellType):
if shellType == _ZSH:
return "%s:%s:_net_interfaces" % (self._repeatFlag,
self._description(optName))
raise NotImplementedError("Unknown shellType %r" % (shellType,))
class Completions(object):
"""
Extra metadata for the shell tab-completion system.
@type descriptions: C{dict}
@ivar descriptions: ex. C{{"foo" : "use this description for foo instead"}}
A dict mapping long option names to alternate descriptions. When this
variable is defined, the descriptions contained here will override
those descriptions provided in the optFlags and optParameters
variables.
@type multiUse: C{list}
@ivar multiUse: ex. C{ ["foo", "bar"] }
An iterable containing those long option names which may appear on the
command line more than once. By default, options will only be completed
one time.
@type mutuallyExclusive: C{list} of C{tuple}
@ivar mutuallyExclusive: ex. C{ [("foo", "bar"), ("bar", "baz")] }
A sequence of sequences, with each sub-sequence containing those long
option names that are mutually exclusive. That is, those options that
cannot appear on the command line together.
@type optActions: C{dict}
@ivar optActions: A dict mapping long option names to shell "actions".
These actions define what may be completed as the argument to the
given option. By default, all files/dirs will be completed if no
action is given. For example::
{"foo" : CompleteFiles("*.py", descr="python files"),
"bar" : CompleteList(["one", "two", "three"]),
"colors" : CompleteMultiList(["red", "green", "blue"])}
Callables may instead be given for the values in this dict. The
callable should accept no arguments, and return a C{Completer}
instance used as the action in the same way as the literal actions in
the example above.
As you can see in the example above, the "foo" option will have files
that end in .py completed when the user presses Tab. The "bar"
option will have either of the strings "one", "two", or "three"
completed when the user presses Tab.
"colors" will allow multiple arguments to be completed, seperated by
commas. The possible arguments are red, green, and blue. Examples::
my_command --foo some-file.foo --colors=red,green
my_command --colors=green
my_command --colors=green,blue
Descriptions for the actions may be given with the optional C{descr}
keyword argument. This is separate from the description of the option
itself.
Normally Zsh does not show these descriptions unless you have
"verbose" completion turned on. Turn on verbosity with this in your
~/.zshrc::
zstyle ':completion:*' verbose yes
zstyle ':completion:*:descriptions' format '%B%d%b'
@type extraActions: C{list}
@ivar extraActions: Extra arguments are those arguments typically
appearing at the end of the command-line, which are not associated
with any particular named option. That is, the arguments that are
given to the parseArgs() method of your usage.Options subclass. For
example::
[CompleteFiles(descr="file to read from"),
Completer(descr="book title")]
In the example above, the 1st non-option argument will be described as
"file to read from" and all file/dir names will be completed (*). The
2nd non-option argument will be described as "book title", but no
actual completion matches will be produced.
See the various C{Completer} subclasses for other types of things which
may be tab-completed (users, groups, network interfaces, etc).
Also note the C{repeat=True} flag which may be passed to any of the
C{Completer} classes. This is set to allow the C{Completer} instance
to be re-used for subsequent command-line words. See the C{Completer}
docstring for details.
"""
def __init__(self, descriptions={}, multiUse=[],
mutuallyExclusive=[], optActions={}, extraActions=[]):
self.descriptions = descriptions
self.multiUse = multiUse
self.mutuallyExclusive = mutuallyExclusive
self.optActions = optActions
self.extraActions = extraActions
def docMakeChunks(optList, width=80):
"""
Makes doc chunks for option declarations.
Takes a list of dictionaries, each of which may have one or more
of the keys 'long', 'short', 'doc', 'default', 'optType'.
Returns a list of strings.
The strings may span multiple lines,
and all of them end with a newline.
"""
# XXX: sanity check to make sure we have a sane combination of keys.
maxOptLen = 0
for opt in optList:
optLen = len(opt.get('long', ''))
if optLen:
if opt.get('optType', None) == "parameter":
# these take up an extra character
optLen = optLen + 1
maxOptLen = max(optLen, maxOptLen)
colWidth1 = maxOptLen + len(" -s, -- ")
colWidth2 = width - colWidth1
# XXX - impose some sane minimum limit.
# Then if we don't have enough room for the option and the doc
# to share one line, they can take turns on alternating lines.
colFiller1 = " " * colWidth1
optChunks = []
seen = {}
for opt in optList:
if opt.get('short', None) in seen or opt.get('long', None) in seen:
continue
for x in opt.get('short', None), opt.get('long', None):
if x is not None:
seen[x] = 1
optLines = []
comma = " "
if opt.get('short', None):
short = "-%c" % (opt['short'],)
else:
short = ''
if opt.get('long', None):
long = opt['long']
if opt.get("optType", None) == "parameter":
long = long + '='
long = "%-*s" % (maxOptLen, long)
if short:
comma = ","
else:
long = " " * (maxOptLen + len('--'))
if opt.get('optType', None) == 'command':
column1 = ' %s ' % long
else:
column1 = " %2s%c --%s " % (short, comma, long)
if opt.get('doc', ''):
doc = opt['doc'].strip()
else:
doc = ''
if (opt.get("optType", None) == "parameter") \
and not (opt.get('default', None) is None):
doc = "%s [default: %s]" % (doc, opt['default'])
if (opt.get("optType", None) == "parameter") \
and opt.get('dispatch', None) is not None:
d = opt['dispatch']
if isinstance(d, CoerceParameter) and d.doc:
doc = "%s. %s" % (doc, d.doc)
if doc:
column2_l = text.wordWrap(doc, colWidth2)
else:
column2_l = ['']
optLines.append("%s%s\n" % (column1, column2_l.pop(0)))
for line in column2_l:
optLines.append("%s%s\n" % (colFiller1, line))
optChunks.append(''.join(optLines))
return optChunks
def flagFunction(method, name=None):
reqArgs = method.im_func.func_code.co_argcount
if reqArgs > 2:
raise UsageError('Invalid Option function for %s' %
(name or method.func_name))
if reqArgs == 2:
# argName = method.im_func.func_code.co_varnames[1]
return 0
return 1
def portCoerce(value):
"""
Coerce a string value to an int port number, and check its validity.
"""
value = int(value)
if value < 0 or value > 65535:
raise ValueError("Port number not in range: %s" % (value,))
return value
portCoerce.coerceDoc = "Must be an int between 0 and 65535."
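
A minimal sketch of an L{Options} subclass tying together the optFlags, optParameters, opt_ method, and parseArgs conventions documented above; all names here are illustrative.

from twisted.python import usage

class ServerOptions(usage.Options):
    optFlags = [['verbose', 'v', 'Log extra detail.']]
    optParameters = [
        ['port', 'p', 8080, 'Port number to listen on.', usage.portCoerce]]

    def opt_banner(self, text):
        """Set a banner to print at startup."""
        self['banner'] = text

    def parseArgs(self, *files):
        self['files'] = files

config = ServerOptions()
config.parseOptions(['-v', '--port', '8443', 'a.txt', 'b.txt'])
# config['verbose'] == 1, config['port'] == 8443,
# config['files'] == ('a.txt', 'b.txt')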

File diff suppressed because it is too large

View file

@ -0,0 +1,258 @@
# -*- test-case-name: twisted.python.test.test_versions -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Versions for Python packages.
See L{Version}.
"""
from __future__ import division, absolute_import
import sys, os
from twisted.python.compat import cmp, comparable, nativeString
@comparable
class _inf(object):
"""
An object that is bigger than all other objects.
"""
def __cmp__(self, other):
"""
@param other: Another object.
@type other: any
@return: 0 if other is inf, 1 otherwise.
@rtype: C{int}
"""
if other is _inf:
return 0
return 1
_inf = _inf()
class IncomparableVersions(TypeError):
"""
Two versions could not be compared.
"""
@comparable
class Version(object):
"""
An object that represents a three-part version number.
If running from an svn checkout, include the revision number in
the version string.
"""
def __init__(self, package, major, minor, micro, prerelease=None):
"""
@param package: Name of the package that this is a version of.
@type package: C{str}
@param major: The major version number.
@type major: C{int}
@param minor: The minor version number.
@type minor: C{int}
@param micro: The micro version number.
@type micro: C{int}
@param prerelease: The prerelease number.
@type prerelease: C{int}
"""
self.package = package
self.major = major
self.minor = minor
self.micro = micro
self.prerelease = prerelease
def short(self):
"""
Return a string in canonical short version format,
<major>.<minor>.<micro>[+rSVNVer].
"""
s = self.base()
svnver = self._getSVNVersion()
if svnver:
s += '+r' + nativeString(svnver)
return s
def base(self):
"""
Like L{short}, but without the +rSVNVer.
"""
if self.prerelease is None:
pre = ""
else:
pre = "pre%s" % (self.prerelease,)
return '%d.%d.%d%s' % (self.major,
self.minor,
self.micro,
pre)
def __repr__(self):
svnver = self._formatSVNVersion()
if svnver:
svnver = ' #' + svnver
if self.prerelease is None:
prerelease = ""
else:
prerelease = ", prerelease=%r" % (self.prerelease,)
return '%s(%r, %d, %d, %d%s)%s' % (
self.__class__.__name__,
self.package,
self.major,
self.minor,
self.micro,
prerelease,
svnver)
def __str__(self):
return '[%s, version %s]' % (
self.package,
self.short())
def __cmp__(self, other):
"""
Compare two versions, considering major versions, minor versions, micro
versions, then prereleases.
A version with a prerelease is always less than a version without a
prerelease. If both versions have prereleases, they will be included in
the comparison.
@param other: Another version.
@type other: L{Version}
@return: NotImplemented when the other object is not a Version, or one
of -1, 0, or 1.
@raise IncomparableVersions: when the package names of the versions
differ.
"""
if not isinstance(other, self.__class__):
return NotImplemented
if self.package != other.package:
raise IncomparableVersions("%r != %r"
% (self.package, other.package))
if self.prerelease is None:
prerelease = _inf
else:
prerelease = self.prerelease
if other.prerelease is None:
otherpre = _inf
else:
otherpre = other.prerelease
x = cmp((self.major,
self.minor,
self.micro,
prerelease),
(other.major,
other.minor,
other.micro,
otherpre))
return x
def _parseSVNEntries_4(self, entriesFile):
"""
Given a readable file object which represents a .svn/entries file in
format version 4, return the revision as a string. We do this by
        reading the first XML element in the document that has a 'revision'
attribute.
"""
from xml.dom.minidom import parse
doc = parse(entriesFile).documentElement
for node in doc.childNodes:
if hasattr(node, 'getAttribute'):
rev = node.getAttribute('revision')
if rev is not None:
return rev.encode('ascii')
def _parseSVNEntries_8(self, entriesFile):
"""
Given a readable file object which represents a .svn/entries file in
format version 8, return the revision as a string.
"""
entriesFile.readline()
entriesFile.readline()
entriesFile.readline()
return entriesFile.readline().strip()
# Add handlers for version 9 and 10 formats, which are the same as
# version 8 as far as revision information is concerned.
_parseSVNEntries_9 = _parseSVNEntries_8
_parseSVNEntriesTenPlus = _parseSVNEntries_8
def _getSVNVersion(self):
"""
        Figure out the SVN revision number based on the existence of
<package>/.svn/entries, and its contents. This requires discovering the
format version from the 'format' file and parsing the entries file
accordingly.
@return: None or string containing SVN Revision number.
"""
mod = sys.modules.get(self.package)
if mod:
svn = os.path.join(os.path.dirname(mod.__file__), '.svn')
if not os.path.exists(svn):
# It's not an svn working copy
return None
formatFile = os.path.join(svn, 'format')
if os.path.exists(formatFile):
# It looks like a less-than-version-10 working copy.
with open(formatFile, 'rb') as fObj:
format = fObj.read().strip()
parser = getattr(self, '_parseSVNEntries_' + format.decode('ascii'), None)
else:
# It looks like a version-10-or-greater working copy, which
# has version information in the entries file.
parser = self._parseSVNEntriesTenPlus
if parser is None:
return b'Unknown'
entriesFile = os.path.join(svn, 'entries')
entries = open(entriesFile, 'rb')
try:
try:
return parser(entries)
finally:
entries.close()
except:
return b'Unknown'
def _formatSVNVersion(self):
ver = self._getSVNVersion()
if ver is None:
return ''
return ' (SVN r%s)' % (ver,)
def getVersionString(version):
"""
Get a friendly string for the given version object.
@param version: A L{Version} object.
@return: A string containing the package and short version number.
"""
result = '%s %s' % (version.package, version.short())
return result
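# Illustrative usage (an addition for this listing, not part of upstream
# twisted.python.versions): shows ordering and formatting of the Version
# objects defined above. The package name and numbers are invented.
if __name__ == '__main__':
    released = Version("Example", 12, 3, 0)
    candidate = Version("Example", 13, 0, 0, prerelease=1)

    print(released < candidate)          # True: 12.3.0 sorts before 13.0.0pre1
    print(candidate.base())              # 13.0.0pre1
    print(getVersionString(released))    # Example 12.3.0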

View file

@ -0,0 +1,169 @@
# -*- test-case-name: twisted.python.test.test_win32 -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Win32 utilities.
See also twisted.python.shortcut.
@var O_BINARY: the 'binary' mode flag on Windows, or 0 on other platforms, so it
may safely be OR'ed into a mask for os.open.
"""
from __future__ import division, absolute_import
import re
import os
try:
import win32api
import win32con
except ImportError:
pass
from twisted.python.runtime import platform
# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/debug/base/system_error_codes.asp
ERROR_FILE_NOT_FOUND = 2
ERROR_PATH_NOT_FOUND = 3
ERROR_INVALID_NAME = 123
ERROR_DIRECTORY = 267
O_BINARY = getattr(os, "O_BINARY", 0)
class FakeWindowsError(OSError):
"""
Stand-in for sometimes-builtin exception on platforms for which it
is missing.
"""
try:
WindowsError = WindowsError
except NameError:
WindowsError = FakeWindowsError
# XXX fix this to use python's builtin _winreg?
def getProgramsMenuPath():
"""
Get the path to the Programs menu.
Probably will break on non-US Windows.
@return: the filesystem location of the common Start Menu->Programs.
@rtype: L{str}
"""
if not platform.isWindows():
return "C:\\Windows\\Start Menu\\Programs"
keyname = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders'
hShellFolders = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE,
keyname, 0, win32con.KEY_READ)
return win32api.RegQueryValueEx(hShellFolders, 'Common Programs')[0]
def getProgramFilesPath():
"""Get the path to the Program Files folder."""
keyname = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion'
currentV = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE,
keyname, 0, win32con.KEY_READ)
return win32api.RegQueryValueEx(currentV, 'ProgramFilesDir')[0]
_cmdLineQuoteRe = re.compile(r'(\\*)"')
_cmdLineQuoteRe2 = re.compile(r'(\\+)\Z')
def cmdLineQuote(s):
"""
Internal method for quoting a single command-line argument.
@param s: an unquoted string that you want to quote so that something that
does cmd.exe-style unquoting will interpret it as a single argument,
even if it contains spaces.
@type s: C{str}
@return: a quoted string.
@rtype: C{str}
"""
quote = ((" " in s) or ("\t" in s) or ('"' in s) or s == '') and '"' or ''
return quote + _cmdLineQuoteRe2.sub(r"\1\1", _cmdLineQuoteRe.sub(r'\1\1\\"', s)) + quote
def quoteArguments(arguments):
"""
Quote an iterable of command-line arguments for passing to CreateProcess or
a similar API. This allows the list passed to C{reactor.spawnProcess} to
match the child process's C{sys.argv} properly.
    @param arguments: an iterable of C{str}, each unquoted.
@return: a single string, with the given sequence quoted as necessary.
"""
return ' '.join([cmdLineQuote(a) for a in arguments])
class _ErrorFormatter(object):
"""
Formatter for Windows error messages.
@ivar winError: A callable which takes one integer error number argument
and returns an L{exceptions.WindowsError} instance for that error (like
L{ctypes.WinError}).
@ivar formatMessage: A callable which takes one integer error number
argument and returns a C{str} giving the message for that error (like
L{win32api.FormatMessage}).
@ivar errorTab: A mapping from integer error numbers to C{str} messages
        which correspond to those errors (like L{socket.errorTab}).
"""
def __init__(self, WinError, FormatMessage, errorTab):
self.winError = WinError
self.formatMessage = FormatMessage
self.errorTab = errorTab
def fromEnvironment(cls):
"""
Get as many of the platform-specific error translation objects as
possible and return an instance of C{cls} created with them.
"""
try:
from ctypes import WinError
except ImportError:
WinError = None
try:
from win32api import FormatMessage
except ImportError:
FormatMessage = None
try:
from socket import errorTab
except ImportError:
errorTab = None
return cls(WinError, FormatMessage, errorTab)
fromEnvironment = classmethod(fromEnvironment)
def formatError(self, errorcode):
"""
Returns the string associated with a Windows error message, such as the
ones found in socket.error.
        Attempts direct lookup against the win32 API (via ctypes, and then
        pywin32, if available), then in the error table in the socket module,
then finally defaulting to C{os.strerror}.
@param errorcode: the Windows error code
@type errorcode: C{int}
@return: The error message string
@rtype: C{str}
"""
if self.winError is not None:
return self.winError(errorcode).strerror
if self.formatMessage is not None:
return self.formatMessage(errorcode)
if self.errorTab is not None:
result = self.errorTab.get(errorcode)
if result is not None:
return result
return os.strerror(errorcode)
formatError = _ErrorFormatter.fromEnvironment().formatError
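# Illustrative usage (an addition for this listing, not part of upstream
# twisted.python.win32): demonstrates the quoting helpers and the error
# formatter built above. The argument strings below are invented for the demo.
if __name__ == '__main__':
    print(cmdLineQuote('plain'))                     # plain (no quoting needed)
    print(cmdLineQuote('has space'))                 # "has space"
    print(cmdLineQuote('say "hi"'))                  # "say \"hi\""
    print(quoteArguments(['prog.exe', 'a b', '']))   # prog.exe "a b" ""

    # errno 2 is ERROR_FILE_NOT_FOUND; the exact message text is
    # platform-dependent (win32 lookup on Windows, os.strerror elsewhere).
    print(formatError(ERROR_FILE_NOT_FOUND))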

View file

@ -0,0 +1,268 @@
# -*- test-case-name: twisted.test.test_paths.ZipFilePathTestCase -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module contains implementations of IFilePath for zip files.
See the constructor for ZipArchive for use.
"""
__metaclass__ = type
import os
import time
import errno
# Python 2.6 includes support for incremental unzipping of zipfiles, and
# thus obviates the need for ChunkingZipFile.
import sys
if sys.version_info[:2] >= (2, 6):
_USE_ZIPFILE = True
from zipfile import ZipFile
else:
_USE_ZIPFILE = False
from twisted.python.zipstream import ChunkingZipFile
from twisted.python.filepath import IFilePath, FilePath, AbstractFilePath
from zope.interface import implements
# using FilePath here exclusively rather than os to make sure that we don't do
# anything OS-path-specific here.
ZIP_PATH_SEP = '/' # In zipfiles, "/" is universally used as the
# path separator, regardless of platform.
class ZipPath(AbstractFilePath):
"""
I represent a file or directory contained within a zip file.
"""
implements(IFilePath)
sep = ZIP_PATH_SEP
def __init__(self, archive, pathInArchive):
"""
Don't construct me directly. Use ZipArchive.child().
@param archive: a ZipArchive instance.
@param pathInArchive: a ZIP_PATH_SEP-separated string.
"""
self.archive = archive
self.pathInArchive = pathInArchive
# self.path pretends to be os-specific because that's the way the
# 'zipimport' module does it.
self.path = os.path.join(archive.zipfile.filename,
*(self.pathInArchive.split(ZIP_PATH_SEP)))
def __cmp__(self, other):
if not isinstance(other, ZipPath):
return NotImplemented
return cmp((self.archive, self.pathInArchive),
(other.archive, other.pathInArchive))
def __repr__(self):
parts = [os.path.abspath(self.archive.path)]
parts.extend(self.pathInArchive.split(ZIP_PATH_SEP))
path = os.sep.join(parts)
return "ZipPath('%s')" % (path.encode('string-escape'),)
def parent(self):
splitup = self.pathInArchive.split(ZIP_PATH_SEP)
if len(splitup) == 1:
return self.archive
return ZipPath(self.archive, ZIP_PATH_SEP.join(splitup[:-1]))
def child(self, path):
"""
Return a new ZipPath representing a path in C{self.archive} which is
a child of this path.
@note: Requesting the C{".."} (or other special name) child will not
cause L{InsecurePath} to be raised since these names do not have
any special meaning inside a zip archive. Be particularly
careful with the C{path} attribute (if you absolutely must use
it) as this means it may include special names with special
meaning outside of the context of a zip archive.
"""
return ZipPath(self.archive, ZIP_PATH_SEP.join([self.pathInArchive, path]))
def sibling(self, path):
return self.parent().child(path)
# preauthChild = child
def exists(self):
return self.isdir() or self.isfile()
def isdir(self):
return self.pathInArchive in self.archive.childmap
def isfile(self):
return self.pathInArchive in self.archive.zipfile.NameToInfo
def islink(self):
return False
def listdir(self):
if self.exists():
if self.isdir():
return self.archive.childmap[self.pathInArchive].keys()
else:
raise OSError(errno.ENOTDIR, "Leaf zip entry listed")
else:
raise OSError(errno.ENOENT, "Non-existent zip entry listed")
def splitext(self):
"""
Return a value similar to that returned by os.path.splitext.
"""
# This happens to work out because of the fact that we use OS-specific
# path separators in the constructor to construct our fake 'path'
# attribute.
return os.path.splitext(self.path)
def basename(self):
return self.pathInArchive.split(ZIP_PATH_SEP)[-1]
def dirname(self):
# XXX NOTE: This API isn't a very good idea on filepath, but it's even
# less meaningful here.
return self.parent().path
def open(self, mode="r"):
if _USE_ZIPFILE:
return self.archive.zipfile.open(self.pathInArchive, mode=mode)
else:
# XXX oh man, is this too much hax?
self.archive.zipfile.mode = mode
return self.archive.zipfile.readfile(self.pathInArchive)
def changed(self):
pass
def getsize(self):
"""
Retrieve this file's size.
@return: file size, in bytes
"""
return self.archive.zipfile.NameToInfo[self.pathInArchive].file_size
def getAccessTime(self):
"""
Retrieve this file's last access-time. This is the same as the last access
time for the archive.
@return: a number of seconds since the epoch
"""
return self.archive.getAccessTime()
def getModificationTime(self):
"""
Retrieve this file's last modification time. This is the time of
modification recorded in the zipfile.
@return: a number of seconds since the epoch.
"""
return time.mktime(
self.archive.zipfile.NameToInfo[self.pathInArchive].date_time
+ (0, 0, 0))
def getStatusChangeTime(self):
"""
Retrieve this file's last modification time. This name is provided for
compatibility, and returns the same value as getmtime.
@return: a number of seconds since the epoch.
"""
return self.getModificationTime()
class ZipArchive(ZipPath):
""" I am a FilePath-like object which can wrap a zip archive as if it were a
directory.
"""
archive = property(lambda self: self)
def __init__(self, archivePathname):
"""Create a ZipArchive, treating the archive at archivePathname as a zip file.
@param archivePathname: a str, naming a path in the filesystem.
"""
if _USE_ZIPFILE:
self.zipfile = ZipFile(archivePathname)
else:
self.zipfile = ChunkingZipFile(archivePathname)
self.path = archivePathname
self.pathInArchive = ''
# zipfile is already wasting O(N) memory on cached ZipInfo instances,
# so there's no sense in trying to do this lazily or intelligently
self.childmap = {} # map parent: list of children
for name in self.zipfile.namelist():
name = name.split(ZIP_PATH_SEP)
for x in range(len(name)):
child = name[-x]
parent = ZIP_PATH_SEP.join(name[:-x])
if parent not in self.childmap:
self.childmap[parent] = {}
self.childmap[parent][child] = 1
parent = ''
def child(self, path):
"""
Create a ZipPath pointing at a path within the archive.
@param path: a str with no path separators in it, either '/' or the
system path separator, if it's different.
"""
return ZipPath(self, path)
def exists(self):
"""
Returns true if the underlying archive exists.
"""
return FilePath(self.zipfile.filename).exists()
def getAccessTime(self):
"""
Return the archive file's last access time.
"""
return FilePath(self.zipfile.filename).getAccessTime()
def getModificationTime(self):
"""
Return the archive file's modification time.
"""
return FilePath(self.zipfile.filename).getModificationTime()
def getStatusChangeTime(self):
"""
Return the archive file's status change time.
"""
return FilePath(self.zipfile.filename).getStatusChangeTime()
def __repr__(self):
return 'ZipArchive(%r)' % (os.path.abspath(self.path),)
__all__ = ['ZipArchive', 'ZipPath']
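# Illustrative usage (an addition for this listing, not part of upstream
# twisted.python.zippath): builds a throwaway archive and walks it with the
# FilePath-like API defined above. The file names are invented for the demo.
if __name__ == '__main__':
    import tempfile
    import zipfile as _zipfile

    tmp = os.path.join(tempfile.mkdtemp(), 'demo.zip')
    zf = _zipfile.ZipFile(tmp, 'w')
    zf.writestr('docs/readme.txt', 'hello')
    zf.writestr('setup.py', 'pass\n')
    zf.close()

    archive = ZipArchive(tmp)
    for child in archive.children():
        kind = 'dir' if child.isdir() else 'file'
        print("%s (%s)" % (child.basename(), kind))
    print(archive.child('docs').child('readme.txt').getContent())   # hello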

View file

@ -0,0 +1,319 @@
# -*- test-case-name: twisted.python.test.test_zipstream -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An incremental approach to unzipping files. This allows you to unzip a little
bit of a file at a time, which means you can report progress as a file unzips.
"""
import zipfile
import os.path
import zlib
import struct
_fileHeaderSize = struct.calcsize(zipfile.structFileHeader)
class ChunkingZipFile(zipfile.ZipFile):
"""
A C{ZipFile} object which, with L{readfile}, also gives you access to a
file-like object for each entry.
"""
def readfile(self, name):
"""
Return file-like object for name.
"""
if self.mode not in ("r", "a"):
raise RuntimeError('read() requires mode "r" or "a"')
if not self.fp:
raise RuntimeError(
"Attempt to read ZIP archive that was already closed")
zinfo = self.getinfo(name)
self.fp.seek(zinfo.header_offset, 0)
fheader = self.fp.read(_fileHeaderSize)
if fheader[0:4] != zipfile.stringFileHeader:
raise zipfile.BadZipfile("Bad magic number for file header")
fheader = struct.unpack(zipfile.structFileHeader, fheader)
fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])
if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename:
raise zipfile.BadZipfile(
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname))
if zinfo.compress_type == zipfile.ZIP_STORED:
return ZipFileEntry(self, zinfo.compress_size)
elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
return DeflatedZipFileEntry(self, zinfo.compress_size)
else:
raise zipfile.BadZipfile(
"Unsupported compression method %d for file %s" %
(zinfo.compress_type, name))
class _FileEntry(object):
"""
Abstract superclass of both compressed and uncompressed variants of
file-like objects within a zip archive.
@ivar chunkingZipFile: a chunking zip file.
@type chunkingZipFile: L{ChunkingZipFile}
@ivar length: The number of bytes within the zip file that represent this
file. (This is the size on disk, not the number of decompressed bytes
which will result from reading it.)
@ivar fp: the underlying file object (that contains pkzip data). Do not
touch this, please. It will quite likely move or go away.
@ivar closed: File-like 'closed' attribute; True before this file has been
closed, False after.
@type closed: C{bool}
@ivar finished: An older, broken synonym for 'closed'. Do not touch this,
please.
@type finished: C{int}
"""
def __init__(self, chunkingZipFile, length):
"""
Create a L{_FileEntry} from a L{ChunkingZipFile}.
"""
self.chunkingZipFile = chunkingZipFile
self.fp = self.chunkingZipFile.fp
self.length = length
self.finished = 0
self.closed = False
def isatty(self):
"""
Returns false because zip files should not be ttys
"""
return False
def close(self):
"""
Close self (file-like object)
"""
self.closed = True
self.finished = 1
del self.fp
def readline(self):
"""
Read a line.
"""
bytes = ""
for byte in iter(lambda : self.read(1), ""):
bytes += byte
if byte == "\n":
break
return bytes
def next(self):
"""
Implement next as file does (like readline, except raises StopIteration
at EOF)
"""
nextline = self.readline()
if nextline:
return nextline
raise StopIteration()
def readlines(self):
"""
Returns a list of all the lines
"""
return list(self)
def xreadlines(self):
"""
Returns an iterator (so self)
"""
return self
def __iter__(self):
"""
Returns an iterator (so self)
"""
return self
class ZipFileEntry(_FileEntry):
"""
File-like object used to read an uncompressed entry in a ZipFile
"""
def __init__(self, chunkingZipFile, length):
_FileEntry.__init__(self, chunkingZipFile, length)
self.readBytes = 0
def tell(self):
return self.readBytes
def read(self, n=None):
if n is None:
n = self.length - self.readBytes
if n == 0 or self.finished:
return ''
data = self.chunkingZipFile.fp.read(
min(n, self.length - self.readBytes))
self.readBytes += len(data)
if self.readBytes == self.length or len(data) < n:
self.finished = 1
return data
class DeflatedZipFileEntry(_FileEntry):
"""
File-like object used to read a deflated entry in a ZipFile
"""
def __init__(self, chunkingZipFile, length):
_FileEntry.__init__(self, chunkingZipFile, length)
self.returnedBytes = 0
self.readBytes = 0
self.decomp = zlib.decompressobj(-15)
self.buffer = ""
def tell(self):
return self.returnedBytes
def read(self, n=None):
if self.finished:
return ""
if n is None:
result = [self.buffer,]
result.append(
self.decomp.decompress(
self.chunkingZipFile.fp.read(
self.length - self.readBytes)))
result.append(self.decomp.decompress("Z"))
result.append(self.decomp.flush())
self.buffer = ""
self.finished = 1
result = "".join(result)
self.returnedBytes += len(result)
return result
else:
while len(self.buffer) < n:
data = self.chunkingZipFile.fp.read(
min(n, 1024, self.length - self.readBytes))
self.readBytes += len(data)
if not data:
result = (self.buffer
+ self.decomp.decompress("Z")
+ self.decomp.flush())
self.finished = 1
self.buffer = ""
self.returnedBytes += len(result)
return result
else:
self.buffer += self.decomp.decompress(data)
result = self.buffer[:n]
self.buffer = self.buffer[n:]
self.returnedBytes += len(result)
return result
DIR_BIT = 16
def countZipFileChunks(filename, chunksize):
"""
Predict the number of chunks that will be extracted from the entire
zipfile, given chunksize blocks.
"""
totalchunks = 0
zf = ChunkingZipFile(filename)
for info in zf.infolist():
totalchunks += countFileChunks(info, chunksize)
return totalchunks
def countFileChunks(zipinfo, chunksize):
"""
Count the number of chunks that will result from the given C{ZipInfo}.
@param zipinfo: a C{zipfile.ZipInfo} instance describing an entry in a zip
archive to be counted.
    @return: the number of chunks the given entry will be split into. (Even an empty file
counts as one chunk.)
@rtype: C{int}
"""
count, extra = divmod(zipinfo.file_size, chunksize)
if extra > 0:
count += 1
return count or 1
def unzipIterChunky(filename, directory='.', overwrite=0,
chunksize=4096):
"""
Return a generator for the zipfile. This implementation will yield after
every chunksize uncompressed bytes, or at the end of a file, whichever
comes first.
The value it yields is the number of chunks left to unzip.
"""
czf = ChunkingZipFile(filename, 'r')
if not os.path.exists(directory):
os.makedirs(directory)
remaining = countZipFileChunks(filename, chunksize)
names = czf.namelist()
infos = czf.infolist()
for entry, info in zip(names, infos):
isdir = info.external_attr & DIR_BIT
f = os.path.join(directory, entry)
if isdir:
# overwrite flag only applies to files
if not os.path.exists(f):
os.makedirs(f)
remaining -= 1
yield remaining
else:
# create the directory the file will be in first,
# since we can't guarantee it exists
fdir = os.path.split(f)[0]
if not os.path.exists(fdir):
os.makedirs(fdir)
if overwrite or not os.path.exists(f):
outfile = file(f, 'wb')
fp = czf.readfile(entry)
if info.file_size == 0:
remaining -= 1
yield remaining
while fp.tell() < info.file_size:
hunk = fp.read(chunksize)
outfile.write(hunk)
remaining -= 1
yield remaining
outfile.close()
else:
remaining -= countFileChunks(info, chunksize)
yield remaining
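# Illustrative usage (an addition for this listing, not part of upstream
# twisted.python.zipstream): incremental extraction with progress reporting,
# built on the generator above. Archive and output paths are invented.
if __name__ == '__main__':
    import tempfile
    import zipfile as _zipfile

    workdir = tempfile.mkdtemp()
    archive = os.path.join(workdir, 'bundle.zip')
    zf = _zipfile.ZipFile(archive, 'w')
    zf.writestr('data.txt', 'x' * 10000)   # big enough for several chunks
    zf.close()

    total = countZipFileChunks(archive, 4096)
    for remaining in unzipIterChunky(archive, os.path.join(workdir, 'out'),
                                     chunksize=4096):
        print("%d of %d chunks extracted" % (total - remaining, total))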

View file

@ -0,0 +1,9 @@
THIS DIRECTORY AND ALL FILES INCLUDED ARE DEPRECATED.
These are the old zsh completion functions for Twisted commands... they used
to contain full completion functions, but now they've simply been replaced
by the current "stub" code that delegates completion control to Twisted.
This directory and included files need to remain for several years in order
to provide backwards-compatibility with an old version of the Twisted
stub function that was shipped with Zsh.

View file

@ -0,0 +1,34 @@
#compdef cftp
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef ckeygen
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef conch
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef lore
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef manhole
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef mktap
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef pyhtmlizer
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef tap2deb
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef tap2rpm
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef tapconvert
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef tkconch
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef tkmktap
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef trial
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef twistd
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi

View file

@ -0,0 +1,34 @@
#compdef websetroot
# This file is deprecated. See README.
# This is the ZSH completion file for Twisted commands. It calls the current
# command-line with the special "--_shell-completion" option which is handled
# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
# handle the completions for this particular command-line.
#
# 3rd parties that wish to provide zsh completion for commands that
# use t.p.usage may copy this file and change the first line to reference
# the name(s) of their command(s).
#
# This file is included in the official Zsh distribution as
# Completion/Unix/Command/_twisted
# redirect stderr to /dev/null otherwise deprecation warnings may get puked
# all over the user's terminal if completing options for mktap or other
# deprecated commands. Redirect stderr to a file to debug errors.
local cmd output
cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
output=$("$cmd[@]" 2>/dev/null)
if [[ $output == "#compdef "* ]]; then
# Looks like we got a valid completion function - so eval it to produce
# the completion matches.
eval $output
else
echo "\nCompletion error running command:" ${(qqq)cmd}
echo -n "If output below is unhelpful you may need to edit this file and "
echo "redirect stderr to a file."
echo "Expected completion function, but instead got:"
echo $output
return 1
fi